1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Root of the `vela` command-line interface. clap derives the parser from
// this struct; the single `command` field dispatches to one of the
// subcommands in `Commands`.
//
// NOTE(review): plain `//` comments are used in this module instead of `///`
// doc comments so that clap-generated `--help` text is left unchanged.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
29
// All top-level `vela` subcommands.
//
// Conventions visible throughout this enum (the authoritative behavior lives
// in the per-command handlers, which are not in this file — variant purposes
// below are inferred from names and flag shapes; confirm against handlers):
//   - a positional or `--frontier` `PathBuf` names the frontier being operated on;
//   - `json: bool` selects machine-readable output;
//   - `dry_run: bool` previews without writing;
//   - `apply: bool` on review-style commands commits the change;
//   - `reviewer`/`actor`/`author`/`by` identify who performed the action.
#[derive(Subcommand)]
enum Commands {
    // Scan a folder and propose findings into a frontier (LLM `backend` selectable).
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a notes vault into frontier state.
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a code tree into frontier state.
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Review pending proposals, optionally batched (`batch_size` defaults to 1).
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Detect tensions (contradictions) within a frontier.
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Plan experiments from frontier findings.
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a data directory into frontier state; `sample_rows` bounds sampling.
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Validate a frontier: schema, stats, conformance suite, optional `--fix`.
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    // State-integrity check of a frontier.
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Impact analysis of one finding, optionally bounded by `--depth`.
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    // Normalize a frontier file; `--write` edits in place, `--out` writes elsewhere,
    // `--rewrite-ids`/`--id-map` control identifier rewriting.
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    // Build a proof packet from a frontier using a named template.
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    // Repository maintenance subcommands (status/doctor).
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    // Serve a frontier (MCP and/or HTTP). The positional frontier may be
    // omitted when `--frontiers` or `--setup` is given.
    Serve {
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    // Summarize frontier status.
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Show the event log, newest-first presumably; filter by `--kind`.
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List items awaiting attention.
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Free-text question against a frontier; trailing args form the question.
    Ask {
        frontier: PathBuf,
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    // Frontier statistics.
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Search one frontier (`--source`) or many (`--all` directory).
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Rank tensions; filters for both-high-confidence and cross-domain pairs.
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    // Gap-ranking subcommands.
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    // Cross-frontier bridging over two or more inputs. `--novelty` takes an
    // explicit true/false value (ArgAction::Set) and defaults to true.
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    // Export a frontier (default CSV) to stdout or `--output`.
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // Proof-packet subcommands (inspect/validate).
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    // Verify a frontier or packet at `path`.
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Benchmark extraction quality against gold data; many optional inputs,
    // threshold gates (`min_*`), and a suite mode.
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    // Run the conformance suite from `dir`.
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    // Print version information.
    Version,
    // Signing subcommands (keypair, apply, verify, threshold).
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    // Actor registry subcommands.
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    // Federation / peer-sync subcommands.
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    // Causal-graph subcommands.
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    // Frontier lifecycle subcommands (new, deps, diff, …).
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    // Signing-queue subcommands.
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    // Registry (hub) subcommands.
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    // Initialize a new vela project in `path`.
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    // Import a frontier, optionally into a target location.
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    // Diff: `target` vs optional second frontier, or against `--frontier`.
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    // Proposal-management subcommands.
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    // Fold a proof packet's artifacts back into frontier state.
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    // Bridge-kit subcommands.
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    // Source-adapter subcommands.
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    // Runtime-adapter subcommands.
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    // Finding CRUD subcommands. NOTE(review): this variant names its field
    // `command` while every sibling uses `action` — inconsistent but harmless.
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    // Link-creation subcommands.
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    // Launch the local workbench UI on `--port` (default 3850).
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    // Bridge review subcommands (derive/list/show/confirm/refute).
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    // Entity-resolution subcommands.
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // Review a finding: set status and/or reason; `--apply` commits.
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a free-text note to a finding.
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a caveat to a finding (same shape as Note).
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Revise a finding's confidence to a new value with a stated reason.
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Reject a finding with a reason.
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Event history of one finding; `--as-of` restricts to a point in time.
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    // Import events from `source` into another frontier.
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Retract a finding with a reason.
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Manually resolve an entity mention on a finding to an external identifier.
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Fetch a source by identifier (e.g. DOI/PMID — confirm in handler),
    // with local cache and `--refresh` to bypass it.
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    // Repair an evidence span (section + text) on a finding.
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Repair (or clear, when omitted) an atom's locator.
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Propagate a retraction or confidence reduction (`--to` new value)
    // through dependent findings.
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // Record a replication attempt against `target`; cascading updates can
    // be suppressed with `--no-cascade`.
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    // List replication records, optionally filtered by target.
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Register a dataset (name, hash, provenance) in the frontier.
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    // List registered datasets.
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Register a code artifact (repo/commit/path/hash, optional line range).
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List registered code artifacts.
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Deposit a generic artifact; `--target` and `--metadata` are repeatable.
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // List artifacts, optionally filtered by target.
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Audit artifact records.
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Generate a decision brief from the frontier.
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Summarize clinical-trial-related state.
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Report source-verification status.
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Plan source ingestion.
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Import a ClinicalTrials.gov record by NCT id, or from `--input-json`.
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    // Record a negative result. Field groups: trial-style stats (endpoint …
    // registry_id), replication-style details (reagent/observation/attempts),
    // and source provenance (source_title/doi/url/year).
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    // List negative results, optionally filtered by target.
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Create a research trajectory anchored to targets.
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    // Append a step to an existing trajectory; `--reference` is repeatable.
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    // List trajectories, optionally filtered by target.
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Set the access tier of an object (typed by object_type/object_id).
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Record a prediction (claim + resolution criterion + confidence).
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    // Resolve an open prediction with an observed outcome.
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List predictions; `--open` restricts to unresolved ones.
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    // Calibration report, optionally for one actor.
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Expire overdue predictions; `--now` overrides the clock (for testing,
    // presumably — confirm in handler).
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compute consensus on a target with a chosen weighting scheme; can be
    // constrained to a causal claim / minimum causal grade.
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // Ingest a source at `path` (a String, not PathBuf — may accept URLs or
    // identifiers; confirm in handler) into a frontier.
    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    // Propose a status change for a finding (required status, unlike Review).
    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    // Accept a proposal (shorthand for `proposals accept`).
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    // Attest a frontier with a private key.
    Attest {
        frontier: PathBuf,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        json: bool,
    },

    // Lineage of a finding, optionally as of a timestamp.
    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // Carina-primitive subcommands.
    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },
}
1635
// Subcommands of `vela carina`: validate files against Carina primitives,
// list known primitives, or dump one primitive's schema.
#[derive(Subcommand)]
enum CarinaAction {
    // Validate `path`, optionally against a single named primitive.
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List available primitives.
    List {
        #[arg(long)]
        json: bool,
    },
    // Print the schema of one primitive.
    Schema { primitive: String },
}
1665
// Subcommands of `vela packet`: inspect or validate a proof packet.
#[derive(Subcommand)]
enum PacketAction {
    // Show the contents/metadata of a packet.
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a packet.
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1681
// Subcommands of `vela sign`: key management, signing, verification, and
// per-finding signature thresholds.
#[derive(Subcommand)]
enum SignAction {
    // Generate a keypair into `--out` (default `.vela/keys`).
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Sign a frontier with a private key.
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Verify signatures, optionally against an explicit public key.
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Set the required signature count for a finding.
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
1722
// Subcommands of `vela actor`: manage the actor registry of a frontier.
#[derive(Subcommand)]
enum ActorAction {
    // Register an actor id with a public key; tier/orcid/clearance optional.
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List registered actors.
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1759
// Subcommands of `vela causal`: audit, query, and intervene on the causal graph.
#[derive(Subcommand)]
enum CausalAction {
    // Audit causal claims; `--problems-only` filters to issues.
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    // Estimate the effect of `source` on `--on`.
    Effect {
        frontier: PathBuf,
        source: String,
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    // Show the causal graph, optionally centered on one node.
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Counterfactual: set `intervene_on` to `--set-to` and read off `--target`.
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
1826
// Subcommands of `vela bridges`: derive cross-frontier bridges and manage
// their review lifecycle (list/show/confirm/refute).
#[derive(Subcommand)]
enum BridgesAction {
    // Derive bridges between two frontiers, labeled "a" and "b" by default.
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    // List bridges, optionally filtered by status.
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Show one bridge.
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    // Confirm a bridge (reviewer/note optional).
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Refute a bridge (reviewer/note optional).
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
1894
// Subcommands of `vela federation`: manage peers and synchronize state
// (directly or via a hub), plus pushing conflict resolutions to a peer.
#[derive(Subcommand)]
enum FederationAction {
    // Register a peer (id, url, pubkey, optional note).
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    // List peers.
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Remove a peer by id.
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    // Sync with a peer; `--url` overrides the stored one, `--via-hub` routes
    // through the hub, and `--allow-cross-vfr` relaxes the VFR-id guard.
    Sync {
        frontier: PathBuf,
        peer_id: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Push a resolution for a conflict event to the peer named by `--to`,
    // optionally signing with `--key`.
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        // NOTE(review): `long = "to"` is redundant (a field named `to` already
        // yields `--to`), but kept byte-for-byte to avoid any CLI change.
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2011
// Subcommands of `vela frontier`: create frontiers, materialize them, and
// manage/inspect their dependencies on other frontiers (VFRs).
#[derive(Subcommand)]
enum FrontierAction {
    // Create a new frontier at `path`; `--force` overwrites.
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    // Materialize a frontier's derived state.
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Add a dependency on another VFR (pinned via locator + snapshot).
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List dependencies.
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Remove a dependency by VFR id.
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    // Refresh dependency snapshots from a hub (default: the public hub URL).
    RefreshDeps {
        frontier: PathBuf,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Diff a frontier against a point in time (`--since`) or a `--week`.
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2122
// Subcommands of `vela repo`: repository-level status and diagnostics.
#[derive(Subcommand)]
enum RepoAction {
    // Summarize repository status for a frontier.
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Run diagnostics ("doctor") on a frontier's repository.
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2142
// Subcommands of `vela queue`: inspect, sign, and clear the signing queue.
// `--queue-file` overrides the default queue location everywhere.
#[derive(Subcommand)]
enum QueueAction {
    // List queued entries.
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Sign queued entries as `--actor` with `--key`; `--yes-to-all`
    // (alias `--all`) skips per-entry confirmation.
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    // Clear the queue.
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2182
// Subcommands of `vela registry`: interact with a frontier registry/hub —
// list, publish, query reverse dependencies, mirror, and pull.
#[derive(Subcommand)]
enum RegistryAction {
    // List registry entries, optionally from a specific hub.
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Publish a frontier under `--owner`, signed with `--key`; `--to`
    // selects the hub, `--locator` overrides the published locator.
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List frontiers that depend on `vfr_id` (default: the public hub URL).
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    // Mirror a VFR from one hub to another.
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    // Pull a VFR into `--out`; `--transitive` also pulls dependencies,
    // bounded by `--depth` (default 4).
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2275
// Subcommands of `vela gaps`: rank knowledge gaps.
#[derive(Subcommand)]
enum GapsAction {
    // Rank gaps; top-N (default 10), optionally filtered by domain.
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2289
// Subcommands of `vela link`: create typed links between findings.
#[derive(Subcommand)]
enum LinkAction {
    // Add a link `--from` → `--to` of `--type` (default "supports");
    // `--no-check-target` skips validating that the target exists.
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2328
// Subcommands of `vela entity`: run entity resolution and list resolvers
// (List takes no frontier — presumably lists available resolvers; confirm
// in handler).
#[derive(Subcommand)]
enum EntityAction {
    // Resolve entities across a frontier; `--force` re-resolves.
    Resolve {
        frontier: PathBuf,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
}
2351
// Subcommands of `vela finding`: add a finding, supersede an existing one,
// or set its causal claim. Add/Supersede share the same provenance and
// evidence-context flags (doi/pmid/year/journal/url, species, in_vivo, …);
// note the differing confidence/evidence_type defaults between the two.
#[derive(Subcommand)]
enum FindingCommands {
    // Add a manually-asserted finding (default confidence 0.3, theoretical).
    Add {
        frontier: PathBuf,
        #[arg(long)]
        assertion: String,
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        #[arg(long, default_value = "manual finding")]
        source: String,
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        #[arg(long)]
        author: String,
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        #[arg(long, default_value = "")]
        entities: String,
        #[arg(long)]
        entities_reviewed: bool,
        #[arg(long)]
        evidence_span: Vec<String>,
        #[arg(long)]
        gap: bool,
        #[arg(long)]
        negative_space: bool,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        source_authors: Option<String>,
        #[arg(long)]
        conditions_text: Option<String>,
        #[arg(long)]
        species: Option<String>,
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        apply: bool,
    },
    // Replace finding `old_id` with a new assertion (default confidence 0.5,
    // experimental), recording the supersession reason.
    Supersede {
        frontier: PathBuf,
        old_id: String,
        #[arg(long)]
        assertion: String,
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        #[arg(long, default_value = "manual finding")]
        source: String,
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        reason: String,
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        #[arg(long, default_value = "")]
        entities: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        source_authors: Option<String>,
        #[arg(long)]
        conditions_text: Option<String>,
        #[arg(long)]
        species: Option<String>,
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        apply: bool,
    },
    // Set/replace the causal claim (and optional grade) on a finding.
    CausalSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        grade: Option<String>,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2542
// Subcommands of `vela proposals`: full proposal lifecycle — list, show,
// preview, import/export, validate, accept, reject.
#[derive(Subcommand)]
enum ProposalAction {
    // List proposals, optionally filtered by status.
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Show one proposal.
    Show {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview applying a proposal without committing it.
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file.
    Import {
        frontier: PathBuf,
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposals file (no frontier needed).
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals to `output`, optionally filtered by status.
    Export {
        frontier: PathBuf,
        output: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal with reviewer + reason.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal with reviewer + reason.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2614
// Subcommands under `vela source-adapter`; handled by `cmd_source_adapter`
// (async — see the dispatch in `run_command`). `//` comments only: `///`
// would alter clap's generated help text.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // Run a named source adapter against a frontier.
    Run {
        frontier: PathBuf,
        // Positional adapter name (resolved by the handler).
        adapter: String,
        #[arg(long)]
        actor: String,
        // Repeatable: each `--entry VALUE` appends to this list.
        #[arg(long = "entry")]
        entries: Vec<String>,
        #[arg(long)]
        priority: Option<String>,
        #[arg(long)]
        include_excluded: bool,
        // Allow the run to succeed even if some entries fail — TODO confirm
        // exact semantics against the handler.
        #[arg(long)]
        allow_partial: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        input_dir: Option<PathBuf>,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
}
2652
// Subcommands under `vela runtime-adapter`; handled by `cmd_runtime_adapter`
// (synchronous, unlike the source-adapter path). `//` comments only to keep
// clap help output unchanged.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // Run a named runtime adapter over a single required `--input` file.
    Run {
        frontier: PathBuf,
        adapter: String,
        #[arg(long)]
        input: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
}
2678
// Subcommands under `vela bridge-kit`; handled by `cmd_bridge_kit`.
// `//` comments only so clap's generated help is untouched.
#[derive(Subcommand)]
enum BridgeKitAction {
    // Validate a bridge-kit source file; no frontier is involved.
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2690
/// CLI entry point: best-effort loads `.env`, parses argv via clap, and
/// dispatches every `Commands` variant to its handler. Error paths go
/// through `fail`/`fail_return` (defined elsewhere in this file), which
/// terminate the process rather than returning.
pub async fn run_command() {
    // Missing .env is fine — `.ok()` discards the error on purpose.
    dotenvy::dotenv().ok();

    match Cli::parse().command {
        // --- agent/compile pipeline commands (async) -------------------------
        Commands::Scout {
            folder,
            frontier,
            backend,
            dry_run,
            json,
        } => {
            cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
        }
        Commands::CompileNotes {
            vault,
            frontier,
            backend,
            max_files,
            max_items_per_category,
            dry_run,
            json,
        } => {
            cmd_compile_notes(
                &vault,
                &frontier,
                backend.as_deref(),
                max_files,
                max_items_per_category,
                dry_run,
                json,
            )
            .await;
        }
        Commands::CompileCode {
            root,
            frontier,
            backend,
            max_files,
            dry_run,
            json,
        } => {
            cmd_compile_code(
                &root,
                &frontier,
                backend.as_deref(),
                max_files,
                dry_run,
                json,
            )
            .await;
        }
        Commands::CompileData {
            root,
            frontier,
            backend,
            sample_rows,
            dry_run,
            json,
        } => {
            cmd_compile_data(
                &root,
                &frontier,
                backend.as_deref(),
                sample_rows,
                dry_run,
                json,
            )
            .await;
        }
        Commands::ReviewPending {
            frontier,
            backend,
            max_proposals,
            batch_size,
            dry_run,
            json,
        } => {
            cmd_review_pending(
                &frontier,
                backend.as_deref(),
                max_proposals,
                batch_size,
                dry_run,
                json,
            )
            .await;
        }
        Commands::FindTensions {
            frontier,
            backend,
            max_findings,
            dry_run,
            json,
        } => {
            cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
        }
        Commands::PlanExperiments {
            frontier,
            backend,
            max_findings,
            dry_run,
            json,
        } => {
            cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
        }
        // --- validation / inspection (sync) ----------------------------------
        Commands::Check {
            source,
            schema,
            stats,
            conformance,
            conformance_dir,
            all,
            schema_only,
            strict,
            fix,
            json,
        } => cmd_check(
            source.as_deref(),
            schema,
            stats,
            conformance,
            &conformance_dir,
            all,
            schema_only,
            strict,
            fix,
            json,
        ),
        Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
        Commands::Impact {
            frontier,
            finding_id,
            depth,
            json,
        } => cmd_impact(&frontier, &finding_id, depth, json),
        Commands::Normalize {
            source,
            out,
            write,
            dry_run,
            rewrite_ids,
            id_map,
            resync_provenance,
            json,
        } => cmd_normalize(
            &source,
            out.as_deref(),
            write,
            dry_run,
            rewrite_ids,
            id_map.as_deref(),
            resync_provenance,
            json,
        ),
        Commands::Proof {
            frontier,
            out,
            template,
            gold,
            record_proof_state,
            json,
        } => cmd_proof(
            &frontier,
            &out,
            &template,
            gold.as_deref(),
            record_proof_state,
            json,
        ),
        Commands::Repo { action } => cmd_repo(action),
        // Serve has three modes: --setup (write MCP config), --check-tools
        // (report only), or actually running (HTTP when a port resolves,
        // stdio otherwise).
        Commands::Serve {
            frontier,
            frontiers,
            backend,
            http,
            setup,
            check_tools,
            json,
            workbench,
        } => {
            if setup {
                cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
            } else if check_tools {
                let source =
                    serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
                match serve::check_tools(source) {
                    Ok(report) => {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&report)
                                    .expect("failed to serialize tool check report")
                            );
                        } else {
                            print_tool_check_report(&report);
                        }
                    }
                    Err(e) => fail(&format!("Tool check failed: {e}")),
                }
            } else {
                let source =
                    serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
                // --workbench forces an HTTP port (3848 by default); otherwise
                // only serve HTTP if --http was given explicitly.
                let resolved_port = if workbench {
                    Some(http.unwrap_or(3848))
                } else {
                    http
                };
                if let Some(port) = resolved_port {
                    serve::run_http(source, backend.as_deref(), port, workbench).await;
                } else {
                    serve::run(source, backend.as_deref()).await;
                }
            }
        }
        Commands::Status { frontier, json } => cmd_status(&frontier, json),
        Commands::Log {
            frontier,
            limit,
            kind,
            json,
        } => cmd_log(&frontier, limit, kind.as_deref(), json),
        Commands::Inbox {
            frontier,
            kind,
            limit,
            json,
        } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
        Commands::Ask {
            frontier,
            question,
            json,
        } => cmd_ask(&frontier, &question.join(" "), json),
        Commands::Stats { frontier, json } => {
            if json {
                print_stats_json(&frontier);
            } else {
                cmd_stats(&frontier);
            }
        }
        Commands::Search {
            source,
            query,
            entity,
            r#type,
            all,
            limit,
            json,
        } => cmd_search(
            source.as_deref(),
            &query,
            entity.as_deref(),
            r#type.as_deref(),
            all.as_deref(),
            limit,
            json,
        ),
        Commands::Tensions {
            source,
            both_high,
            cross_domain,
            top,
            json,
        } => cmd_tensions(&source, both_high, cross_domain, top, json),
        Commands::Gaps { action } => cmd_gaps(action),
        Commands::Bridge {
            inputs,
            novelty,
            top,
        } => cmd_bridge(&inputs, novelty, top).await,
        Commands::Export {
            frontier,
            format,
            output,
        } => export::run(&frontier, &format, output.as_deref()),
        Commands::Packet { action } => cmd_packet(action),
        Commands::Verify { path, json } => cmd_verify(&path, json),
        // Bench is dual-mode: with --candidate it runs the agent benchmark
        // (requires --gold); otherwise the frontier benchmark via BenchArgs.
        Commands::Bench {
            frontier,
            gold,
            candidate,
            sources,
            threshold,
            report,
            entity_gold,
            link_gold,
            suite,
            suite_ready,
            min_f1,
            min_precision,
            min_recall,
            no_thresholds,
            json,
        } => {
            if let Some(cand) = candidate.clone() {
                let Some(g) = gold.clone() else {
                    eprintln!(
                        "{} `vela bench --candidate <…>` requires `--gold <…>`",
                        style::err_prefix()
                    );
                    // Exit code 2: usage error, per CLI convention.
                    std::process::exit(2);
                };
                cmd_agent_bench(
                    &g,
                    &cand,
                    sources.as_deref(),
                    threshold,
                    report.as_deref(),
                    json,
                );
            } else {
                cmd_bench(BenchArgs {
                    frontier,
                    gold,
                    entity_gold,
                    link_gold,
                    suite,
                    suite_ready,
                    min_f1,
                    min_precision,
                    min_recall,
                    no_thresholds,
                    json,
                });
            }
        }
        Commands::Conformance { dir } => {
            // Result intentionally discarded; conformance::run reports itself.
            let _ = conformance::run(&dir);
        }
        // NOTE(review): version string is hardcoded here while the Cli struct
        // uses clap's `version` attribute — confirm these stay in sync (or use
        // env!("CARGO_PKG_VERSION")).
        Commands::Version => println!("vela 0.36.0"),
        Commands::Sign { action } => cmd_sign(action),
        Commands::Actor { action } => cmd_actor(action),
        Commands::Federation { action } => cmd_federation(action),
        Commands::Causal { action } => cmd_causal(action),
        Commands::Frontier { action } => cmd_frontier(action),
        Commands::Queue { action } => cmd_queue(action),
        Commands::Registry { action } => cmd_registry(action),
        Commands::Init {
            path,
            name,
            template,
            no_git,
            json,
        } => cmd_init(&path, &name, &template, !no_git, json),
        Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
        // Diff is dual-mode: a `vpr_*` target previews a proposal against a
        // frontier; any other target is treated as a path for a two-frontier
        // diff (second path required).
        Commands::Diff {
            target,
            frontier_b,
            frontier,
            reviewer,
            json,
            quiet,
        } => {
            if target.starts_with("vpr_") {
                // Frontier root resolution order: --frontier, then the second
                // positional, then the current directory.
                let frontier_root = frontier
                    .clone()
                    .or_else(|| frontier_b.clone())
                    .unwrap_or_else(|| std::path::PathBuf::from("."));
                let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
                    .unwrap_or_else(|e| fail_return(&e));
                let payload = json!({
                    "ok": true,
                    "command": "diff.proposal",
                    "frontier": frontier_root.display().to_string(),
                    "proposal_id": target,
                    "preview": preview,
                });
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("failed to serialize diff preview")
                    );
                } else {
                    println!("vela diff · proposal preview");
                    println!(" proposal: {}", target);
                    println!(" kind: {}", preview.kind);
                    println!(
                        " findings: {} -> {}",
                        preview.findings_before, preview.findings_after
                    );
                    println!(
                        " artifacts: {} -> {}",
                        preview.artifacts_before, preview.artifacts_after
                    );
                    println!(
                        " events: {} -> {}",
                        preview.events_before, preview.events_after
                    );
                    if !preview.changed_findings.is_empty() {
                        println!(
                            " findings changed: {}",
                            preview.changed_findings.join(", ")
                        );
                    }
                }
            } else {
                let frontier_a = std::path::PathBuf::from(&target);
                let b = frontier_b.unwrap_or_else(|| {
                    fail_return(
                        "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
                    )
                });
                diff::run(&frontier_a, &b, json, quiet);
            }
        }
        Commands::Proposals { action } => cmd_proposals(action),
        Commands::ArtifactToState {
            frontier,
            packet,
            actor,
            apply_artifacts,
            json,
        } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
        Commands::BridgeKit { action } => cmd_bridge_kit(action),
        Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
        Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
        Commands::Link { action } => cmd_link(action),
        Commands::Workbench {
            path,
            port,
            no_open,
        } => {
            if let Err(e) = crate::workbench::run(path, port, !no_open).await {
                fail(&e);
            }
        }
        Commands::Bridges { action } => cmd_bridges(action),
        Commands::Entity { action } => cmd_entity(action),
        // Finding subcommands: enum-valued flags are validated up front via
        // validate_enum_arg / the VALID_* tables in `bundle`, then delegated
        // to `state`.
        Commands::Finding { command } => match command {
            FindingCommands::Add {
                frontier,
                assertion,
                r#type,
                source,
                source_type,
                author,
                confidence,
                evidence_type,
                entities,
                entities_reviewed,
                evidence_span,
                gap,
                negative_space,
                doi,
                pmid,
                year,
                journal,
                url,
                source_authors,
                conditions_text,
                species,
                in_vivo,
                in_vitro,
                human_data,
                clinical_trial,
                json,
                apply,
            } => {
                validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
                validate_enum_arg(
                    "--evidence-type",
                    &evidence_type,
                    bundle::VALID_EVIDENCE_TYPES,
                );
                validate_enum_arg(
                    "--source-type",
                    &source_type,
                    bundle::VALID_PROVENANCE_SOURCE_TYPES,
                );
                let parsed_entities = parse_entities(&entities);
                let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
                for (name, etype) in &parsed_entities {
                    if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
                        fail(&format!(
                            "invalid entity type '{}' for '{}'. Valid: {}",
                            etype,
                            name,
                            bundle::VALID_ENTITY_TYPES.join(", "),
                        ));
                    }
                }
                // Semicolon-separated lists; blanks trimmed and dropped.
                let parsed_source_authors = source_authors
                    .map(|s| {
                        s.split(';')
                            .map(|a| a.trim().to_string())
                            .filter(|a| !a.is_empty())
                            .collect()
                    })
                    .unwrap_or_default();
                let parsed_species = species
                    .map(|s| {
                        s.split(';')
                            .map(|a| a.trim().to_string())
                            .filter(|a| !a.is_empty())
                            .collect()
                    })
                    .unwrap_or_default();
                let report = state::add_finding(
                    &frontier,
                    state::FindingDraftOptions {
                        text: assertion,
                        assertion_type: r#type,
                        source,
                        source_type,
                        author,
                        confidence,
                        evidence_type,
                        entities: parsed_entities,
                        doi,
                        pmid,
                        year,
                        journal,
                        url,
                        source_authors: parsed_source_authors,
                        conditions_text,
                        species: parsed_species,
                        in_vivo,
                        in_vitro,
                        human_data,
                        clinical_trial,
                        entities_reviewed,
                        evidence_spans: parsed_evidence_spans,
                        gap,
                        negative_space,
                    },
                    apply,
                )
                .unwrap_or_else(|e| fail_return(&e));
                print_state_report(&report, json);
            }
            FindingCommands::Supersede {
                frontier,
                old_id,
                assertion,
                r#type,
                source,
                source_type,
                author,
                reason,
                confidence,
                evidence_type,
                entities,
                doi,
                pmid,
                year,
                journal,
                url,
                source_authors,
                conditions_text,
                species,
                in_vivo,
                in_vitro,
                human_data,
                clinical_trial,
                json,
                apply,
            } => {
                validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
                validate_enum_arg(
                    "--evidence-type",
                    &evidence_type,
                    bundle::VALID_EVIDENCE_TYPES,
                );
                validate_enum_arg(
                    "--source-type",
                    &source_type,
                    bundle::VALID_PROVENANCE_SOURCE_TYPES,
                );
                let parsed_entities = parse_entities(&entities);
                for (name, etype) in &parsed_entities {
                    if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
                        fail(&format!(
                            "invalid entity type '{}' for '{}'. Valid: {}",
                            etype,
                            name,
                            bundle::VALID_ENTITY_TYPES.join(", "),
                        ));
                    }
                }
                let parsed_source_authors = source_authors
                    .map(|s| {
                        s.split(';')
                            .map(|a| a.trim().to_string())
                            .filter(|a| !a.is_empty())
                            .collect()
                    })
                    .unwrap_or_default();
                let parsed_species = species
                    .map(|s| {
                        s.split(';')
                            .map(|a| a.trim().to_string())
                            .filter(|a| !a.is_empty())
                            .collect()
                    })
                    .unwrap_or_default();
                // Unlike Add, Supersede does not accept review/span/gap flags,
                // so those draft fields are fixed to their empty defaults here.
                let report = state::supersede_finding(
                    &frontier,
                    &old_id,
                    &reason,
                    state::FindingDraftOptions {
                        text: assertion,
                        assertion_type: r#type,
                        source,
                        source_type,
                        author,
                        confidence,
                        evidence_type,
                        entities: parsed_entities,
                        doi,
                        pmid,
                        year,
                        journal,
                        url,
                        source_authors: parsed_source_authors,
                        conditions_text,
                        species: parsed_species,
                        in_vivo,
                        in_vitro,
                        human_data,
                        clinical_trial,
                        entities_reviewed: false,
                        evidence_spans: Vec::new(),
                        gap: false,
                        negative_space: false,
                    },
                    apply,
                )
                .unwrap_or_else(|e| fail_return(&e));
                print_state_report(&report, json);
            }
            FindingCommands::CausalSet {
                frontier,
                finding_id,
                claim,
                grade,
                actor,
                reason,
                json,
            } => {
                if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
                    fail(&format!(
                        "invalid --claim '{claim}'; valid: {:?}",
                        bundle::VALID_CAUSAL_CLAIMS
                    ));
                }
                // let-chain: only validate the grade when one was supplied.
                if let Some(g) = grade.as_deref()
                    && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
                {
                    fail(&format!(
                        "invalid --grade '{g}'; valid: {:?}",
                        bundle::VALID_CAUSAL_EVIDENCE_GRADES
                    ));
                }
                let report = state::set_causal(
                    &frontier,
                    &finding_id,
                    &claim,
                    grade.as_deref(),
                    &actor,
                    &reason,
                )
                .unwrap_or_else(|e| fail_return(&e));
                print_state_report(&report, json);
            }
        },
        // --- finding lifecycle commands (sync, state::*) ---------------------
        Commands::Review {
            frontier,
            finding_id,
            status,
            reason,
            reviewer,
            apply,
            json,
        } => {
            // status/reason are Option at the clap level but required here.
            let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
            let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
            let report = state::review_finding(
                &frontier,
                &finding_id,
                state::ReviewOptions {
                    status,
                    reason,
                    reviewer,
                },
                apply,
            )
            .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }
        Commands::Note {
            frontier,
            finding_id,
            text,
            author,
            apply,
            json,
        } => {
            let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
                .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }
        Commands::Caveat {
            frontier,
            finding_id,
            text,
            author,
            apply,
            json,
        } => {
            let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
                .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }
        Commands::Revise {
            frontier,
            finding_id,
            confidence,
            reason,
            reviewer,
            apply,
            json,
        } => {
            let report = state::revise_confidence(
                &frontier,
                &finding_id,
                state::ReviseOptions {
                    confidence,
                    reason,
                    reviewer,
                },
                apply,
            )
            .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }
        Commands::Reject {
            frontier,
            finding_id,
            reason,
            reviewer,
            apply,
            json,
        } => {
            let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
                .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }
        Commands::History {
            frontier,
            finding_id,
            json,
            as_of,
        } => {
            let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize history response")
                );
            } else {
                print_history(&payload);
            }
        }
        Commands::ImportEvents { source, into, json } => {
            let report =
                review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "import-events",
                        "source": report.source,
                        "target": into.display().to_string(),
                        "summary": {
                            "imported": report.imported,
                            "new": report.new,
                            "duplicate": report.duplicate,
                            "canonical_events_imported": report.events_imported,
                            "canonical_events_new": report.events_new,
                            "canonical_events_duplicate": report.events_duplicate,
                        }
                    }))
                    .expect("failed to serialize import-events response")
                );
            } else {
                // Non-JSON path relies on the report's Display impl.
                println!("{report}");
            }
        }
        Commands::Retract {
            source,
            finding_id,
            reason,
            reviewer,
            apply,
            json,
        } => {
            let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
                .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }
        // --- repair / resolution commands ------------------------------------
        Commands::LocatorRepair {
            frontier,
            atom_id,
            locator,
            reviewer,
            reason,
            apply,
            json,
        } => {
            cmd_locator_repair(
                &frontier,
                &atom_id,
                locator.as_deref(),
                &reviewer,
                &reason,
                apply,
                json,
            );
        }
        Commands::SourceFetch {
            identifier,
            cache,
            out,
            refresh,
            json,
        } => {
            cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
        }
        Commands::SpanRepair {
            frontier,
            finding_id,
            section,
            text,
            reviewer,
            reason,
            apply,
            json,
        } => {
            cmd_span_repair(
                &frontier,
                &finding_id,
                &section,
                &text,
                &reviewer,
                &reason,
                apply,
                json,
            );
        }
        Commands::EntityResolve {
            frontier,
            finding_id,
            entity,
            source,
            id,
            confidence,
            matched_name,
            resolution_method,
            reviewer,
            reason,
            apply,
            json,
        } => {
            cmd_entity_resolve(
                &frontier,
                &finding_id,
                &entity,
                &source,
                &id,
                confidence,
                matched_name.as_deref(),
                &resolution_method,
                &reviewer,
                &reason,
                apply,
                json,
            );
        }
        Commands::Propagate {
            frontier,
            retract,
            reduce_confidence,
            to,
            output,
        } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
        Commands::Replicate {
            frontier,
            target,
            outcome,
            by,
            conditions,
            source_title,
            doi,
            pmid,
            sample_size,
            note,
            previous_attempt,
            no_cascade,
            json,
        } => cmd_replicate(
            &frontier,
            &target,
            &outcome,
            &by,
            &conditions,
            &source_title,
            doi.as_deref(),
            pmid.as_deref(),
            sample_size.as_deref(),
            &note,
            previous_attempt.as_deref(),
            no_cascade,
            json,
        ),
        Commands::Replications {
            frontier,
            target,
            json,
        } => cmd_replications(&frontier, target.as_deref(), json),
        // --- artifact / dataset / code registration --------------------------
        Commands::DatasetAdd {
            frontier,
            name,
            version,
            content_hash,
            url,
            license,
            source_title,
            doi,
            row_count,
            json,
        } => cmd_dataset_add(
            &frontier,
            &name,
            version.as_deref(),
            &content_hash,
            url.as_deref(),
            license.as_deref(),
            &source_title,
            doi.as_deref(),
            row_count,
            json,
        ),
        Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
        Commands::CodeAdd {
            frontier,
            language,
            repo_url,
            commit,
            path,
            content_hash,
            line_start,
            line_end,
            entry_point,
            json,
        } => cmd_code_add(
            &frontier,
            &language,
            repo_url.as_deref(),
            commit.as_deref(),
            &path,
            &content_hash,
            line_start,
            line_end,
            entry_point.as_deref(),
            json,
        ),
        Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
        Commands::ArtifactAdd {
            frontier,
            kind,
            name,
            file,
            url,
            content_hash,
            media_type,
            license,
            source_title,
            source_url,
            doi,
            target,
            metadata,
            access_tier,
            deposited_by,
            reason,
            json,
        } => cmd_artifact_add(
            &frontier,
            &kind,
            &name,
            file.as_deref(),
            url.as_deref(),
            content_hash.as_deref(),
            media_type.as_deref(),
            license.as_deref(),
            source_title.as_deref(),
            source_url.as_deref(),
            doi.as_deref(),
            target,
            metadata,
            &access_tier,
            &deposited_by,
            &reason,
            json,
        ),
        Commands::Artifacts {
            frontier,
            target,
            json,
        } => cmd_artifacts(&frontier, target.as_deref(), json),
        Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
        Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
        Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
        Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
        Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
        Commands::ClinicalTrialImport {
            frontier,
            nct_id,
            input_json,
            target,
            deposited_by,
            reason,
            license,
            json,
        } => {
            cmd_clinical_trial_import(
                &frontier,
                &nct_id,
                input_json.as_deref(),
                target,
                &deposited_by,
                &reason,
                &license,
                json,
            )
            .await
        }
        Commands::NegativeResultAdd {
            frontier,
            kind,
            deposited_by,
            reason,
            conditions_text,
            notes,
            target,
            endpoint,
            intervention,
            comparator,
            population,
            n_enrolled,
            power,
            ci_lower,
            ci_upper,
            effect_size_threshold,
            registry_id,
            reagent,
            observation,
            attempts,
            source_title,
            doi,
            url,
            year,
            json,
        } => cmd_negative_result_add(
            &frontier,
            &kind,
            &deposited_by,
            &reason,
            &conditions_text,
            &notes,
            target,
            endpoint.as_deref(),
            intervention.as_deref(),
            comparator.as_deref(),
            population.as_deref(),
            n_enrolled,
            power,
            ci_lower,
            ci_upper,
            effect_size_threshold,
            registry_id.as_deref(),
            reagent.as_deref(),
            observation.as_deref(),
            attempts,
            &source_title,
            doi.as_deref(),
            url.as_deref(),
            year,
            json,
        ),
        Commands::NegativeResults {
            frontier,
            target,
            json,
        } => cmd_negative_results(&frontier, target.as_deref(), json),
        Commands::TrajectoryCreate {
            frontier,
            deposited_by,
            reason,
            target,
            notes,
            json,
        } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, &notes, json),
        Commands::TrajectoryStep {
            frontier,
            trajectory_id,
            kind,
            description,
            actor,
            reason,
            reference,
            json,
        } => cmd_trajectory_step(
            &frontier,
            &trajectory_id,
            &kind,
            &description,
            &actor,
            &reason,
            reference,
            json,
        ),
        Commands::Trajectories {
            frontier,
            target,
            json,
        } => cmd_trajectories(&frontier, target.as_deref(), json),
        Commands::TierSet {
            frontier,
            object_type,
            object_id,
            tier,
            actor,
            reason,
            json,
        } => cmd_tier_set(
            &frontier,
            &object_type,
            &object_id,
            &tier,
            &actor,
            &reason,
            json,
        ),
        // --- prediction / consensus commands ---------------------------------
        Commands::Predict {
            frontier,
            by,
            claim,
            criterion,
            resolves_by,
            confidence,
            target,
            outcome,
            conditions,
            json,
        } => cmd_predict(
            &frontier,
            &by,
            &claim,
            &criterion,
            resolves_by.as_deref(),
            confidence,
            &target,
            &outcome,
            &conditions,
            json,
        ),
        Commands::Resolve {
            frontier,
            prediction,
            outcome,
            matched,
            by,
            confidence,
            source_title,
            doi,
            json,
        } => cmd_resolve(
            &frontier,
            &prediction,
            &outcome,
            matched,
            &by,
            confidence,
            &source_title,
            doi.as_deref(),
            json,
        ),
        Commands::Predictions {
            frontier,
            by,
            open,
            json,
        } => cmd_predictions(&frontier, by.as_deref(), open, json),
        Commands::Calibration {
            frontier,
            actor,
            json,
        } => cmd_calibration(&frontier, actor.as_deref(), json),
        Commands::PredictionsExpire {
            frontier,
            now,
            dry_run,
            json,
        } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
        Commands::Consensus {
            frontier,
            target,
            weighting,
            causal_claim,
            causal_grade_min,
            json,
        } => cmd_consensus(
            &frontier,
            &target,
            &weighting,
            causal_claim.as_deref(),
            causal_grade_min.as_deref(),
            json,
        ),

        Commands::Ingest {
            path,
            frontier,
            backend,
            actor,
            dry_run,
            json,
        } => {
            cmd_ingest(
                &path,
                &frontier,
                backend.as_deref(),
                actor.as_deref(),
                dry_run,
                json,
            )
            .await
        }

        // Propose reuses the review pipeline; clones keep the originals
        // available if this arm is later extended — TODO confirm the clones
        // are still needed.
        Commands::Propose {
            frontier,
            finding_id,
            status,
            reason,
            reviewer,
            apply,
            json,
        } => {
            let options = state::ReviewOptions {
                status: status.clone(),
                reason: reason.clone(),
                reviewer: reviewer.clone(),
            };
            let report = state::review_finding(&frontier, &finding_id, options, apply)
                .unwrap_or_else(|e| fail_return(&e));
            print_state_report(&report, json);
        }

        Commands::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize accept response")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }

        Commands::Attest {
            frontier,
            key,
            json,
        } => {
            let count = sign::sign_frontier(&frontier, &key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "attest",
                "frontier": frontier.display().to_string(),
                "private_key": key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize attest response")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("attested"),
                    frontier.display()
                );
            }
        }

        // Lineage is an alias over the same history_as_of query as History.
        Commands::Lineage {
            frontier,
            finding_id,
            as_of,
            json,
        } => {
            let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize lineage response")
                );
            } else {
                print_history(&payload);
            }
        }

        Commands::Carina { action } => cmd_carina(action),
    }
}
4053
4054fn cmd_carina(action: CarinaAction) {
4057 match action {
4058 CarinaAction::List { json } => {
4059 if json {
4060 println!(
4061 "{}",
4062 serde_json::to_string_pretty(&json!({
4063 "ok": true,
4064 "command": "carina.list",
4065 "primitives": carina_validate::PRIMITIVE_NAMES,
4066 }))
4067 .expect("failed to serialize carina.list")
4068 );
4069 } else {
4070 println!("Carina primitives bundled with this build:");
4071 for name in carina_validate::PRIMITIVE_NAMES {
4072 println!(" · {name}");
4073 }
4074 }
4075 }
4076 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4077 Some(text) => print!("{text}"),
4078 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4079 },
4080 CarinaAction::Validate {
4081 path,
4082 primitive,
4083 json,
4084 } => {
4085 let text = std::fs::read_to_string(&path)
4086 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4087 let value: Value = serde_json::from_str(&text)
4088 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4089 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4095 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4096 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4097 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4098 for (key, child) in primitives {
4099 let outcome = carina_validate::validate(key, child)
4100 .map(|()| carina_validate::detect_primitive(child));
4101 report.push((key.clone(), outcome));
4102 }
4103 } else {
4104 let outcome = match primitive.as_deref() {
4105 Some(name) => carina_validate::validate(name, &value).map(|()| {
4106 carina_validate::PRIMITIVE_NAMES
4107 .iter()
4108 .copied()
4109 .find(|p| *p == name)
4110 }),
4111 None => carina_validate::validate_auto(&value).map(Some),
4112 };
4113 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4114 report.push((label, outcome));
4115 }
4116
4117 let total = report.len();
4118 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4119 let fail = total - pass;
4120
4121 if json {
4122 let entries: Vec<Value> = report
4123 .iter()
4124 .map(|(label, r)| match r {
4125 Ok(name) => json!({
4126 "key": label,
4127 "primitive": name,
4128 "ok": true,
4129 }),
4130 Err(errs) => json!({
4131 "key": label,
4132 "ok": false,
4133 "errors": errs,
4134 }),
4135 })
4136 .collect();
4137 println!(
4138 "{}",
4139 serde_json::to_string_pretty(&json!({
4140 "ok": fail == 0,
4141 "command": "carina.validate",
4142 "file": path.display().to_string(),
4143 "total": total,
4144 "passed": pass,
4145 "failed": fail,
4146 "entries": entries,
4147 }))
4148 .expect("failed to serialize carina.validate")
4149 );
4150 } else {
4151 for (label, r) in &report {
4152 match r {
4153 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4154 Ok(None) => println!(" {} {label}", style::ok("ok")),
4155 Err(errs) => {
4156 println!(" {} {label}", style::lost("fail"));
4157 for e in errs {
4158 println!(" {e}");
4159 }
4160 }
4161 }
4162 }
4163 println!();
4164 if fail == 0 {
4165 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4166 } else {
4167 println!(
4168 "{} {pass}/{total} valid · {fail} failed",
4169 style::lost("carina.validate")
4170 );
4171 }
4172 }
4173
4174 if fail > 0 {
4175 std::process::exit(1);
4176 }
4177 }
4178 }
4179}
4180
4181fn cmd_consensus(
4184 frontier: &Path,
4185 target: &str,
4186 weighting_str: &str,
4187 causal_claim: Option<&str>,
4188 causal_grade_min: Option<&str>,
4189 json: bool,
4190) {
4191 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4192
4193 if !target.starts_with("vf_") {
4194 fail(&format!("target `{target}` is not a vf_ finding id"));
4195 }
4196 let scheme =
4197 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4198
4199 let parsed_claim = match causal_claim {
4200 None => None,
4201 Some("correlation") => Some(CausalClaim::Correlation),
4202 Some("mediation") => Some(CausalClaim::Mediation),
4203 Some("intervention") => Some(CausalClaim::Intervention),
4204 Some(other) => fail_return(&format!(
4205 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4206 )),
4207 };
4208 let parsed_grade = match causal_grade_min {
4209 None => None,
4210 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4211 Some("observational") => Some(CausalEvidenceGrade::Observational),
4212 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4213 Some("rct") => Some(CausalEvidenceGrade::Rct),
4214 Some(other) => fail_return(&format!(
4215 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4216 )),
4217 };
4218 let filter = crate::aggregate::AggregateFilter {
4219 causal_claim: parsed_claim,
4220 causal_grade_min: parsed_grade,
4221 };
4222 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4223
4224 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4225 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4226
4227 if json {
4228 println!(
4229 "{}",
4230 serde_json::to_string_pretty(&result).expect("serialize consensus")
4231 );
4232 return;
4233 }
4234
4235 println!();
4236 println!(
4237 " {}",
4238 format!(
4239 "VELA · CONSENSUS · {} ({})",
4240 result.target, result.weighting
4241 )
4242 .to_uppercase()
4243 .dimmed()
4244 );
4245 println!(" {}", style::tick_row(60));
4246 println!(
4247 " target: {}",
4248 truncate(&result.target_assertion, 80)
4249 );
4250 println!(" similar findings: {}", result.n_findings);
4251 println!(
4252 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
4253 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
4254 );
4255 println!();
4256 println!(" constituents (sorted by weight):");
4257 let mut sorted = result.constituents.clone();
4258 sorted.sort_by(|a, b| {
4259 b.weight
4260 .partial_cmp(&a.weight)
4261 .unwrap_or(std::cmp::Ordering::Equal)
4262 });
4263 for c in sorted.iter().take(10) {
4264 let repls = if c.n_replications > 0 {
4265 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
4266 } else {
4267 String::new()
4268 };
4269 println!(
4270 " · w={:.2} raw={:.2} adj={:.2}{}",
4271 c.weight, c.raw_score, c.adjusted_score, repls
4272 );
4273 println!(" {}", truncate(&c.assertion_text, 88));
4274 }
4275 if result.constituents.len() > 10 {
4276 println!(" ... ({} more)", result.constituents.len() - 10);
4277 }
4278}
4279
4280fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4286 let trimmed = s.trim();
4287 if trimmed.eq_ignore_ascii_case("affirmed") {
4288 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4289 }
4290 if trimmed.eq_ignore_ascii_case("falsified") {
4291 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4292 }
4293 if let Some(rest) = trimmed.strip_prefix("cat:") {
4294 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4295 value: rest.to_string(),
4296 });
4297 }
4298 if let Some(rest) = trimmed.strip_prefix("quant:") {
4299 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4300 let (val_s, tol_s) = vt
4301 .split_once('±')
4302 .or_else(|| vt.split_once("+/-"))
4303 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4304 let value: f64 = val_s
4305 .parse()
4306 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4307 let tolerance: f64 = tol_s
4308 .parse()
4309 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4310 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4311 value,
4312 tolerance,
4313 units: units.to_string(),
4314 });
4315 }
4316 Err(format!(
4317 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4318 ))
4319}
4320
4321#[allow(clippy::too_many_arguments)]
4323fn cmd_predict(
4324 frontier: &Path,
4325 by: &str,
4326 claim: &str,
4327 criterion: &str,
4328 resolves_by: Option<&str>,
4329 confidence: f64,
4330 target_csv: &str,
4331 outcome: &str,
4332 conditions_text: &str,
4333 json: bool,
4334) {
4335 if !(0.0..=1.0).contains(&confidence) {
4336 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
4337 }
4338 let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
4339
4340 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4341
4342 let targets: Vec<String> = target_csv
4343 .split(',')
4344 .map(|s| s.trim().to_string())
4345 .filter(|s| !s.is_empty())
4346 .collect();
4347 for t in &targets {
4348 if !t.starts_with("vf_") {
4349 fail(&format!("target `{t}` is not a vf_ id"));
4350 }
4351 if !project.findings.iter().any(|f| f.id == *t) {
4352 fail(&format!("target `{t}` not present in frontier"));
4353 }
4354 }
4355
4356 let lower = conditions_text.to_lowercase();
4357 let conditions = crate::bundle::Conditions {
4358 text: conditions_text.to_string(),
4359 species_verified: Vec::new(),
4360 species_unverified: Vec::new(),
4361 in_vitro: lower.contains("in vitro"),
4362 in_vivo: lower.contains("in vivo"),
4363 human_data: lower.contains("human") || lower.contains("clinical"),
4364 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
4365 concentration_range: None,
4366 duration: None,
4367 age_group: None,
4368 cell_type: None,
4369 };
4370
4371 let prediction = crate::bundle::Prediction::new(
4372 claim.to_string(),
4373 targets,
4374 None,
4375 resolves_by.map(|s| s.to_string()),
4376 criterion.to_string(),
4377 expected,
4378 by.to_string(),
4379 confidence,
4380 conditions,
4381 );
4382
4383 if project.predictions.iter().any(|p| p.id == prediction.id) {
4384 if json {
4385 println!(
4386 "{}",
4387 serde_json::to_string_pretty(&json!({
4388 "ok": false,
4389 "command": "predict",
4390 "reason": "prediction_already_exists",
4391 "id": prediction.id,
4392 }))
4393 .expect("serialize")
4394 );
4395 } else {
4396 println!(
4397 "{} prediction {} already exists in {}; skipping.",
4398 style::warn("predict"),
4399 prediction.id,
4400 frontier.display()
4401 );
4402 }
4403 return;
4404 }
4405
4406 let new_id = prediction.id.clone();
4407 project.predictions.push(prediction);
4408 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
4409
4410 if json {
4411 println!(
4412 "{}",
4413 serde_json::to_string_pretty(&json!({
4414 "ok": true,
4415 "command": "predict",
4416 "id": new_id,
4417 "made_by": by,
4418 "confidence": confidence,
4419 "frontier": frontier.display().to_string(),
4420 }))
4421 .expect("serialize predict result")
4422 );
4423 } else {
4424 println!();
4425 println!(
4426 " {}",
4427 format!("VELA · PREDICT · {}", new_id)
4428 .to_uppercase()
4429 .dimmed()
4430 );
4431 println!(" {}", style::tick_row(60));
4432 println!(" by: {by}");
4433 println!(" confidence: {confidence:.3}");
4434 if let Some(d) = resolves_by {
4435 println!(" resolves by: {d}");
4436 }
4437 println!(" outcome: {outcome}");
4438 println!(" claim: {}", truncate(claim, 88));
4439 println!();
4440 println!(
4441 " {} prediction recorded in {}",
4442 style::ok("ok"),
4443 frontier.display()
4444 );
4445 }
4446}
4447
4448#[allow(clippy::too_many_arguments)]
4450fn cmd_resolve(
4451 frontier: &Path,
4452 prediction_id: &str,
4453 actual_outcome: &str,
4454 matched: bool,
4455 by: &str,
4456 confidence: f64,
4457 source_title: &str,
4458 doi: Option<&str>,
4459 json: bool,
4460) {
4461 if !prediction_id.starts_with("vpred_") {
4462 fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
4463 }
4464 if !(0.0..=1.0).contains(&confidence) {
4465 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
4466 }
4467 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4468 if !project.predictions.iter().any(|p| p.id == prediction_id) {
4469 fail(&format!(
4470 "prediction `{prediction_id}` not present in frontier"
4471 ));
4472 }
4473
4474 let evidence = crate::bundle::Evidence {
4475 evidence_type: "experimental".to_string(),
4476 model_system: String::new(),
4477 species: None,
4478 method: "prediction_resolution".to_string(),
4479 sample_size: None,
4480 effect_size: None,
4481 p_value: None,
4482 replicated: false,
4483 replication_count: None,
4484 evidence_spans: if source_title.is_empty() {
4485 Vec::new()
4486 } else {
4487 vec![serde_json::json!({"text": source_title})]
4488 },
4489 };
4490
4491 let _ = doi; let resolution = crate::bundle::Resolution::new(
4498 prediction_id.to_string(),
4499 actual_outcome.to_string(),
4500 matched,
4501 by.to_string(),
4502 evidence,
4503 confidence,
4504 );
4505
4506 if project.resolutions.iter().any(|r| r.id == resolution.id) {
4507 if json {
4508 println!(
4509 "{}",
4510 serde_json::to_string_pretty(&json!({
4511 "ok": false,
4512 "command": "resolve",
4513 "reason": "resolution_already_exists",
4514 "id": resolution.id,
4515 }))
4516 .expect("serialize")
4517 );
4518 } else {
4519 println!(
4520 "{} resolution {} already exists in {}; skipping.",
4521 style::warn("resolve"),
4522 resolution.id,
4523 frontier.display()
4524 );
4525 }
4526 return;
4527 }
4528
4529 let new_id = resolution.id.clone();
4530 project.resolutions.push(resolution);
4531 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
4532
4533 if json {
4534 println!(
4535 "{}",
4536 serde_json::to_string_pretty(&json!({
4537 "ok": true,
4538 "command": "resolve",
4539 "id": new_id,
4540 "prediction": prediction_id,
4541 "matched": matched,
4542 "frontier": frontier.display().to_string(),
4543 }))
4544 .expect("serialize resolve result")
4545 );
4546 } else {
4547 println!();
4548 println!(
4549 " {}",
4550 format!("VELA · RESOLVE · {}", new_id)
4551 .to_uppercase()
4552 .dimmed()
4553 );
4554 println!(" {}", style::tick_row(60));
4555 println!(" prediction: {prediction_id}");
4556 println!(
4557 " matched: {}",
4558 if matched {
4559 style::ok("yes")
4560 } else {
4561 style::lost("no")
4562 }
4563 );
4564 println!(" by: {by}");
4565 println!(" outcome: {}", truncate(actual_outcome, 80));
4566 println!();
4567 println!(
4568 " {} resolution recorded in {}",
4569 style::ok("ok"),
4570 frontier.display()
4571 );
4572 }
4573}
4574
4575fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
4577 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4578
4579 let resolved_ids: std::collections::HashSet<&str> = project
4580 .resolutions
4581 .iter()
4582 .map(|r| r.prediction_id.as_str())
4583 .collect();
4584
4585 let mut filtered: Vec<&crate::bundle::Prediction> = project
4586 .predictions
4587 .iter()
4588 .filter(|p| by.is_none_or(|b| p.made_by == b))
4589 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
4590 .collect();
4591 filtered.sort_by(|a, b| {
4592 a.resolves_by
4593 .as_deref()
4594 .unwrap_or("9999")
4595 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
4596 });
4597
4598 if json {
4599 let payload: Vec<serde_json::Value> = filtered
4600 .iter()
4601 .map(|p| {
4602 json!({
4603 "id": p.id,
4604 "claim_text": p.claim_text,
4605 "made_by": p.made_by,
4606 "confidence": p.confidence,
4607 "predicted_at": p.predicted_at,
4608 "resolves_by": p.resolves_by,
4609 "expected_outcome": p.expected_outcome,
4610 "resolved": resolved_ids.contains(p.id.as_str()),
4611 })
4612 })
4613 .collect();
4614 println!(
4615 "{}",
4616 serde_json::to_string_pretty(&json!({
4617 "ok": true,
4618 "command": "predictions",
4619 "frontier": frontier.display().to_string(),
4620 "count": payload.len(),
4621 "predictions": payload,
4622 }))
4623 .expect("serialize predictions")
4624 );
4625 return;
4626 }
4627
4628 println!();
4629 println!(
4630 " {}",
4631 format!("VELA · PREDICTIONS · {}", frontier.display())
4632 .to_uppercase()
4633 .dimmed()
4634 );
4635 println!(" {}", style::tick_row(60));
4636 if filtered.is_empty() {
4637 println!(" (no predictions matching filters)");
4638 return;
4639 }
4640 for p in &filtered {
4641 let resolved = resolved_ids.contains(p.id.as_str());
4642 let chip = if resolved {
4643 style::ok("resolved")
4644 } else {
4645 style::warn("open")
4646 };
4647 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
4648 println!(
4649 " · {} {} by {} → {}",
4650 p.id.dimmed(),
4651 chip,
4652 p.made_by,
4653 deadline,
4654 );
4655 println!(" claim: {}", truncate(&p.claim_text, 90));
4656 println!(" confidence: {:.2}", p.confidence);
4657 }
4658}
4659
4660fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
4665 use chrono::DateTime;
4666
4667 let now_dt = match now_override {
4668 Some(s) => DateTime::parse_from_rfc3339(s)
4669 .map(|dt| dt.with_timezone(&chrono::Utc))
4670 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
4671 None => chrono::Utc::now(),
4672 };
4673
4674 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4675 if dry_run {
4676 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4678 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
4679 if json {
4680 println!(
4681 "{}",
4682 serde_json::to_string_pretty(&json!({
4683 "ok": true,
4684 "command": "predictions.expire",
4685 "dry_run": true,
4686 "report": report,
4687 }))
4688 .expect("serialize predictions.expire (dry-run)")
4689 );
4690 } else {
4691 println!(
4692 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
4693 style::ok("ok"),
4694 report.now,
4695 report.newly_expired.len(),
4696 report.already_expired.len(),
4697 report.already_resolved.len(),
4698 report.still_open.len(),
4699 );
4700 for id in &report.newly_expired {
4701 println!(" · {id}");
4702 }
4703 }
4704 return;
4705 }
4706
4707 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
4708 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
4709
4710 if json {
4711 println!(
4712 "{}",
4713 serde_json::to_string_pretty(&json!({
4714 "ok": true,
4715 "command": "predictions.expire",
4716 "report": report,
4717 }))
4718 .expect("serialize predictions.expire")
4719 );
4720 } else {
4721 println!(
4722 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
4723 style::ok("expired"),
4724 report.now,
4725 report.newly_expired.len(),
4726 report.already_expired.len(),
4727 report.already_resolved.len(),
4728 report.still_open.len(),
4729 );
4730 for id in &report.newly_expired {
4731 println!(" · {id}");
4732 }
4733 }
4734}
4735
4736fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
4737 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4738 let records = match actor {
4739 Some(a) => {
4740 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
4741 .map(|r| vec![r])
4742 .unwrap_or_default()
4743 }
4744 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
4745 };
4746
4747 if json {
4748 println!(
4749 "{}",
4750 serde_json::to_string_pretty(&json!({
4751 "ok": true,
4752 "command": "calibration",
4753 "frontier": frontier.display().to_string(),
4754 "filter_actor": actor,
4755 "records": records,
4756 }))
4757 .expect("serialize calibration")
4758 );
4759 return;
4760 }
4761
4762 println!();
4763 println!(
4764 " {}",
4765 format!("VELA · CALIBRATION · {}", frontier.display())
4766 .to_uppercase()
4767 .dimmed()
4768 );
4769 println!(" {}", style::tick_row(60));
4770 if records.is_empty() {
4771 println!(" (no calibration records)");
4772 return;
4773 }
4774 for r in &records {
4775 println!(" · {}", r.actor);
4776 println!(
4777 " predictions: {} resolved: {} hits: {}",
4778 r.n_predictions, r.n_resolved, r.n_hit
4779 );
4780 match r.hit_rate {
4781 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
4782 None => println!(" hit rate: n/a"),
4783 }
4784 match r.brier_score {
4785 Some(b) => println!(
4786 " brier: {:.4} (lower is better; 0.25 = chance)",
4787 b
4788 ),
4789 None => println!(" brier: n/a"),
4790 }
4791 match r.log_score {
4792 Some(l) => println!(
4793 " log score: {:.4} (higher is better; 0 = perfect)",
4794 l
4795 ),
4796 None => println!(" log score: n/a"),
4797 }
4798 }
4799}
4800
/// Register a dataset (name, version, content hash, provenance) in the
/// frontier at `frontier` and persist the updated project file.
///
/// The dataset id is derived inside `bundle::Dataset::new`; if an entry
/// with the same id already exists the command is a no-op, reported as
/// `dataset_already_exists` in JSON mode or a skip notice otherwise.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance for a manually curated data release; the extraction
    // stanza records when and by which binary version the entry was made.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    // `row_count` is not a constructor argument, so set it afterwards.
    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    dataset.row_count = row_count;

    // Idempotence guard: skip (without saving) when the derived id is
    // already present.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        // Human-readable confirmation banner.
        println!();
        println!(
            " {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" name: {name}");
        if let Some(v) = version {
            println!(" version: {v}");
        }
        println!(" content_hash: {content_hash}");
        if let Some(u) = url {
            println!(" url: {u}");
        }
        println!(" source: {source_title}");
        println!();
        println!(
            " {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
4918
/// Deposit a negative result into the frontier.
///
/// `kind` selects which flag group is required:
/// * `registered_trial` — endpoint, intervention, comparator, population,
///   n-enrolled, power, and a CI pair are all mandatory; the effect-size
///   threshold and registry id are optional.
/// * `exploratory` — reagent, observation, and attempt count are mandatory.
///
/// Missing required flags abort via `fail_return` before anything is
/// written. The actual persistence (and event emission) happens in
/// `state::add_negative_result`.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Map the CLI `--kind` string plus its flag group onto the typed enum,
    // enforcing the per-kind required flags as we go.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Free-text conditions; only the clinical-trial flag is inferred from
    // the kind, all other boolean facets default to false here.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance: trials count as clinical_trial sources, everything else
    // as lab-notebook material; extraction metadata stamps this binary.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5081
5082fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
5085 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5086 let filtered: Vec<&crate::bundle::NegativeResult> = project
5087 .negative_results
5088 .iter()
5089 .filter(|nr| {
5090 target
5091 .map(|t| nr.target_findings.iter().any(|f| f == t))
5092 .unwrap_or(true)
5093 })
5094 .collect();
5095
5096 if json {
5097 println!(
5098 "{}",
5099 serde_json::to_string_pretty(&json!({
5100 "ok": true,
5101 "command": "negative_results",
5102 "frontier": frontier.display().to_string(),
5103 "count": filtered.len(),
5104 "negative_results": filtered,
5105 }))
5106 .expect("serialize negative_results")
5107 );
5108 return;
5109 }
5110
5111 if filtered.is_empty() {
5112 println!(" no negative_results in {}", frontier.display());
5113 return;
5114 }
5115
5116 println!();
5117 println!(
5118 " {} ({})",
5119 "VELA · NEGATIVE RESULTS".dimmed(),
5120 filtered.len()
5121 );
5122 println!(" {}", style::tick_row(60));
5123 for nr in &filtered {
5124 let kind_label = match &nr.kind {
5125 crate::bundle::NegativeResultKind::RegisteredTrial {
5126 endpoint, power, ..
5127 } => format!("trial · {endpoint} · power {power:.2}"),
5128 crate::bundle::NegativeResultKind::Exploratory {
5129 reagent, attempts, ..
5130 } => format!("exploratory · {reagent} · {attempts} attempts"),
5131 };
5132 let retracted = if nr.retracted { " [retracted]" } else { "" };
5133 let review = nr
5134 .review_state
5135 .as_ref()
5136 .map(|s| format!(" [{s:?}]"))
5137 .unwrap_or_default();
5138 println!(" {}{}{}", nr.id, retracted, review);
5139 println!(" {kind_label}");
5140 if !nr.target_findings.is_empty() {
5141 println!(" targets: {}", nr.target_findings.join(", "));
5142 }
5143 }
5144 println!();
5145}
5146
5147#[allow(clippy::too_many_arguments)]
5149fn cmd_tier_set(
5150 frontier: &Path,
5151 object_type: &str,
5152 object_id: &str,
5153 tier: &str,
5154 actor: &str,
5155 reason: &str,
5156 json: bool,
5157) {
5158 let parsed_tier =
5159 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5160 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5161 .unwrap_or_else(|e| fail_return(&e));
5162
5163 if json {
5164 println!(
5165 "{}",
5166 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5167 );
5168 } else {
5169 println!();
5170 println!(
5171 " {}",
5172 format!("VELA · TIER · {}", object_id)
5173 .to_uppercase()
5174 .dimmed()
5175 );
5176 println!(" {}", style::tick_row(60));
5177 println!(" object_type: {object_type}");
5178 println!(" new_tier: {}", parsed_tier.canonical());
5179 println!(" actor: {actor}");
5180 if let Some(ev) = &report.applied_event_id {
5181 println!(" event: {ev}");
5182 }
5183 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5184 }
5185}
5186
5187#[allow(clippy::too_many_arguments)]
5189fn cmd_trajectory_create(
5190 frontier: &Path,
5191 deposited_by: &str,
5192 reason: &str,
5193 targets: Vec<String>,
5194 notes: &str,
5195 json: bool,
5196) {
5197 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5198 .unwrap_or_else(|e| fail_return(&e));
5199
5200 if json {
5201 println!(
5202 "{}",
5203 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5204 );
5205 } else {
5206 println!();
5207 println!(
5208 " {}",
5209 format!("VELA · TRAJECTORY · {}", report.finding_id)
5210 .to_uppercase()
5211 .dimmed()
5212 );
5213 println!(" {}", style::tick_row(60));
5214 println!(" deposited_by: {deposited_by}");
5215 if let Some(ev) = &report.applied_event_id {
5216 println!(" event: {ev}");
5217 }
5218 println!(
5219 " {} trajectory opened in {}",
5220 style::ok("ok"),
5221 frontier.display()
5222 );
5223 }
5224}
5225
5226#[allow(clippy::too_many_arguments)]
5228fn cmd_trajectory_step(
5229 frontier: &Path,
5230 trajectory_id: &str,
5231 kind: &str,
5232 description: &str,
5233 actor: &str,
5234 reason: &str,
5235 references: Vec<String>,
5236 json: bool,
5237) {
5238 let parsed_kind = match kind {
5239 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5240 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5241 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5242 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5243 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5244 other => fail_return(&format!(
5245 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5246 )),
5247 };
5248 let report = state::append_trajectory_step(
5249 frontier,
5250 trajectory_id,
5251 parsed_kind,
5252 description,
5253 actor,
5254 references,
5255 reason,
5256 )
5257 .unwrap_or_else(|e| fail_return(&e));
5258
5259 if json {
5260 println!(
5261 "{}",
5262 serde_json::to_string_pretty(&report).expect("serialize step report")
5263 );
5264 } else {
5265 println!();
5266 println!(
5267 " {}",
5268 format!("VELA · STEP · {}", report.finding_id)
5269 .to_uppercase()
5270 .dimmed()
5271 );
5272 println!(" {}", style::tick_row(60));
5273 println!(" trajectory: {trajectory_id}");
5274 println!(" kind: {kind}");
5275 println!(" actor: {actor}");
5276 println!(
5277 " {} step appended in {}",
5278 style::ok("ok"),
5279 frontier.display()
5280 );
5281 }
5282}
5283
5284fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
5286 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5287 let filtered: Vec<&crate::bundle::Trajectory> = project
5288 .trajectories
5289 .iter()
5290 .filter(|t| {
5291 target
5292 .map(|tg| t.target_findings.iter().any(|f| f == tg))
5293 .unwrap_or(true)
5294 })
5295 .collect();
5296
5297 if json {
5298 println!(
5299 "{}",
5300 serde_json::to_string_pretty(&json!({
5301 "ok": true,
5302 "command": "trajectories",
5303 "frontier": frontier.display().to_string(),
5304 "count": filtered.len(),
5305 "trajectories": filtered,
5306 }))
5307 .expect("serialize trajectories")
5308 );
5309 return;
5310 }
5311
5312 if filtered.is_empty() {
5313 println!(" no trajectories in {}", frontier.display());
5314 return;
5315 }
5316
5317 println!();
5318 println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
5319 println!(" {}", style::tick_row(60));
5320 for t in &filtered {
5321 let retracted = if t.retracted { " [retracted]" } else { "" };
5322 let review = t
5323 .review_state
5324 .as_ref()
5325 .map(|s| format!(" [{s:?}]"))
5326 .unwrap_or_default();
5327 println!(" {}{}{}", t.id, retracted, review);
5328 println!(
5329 " {} step(s){}",
5330 t.steps.len(),
5331 if t.target_findings.is_empty() {
5332 String::new()
5333 } else {
5334 format!(" · targets: {}", t.target_findings.join(", "))
5335 }
5336 );
5337 for step in &t.steps {
5338 let label = match step.kind {
5339 crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
5340 crate::bundle::TrajectoryStepKind::Tried => "tried",
5341 crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
5342 crate::bundle::TrajectoryStepKind::Observed => "observed",
5343 crate::bundle::TrajectoryStepKind::Refined => "refined",
5344 };
5345 let preview: String = step.description.chars().take(80).collect();
5346 println!(" [{label}] {preview}");
5347 }
5348 }
5349 println!();
5350}
5351
5352fn cmd_datasets(frontier: &Path, json: bool) {
5354 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5355 if json {
5356 println!(
5357 "{}",
5358 serde_json::to_string_pretty(&json!({
5359 "ok": true,
5360 "command": "datasets",
5361 "frontier": frontier.display().to_string(),
5362 "count": project.datasets.len(),
5363 "datasets": project.datasets,
5364 }))
5365 .expect("serialize datasets")
5366 );
5367 return;
5368 }
5369 println!();
5370 println!(
5371 " {}",
5372 format!("VELA · DATASETS · {}", frontier.display())
5373 .to_uppercase()
5374 .dimmed()
5375 );
5376 println!(" {}", style::tick_row(60));
5377 if project.datasets.is_empty() {
5378 println!(" (no datasets registered)");
5379 return;
5380 }
5381 for ds in &project.datasets {
5382 let v = ds
5383 .version
5384 .as_deref()
5385 .map(|s| format!("@{s}"))
5386 .unwrap_or_default();
5387 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
5388 if let Some(u) = &ds.url {
5389 println!(" url: {}", truncate(u, 80));
5390 }
5391 println!(" hash: {}", truncate(&ds.content_hash, 80));
5392 }
5393}
5394
/// Register a code artifact (language + path, optionally pinned to a repo,
/// commit, line range, and entry point) in the frontier.
///
/// The artifact id is computed inside `CodeArtifact::new` from its fields; if
/// an artifact with the same id already exists the command reports a skip
/// (JSON `ok: false` / warning line) and leaves the frontier untouched.
/// Otherwise the artifact is appended and the frontier is saved. Load/save
/// failures abort the process via `fail_return`.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Normalize the CLI line flags: a start without an end becomes the
    // single-line range (a, a); an end without a start is silently dropped.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Duplicate check by derived id: report and bail without saving.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    // Keep the id before the artifact is moved into the project, then persist.
    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" language: {language}");
        if let Some(r) = repo_url {
            println!(" repo: {r}");
        }
        if let Some(c) = commit {
            println!(" commit: {c}");
        }
        println!(" path: {path}");
        if let Some((a, b)) = line_range {
            println!(" lines: {a}-{b}");
        }
        println!(" content_hash: {content_hash}");
        println!();
        println!(
            " {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5494
5495fn cmd_code_artifacts(frontier: &Path, json: bool) {
5497 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5498 if json {
5499 println!(
5500 "{}",
5501 serde_json::to_string_pretty(&json!({
5502 "ok": true,
5503 "command": "code-artifacts",
5504 "frontier": frontier.display().to_string(),
5505 "count": project.code_artifacts.len(),
5506 "code_artifacts": project.code_artifacts,
5507 }))
5508 .expect("serialize code-artifacts")
5509 );
5510 return;
5511 }
5512 println!();
5513 println!(
5514 " {}",
5515 format!("VELA · CODE · {}", frontier.display())
5516 .to_uppercase()
5517 .dimmed()
5518 );
5519 println!(" {}", style::tick_row(60));
5520 if project.code_artifacts.is_empty() {
5521 println!(" (no code artifacts registered)");
5522 return;
5523 }
5524 for c in &project.code_artifacts {
5525 let lr = c
5526 .line_range
5527 .map(|(a, b)| format!(":{a}-{b}"))
5528 .unwrap_or_default();
5529 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
5530 if let Some(r) = &c.repo_url {
5531 println!(" repo: {}", truncate(r, 80));
5532 }
5533 if let Some(g) = &c.git_commit {
5534 println!(" commit: {g}");
5535 }
5536 }
5537}
5538
5539fn sha256_for_bytes(bytes: &[u8]) -> String {
5540 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
5541}
5542
/// Return the hex portion of a `sha256:`-prefixed content hash; a string
/// without the prefix is returned unchanged.
///
/// The prefix is matched case-insensitively (`sha256:` / `SHA256:`) to match
/// the case-insensitive hex comparison done by callers — previously an
/// uppercase prefix silently failed to strip and broke the hash comparison.
fn sha256_hex_part(content_hash: &str) -> &str {
    // `get(..7)` is None for short strings, so plain hex passes through.
    match content_hash.get(..7) {
        Some(prefix) if prefix.eq_ignore_ascii_case("sha256:") => &content_hash[7..],
        _ => content_hash,
    }
}
5546
5547fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
5548 let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
5549 return None;
5550 };
5551 let hex = sha256_hex_part(content_hash);
5552 let rel = format!(".vela/artifact-blobs/sha256/{hex}");
5553 let path = root.join(&rel);
5554 if let Some(parent) = path.parent() {
5555 std::fs::create_dir_all(parent).unwrap_or_else(|e| {
5556 fail(&format!(
5557 "Failed to create artifact blob directory {}: {e}",
5558 parent.display()
5559 ))
5560 });
5561 }
5562 if !path.is_file() {
5563 std::fs::write(&path, bytes)
5564 .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
5565 }
5566 Some(rel)
5567}
5568
5569fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
5570 let mut out = BTreeMap::new();
5571 for pair in pairs {
5572 let Some((key, value)) = pair.split_once('=') else {
5573 fail(&format!("--metadata must be key=value, got {pair:?}"));
5574 };
5575 let key = key.trim();
5576 if key.is_empty() {
5577 fail("--metadata key must be non-empty");
5578 }
5579 out.insert(key.to_string(), Value::String(value.trim().to_string()));
5580 }
5581 out
5582}
5583
/// Map an artifact `kind` flag to its provenance source-type string.
/// Unknown kinds default to `"database_record"`.
fn artifact_source_type(kind: &str) -> &'static str {
    match kind {
        "dataset" => "data_release",
        "model_output" => "model_output",
        "registry_record" => "database_record",
        "lab_file" => "lab_notebook",
        "clinical_trial_record" | "protocol" => "clinical_trial",
        _ => "database_record",
    }
}
5594
5595fn artifact_provenance(
5596 kind: &str,
5597 title: &str,
5598 url: Option<&str>,
5599 doi: Option<&str>,
5600 license: Option<&str>,
5601) -> crate::bundle::Provenance {
5602 crate::bundle::Provenance {
5603 source_type: artifact_source_type(kind).to_string(),
5604 doi: doi.map(str::to_string),
5605 pmid: None,
5606 pmc: None,
5607 openalex_id: None,
5608 url: url.map(str::to_string),
5609 title: title.to_string(),
5610 authors: Vec::new(),
5611 year: None,
5612 journal: None,
5613 license: license.map(str::to_string),
5614 publisher: None,
5615 funders: Vec::new(),
5616 extraction: crate::bundle::Extraction {
5617 method: "artifact_deposit".to_string(),
5618 model: None,
5619 model_version: None,
5620 extracted_at: chrono::Utc::now().to_rfc3339(),
5621 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
5622 },
5623 review: None,
5624 citation_count: None,
5625 }
5626}
5627
/// Deposit an artifact into the frontier, either from a local file or as a
/// pointer/remote reference.
///
/// Storage resolution: with `--file`, bytes are read, hashed, verified
/// against any provided `--content-hash`, and stored as a content-addressed
/// blob (`local_blob`) or, outside a vela repo, referenced in place
/// (`local_file`). Without `--file`, a `--content-hash` is required and the
/// artifact is recorded as `remote` (when a URL is given) or `pointer`.
/// Aborts via `fail`/`fail_return` on any validation or persistence error.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    // Defaults for the pointer-only case; overwritten below when a file or
    // URL refines the storage mode.
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // If the caller supplied a hash, it must match the file's bytes
        // (hex compared case-insensitively).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer a repo-local content-addressed blob; fall back to pointing
        // at the file on disk when we are not inside a vela repo.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without a file we cannot compute a hash, so one must be provided.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Provenance falls back to the artifact URL/name when no explicit
    // source URL/title was supplied.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" name: {name}");
        println!(" hash: {content_hash_for_print}");
        println!(
            " {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5743
5744fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
5745 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5746 let filtered: Vec<&crate::bundle::Artifact> = project
5747 .artifacts
5748 .iter()
5749 .filter(|artifact| {
5750 target
5751 .map(|t| artifact.target_findings.iter().any(|f| f == t))
5752 .unwrap_or(true)
5753 })
5754 .collect();
5755
5756 if json_out {
5757 println!(
5758 "{}",
5759 serde_json::to_string_pretty(&json!({
5760 "ok": true,
5761 "command": "artifacts",
5762 "frontier": frontier.display().to_string(),
5763 "count": filtered.len(),
5764 "artifacts": filtered,
5765 }))
5766 .expect("serialize artifacts")
5767 );
5768 return;
5769 }
5770
5771 println!();
5772 println!(
5773 " {}",
5774 format!("VELA · ARTIFACTS · {}", frontier.display())
5775 .to_uppercase()
5776 .dimmed()
5777 );
5778 println!(" {}", style::tick_row(60));
5779 if filtered.is_empty() {
5780 println!(" (no artifacts registered)");
5781 return;
5782 }
5783 for artifact in filtered {
5784 println!(
5785 " · {} {} · {}",
5786 artifact.id.dimmed(),
5787 artifact.kind,
5788 artifact.name
5789 );
5790 if let Some(locator) = &artifact.locator {
5791 println!(" locator: {}", truncate(locator, 88));
5792 }
5793 if !artifact.target_findings.is_empty() {
5794 println!(" targets: {}", artifact.target_findings.join(", "));
5795 }
5796 }
5797}
5798
5799fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
5800 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5801 let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
5802 if json_out {
5803 println!(
5804 "{}",
5805 serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
5806 );
5807 if !audit.ok {
5808 std::process::exit(1);
5809 }
5810 return;
5811 }
5812
5813 println!();
5814 println!(
5815 " {}",
5816 format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
5817 .to_uppercase()
5818 .dimmed()
5819 );
5820 println!(" {}", style::tick_row(60));
5821 println!(" artifacts: {}", audit.artifact_count);
5822 println!(" checked local blobs: {}", audit.checked_local_blobs);
5823 println!(" local blob bytes: {}", audit.local_blob_bytes);
5824 if !audit.by_kind.is_empty() {
5825 let kinds = audit
5826 .by_kind
5827 .iter()
5828 .map(|(kind, count)| format!("{kind}:{count}"))
5829 .collect::<Vec<_>>()
5830 .join(", ");
5831 println!(" kinds: {kinds}");
5832 }
5833 if audit.ok {
5834 println!(" {} artifact audit passed.", style::ok("ok"));
5835 return;
5836 }
5837 for issue in &audit.issues {
5838 println!(
5839 " {} {} {}: {}",
5840 style::lost("invalid"),
5841 issue.id,
5842 issue.field,
5843 issue.message
5844 );
5845 }
5846 std::process::exit(1);
5847}
5848
5849fn cmd_decision_brief(frontier: &Path, json_out: bool) {
5850 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5851 let report = decision::load_decision_brief(frontier, &project);
5852 if json_out {
5853 println!(
5854 "{}",
5855 serde_json::to_string_pretty(&report).expect("serialize decision brief report")
5856 );
5857 if !report.ok {
5858 std::process::exit(1);
5859 }
5860 return;
5861 }
5862 println!();
5863 println!(
5864 " {}",
5865 format!("VELA · DECISION BRIEF · {}", project.project.name)
5866 .to_uppercase()
5867 .dimmed()
5868 );
5869 println!(" {}", style::tick_row(60));
5870 if !report.ok {
5871 print_projection_issues(&report.issues, report.error.as_deref());
5872 std::process::exit(1);
5873 }
5874 let brief = report
5875 .projection
5876 .as_ref()
5877 .expect("ok decision report carries projection");
5878 for question in &brief.questions {
5879 println!(" · {} · {}", question.id.dimmed(), question.title);
5880 println!(" answer: {}", wrap_line(&question.short_answer, 82));
5881 println!(" caveat: {}", wrap_line(&question.caveat, 82));
5882 println!(" support: {}", question.supporting_findings.join(", "));
5883 if !question.tension_findings.is_empty() {
5884 println!(" tensions: {}", question.tension_findings.join(", "));
5885 }
5886 if !question.gap_findings.is_empty() {
5887 println!(" gaps: {}", question.gap_findings.join(", "));
5888 }
5889 if !question.artifact_ids.is_empty() {
5890 println!(" artifacts: {}", question.artifact_ids.join(", "));
5891 }
5892 println!(
5893 " would change: {}",
5894 wrap_line(&question.what_would_change_this_answer, 82)
5895 );
5896 }
5897}
5898
5899fn cmd_trial_summary(frontier: &Path, json_out: bool) {
5900 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5901 let report = decision::load_trial_outcomes(frontier, &project);
5902 if json_out {
5903 println!(
5904 "{}",
5905 serde_json::to_string_pretty(&report).expect("serialize trial summary report")
5906 );
5907 if !report.ok {
5908 std::process::exit(1);
5909 }
5910 return;
5911 }
5912 println!();
5913 println!(
5914 " {}",
5915 format!("VELA · TRIAL SUMMARY · {}", project.project.name)
5916 .to_uppercase()
5917 .dimmed()
5918 );
5919 println!(" {}", style::tick_row(60));
5920 if !report.ok {
5921 print_projection_issues(&report.issues, report.error.as_deref());
5922 std::process::exit(1);
5923 }
5924 let outcomes = report
5925 .projection
5926 .as_ref()
5927 .expect("ok trial report carries projection");
5928 for row in &outcomes.rows {
5929 println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
5930 println!(" population: {}", wrap_line(&row.population, 82));
5931 println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
5932 println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
5933 println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
5934 println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
5935 println!(" status: {}", wrap_line(&row.regulatory_status, 82));
5936 if !row.finding_ids.is_empty() {
5937 println!(" findings: {}", row.finding_ids.join(", "));
5938 }
5939 if !row.artifact_ids.is_empty() {
5940 println!(" artifacts: {}", row.artifact_ids.join(", "));
5941 }
5942 }
5943}
5944
5945fn cmd_source_verification(frontier: &Path, json_out: bool) {
5946 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5947 let report = decision::load_source_verification(frontier, &project);
5948 if json_out {
5949 println!(
5950 "{}",
5951 serde_json::to_string_pretty(&report).expect("serialize source verification report")
5952 );
5953 if !report.ok {
5954 std::process::exit(1);
5955 }
5956 return;
5957 }
5958 println!();
5959 println!(
5960 " {}",
5961 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
5962 .to_uppercase()
5963 .dimmed()
5964 );
5965 println!(" {}", style::tick_row(60));
5966 if !report.ok {
5967 print_projection_issues(&report.issues, report.error.as_deref());
5968 std::process::exit(1);
5969 }
5970 let verification = report
5971 .projection
5972 .as_ref()
5973 .expect("ok source verification report carries projection");
5974 println!(" verified_at: {}", verification.verified_at);
5975 for source in &verification.sources {
5976 println!(" · {} · {}", source.id.dimmed(), source.title);
5977 println!(" agency: {}", source.agency);
5978 println!(" url: {}", truncate(&source.url, 88));
5979 println!(" status: {}", wrap_line(&source.current_status, 82));
5980 }
5981}
5982
5983fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
5984 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5985 let report = decision::load_source_ingest_plan(frontier, &project);
5986 if json_out {
5987 println!(
5988 "{}",
5989 serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
5990 );
5991 if !report.ok {
5992 std::process::exit(1);
5993 }
5994 return;
5995 }
5996 println!();
5997 println!(
5998 " {}",
5999 format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
6000 .to_uppercase()
6001 .dimmed()
6002 );
6003 println!(" {}", style::tick_row(60));
6004 if !report.ok {
6005 print_projection_issues(&report.issues, report.error.as_deref());
6006 std::process::exit(1);
6007 }
6008 let plan = report
6009 .projection
6010 .as_ref()
6011 .expect("ok source ingest plan report carries projection");
6012 println!(" verified_at: {}", plan.verified_at);
6013 println!(" entries: {}", plan.entries.len());
6014 for entry in &plan.entries {
6015 println!(
6016 " · {} · {} · {} · {}",
6017 entry.id.dimmed(),
6018 entry.category,
6019 entry.priority,
6020 entry.ingest_status
6021 );
6022 println!(" name: {}", wrap_line(&entry.name, 82));
6023 println!(" locator: {}", truncate(&entry.locator, 88));
6024 println!(" use: {}", wrap_line(&entry.target_use, 82));
6025 if let Some(id) = &entry.current_frontier_artifact_id {
6026 println!(" artifact: {id}");
6027 }
6028 if !entry.target_findings.is_empty() {
6029 println!(" findings: {}", entry.target_findings.join(", "));
6030 }
6031 }
6032}
6033
6034fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6035 if let Some(error) = error {
6036 println!(" {} {error}", style::lost("unavailable"));
6037 }
6038 for issue in issues {
6039 println!(
6040 " {} {}: {}",
6041 style::lost("invalid"),
6042 issue.path,
6043 issue.message
6044 );
6045 }
6046}
6047
6048fn wrap_line(text: &str, max_chars: usize) -> String {
6049 if text.chars().count() <= max_chars {
6050 return text.to_string();
6051 }
6052 let mut out = String::new();
6053 let mut line_len = 0usize;
6054 for word in text.split_whitespace() {
6055 let word_len = word.chars().count();
6056 if line_len > 0 && line_len + 1 + word_len > max_chars {
6057 out.push('\n');
6058 out.push_str(" ");
6059 out.push_str(word);
6060 line_len = word_len;
6061 } else {
6062 if line_len > 0 {
6063 out.push(' ');
6064 line_len += 1;
6065 }
6066 out.push_str(word);
6067 line_len += word_len;
6068 }
6069 }
6070 out
6071}
6072
6073fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6074 study.pointer(pointer).and_then(Value::as_str)
6075}
6076
6077fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6078 study
6079 .pointer(pointer)
6080 .and_then(Value::as_array)
6081 .map(|items| {
6082 items
6083 .iter()
6084 .filter_map(Value::as_str)
6085 .map(str::to_string)
6086 .collect()
6087 })
6088 .unwrap_or_default()
6089}
6090
6091fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6092 study
6093 .pointer(pointer)
6094 .and_then(Value::as_array)
6095 .map(|items| {
6096 items
6097 .iter()
6098 .filter_map(|item| item.get(field).and_then(Value::as_str))
6099 .map(str::to_string)
6100 .collect()
6101 })
6102 .unwrap_or_default()
6103}
6104
6105fn insert_string_vec_metadata(
6106 metadata: &mut BTreeMap<String, Value>,
6107 key: &str,
6108 values: Vec<String>,
6109) {
6110 if values.is_empty() {
6111 return;
6112 }
6113 metadata.insert(
6114 key.to_string(),
6115 Value::Array(values.into_iter().map(Value::String).collect()),
6116 );
6117}
6118
/// Import a ClinicalTrials.gov study record into the frontier as a
/// `clinical_trial_record` artifact.
///
/// The record comes from a local JSON file (`--input-json`) or is fetched
/// from the v2 API. The parsed study is canonicalized and hashed; the bytes
/// are stored as a repo-local blob when possible, otherwise the artifact
/// points at the API URL. Selected protocol fields (status, dates, phases,
/// conditions, interventions, primary outcomes) are copied into artifact
/// metadata. Aborts via `fail`/`fail_return` on fetch/parse/persist errors.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Acquire the raw JSON: local file wins over the network fetch.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Turn HTTP error statuses (404 etc.) into a hard failure too.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Hash canonicalized bytes so the content hash is stable across
    // formatting differences in the fetched JSON.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Prefer a repo-local blob; outside a vela repo fall back to the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id / title reported inside the record; fall back to the
    // CLI-provided id when the fields are absent.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    // Build the metadata map: provenance stamps first, then optional
    // scalar fields pulled from the protocol section by JSON pointer.
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Array-valued fields; empty lists are omitted by the helper.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    // Assemble the artifact and record it through the state layer.
    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6295
6296#[allow(clippy::too_many_arguments)]
6303fn cmd_replicate(
6304 frontier: &Path,
6305 target: &str,
6306 outcome: &str,
6307 attempted_by: &str,
6308 conditions_text: &str,
6309 source_title: &str,
6310 doi: Option<&str>,
6311 pmid: Option<&str>,
6312 sample_size: Option<&str>,
6313 note: &str,
6314 previous_attempt: Option<&str>,
6315 no_cascade: bool,
6316 json: bool,
6317) {
6318 if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
6319 fail(&format!(
6320 "invalid outcome '{outcome}'; valid: {:?}",
6321 crate::bundle::VALID_REPLICATION_OUTCOMES
6322 ));
6323 }
6324 if !target.starts_with("vf_") {
6325 fail(&format!("target '{target}' is not a vf_ finding id"));
6326 }
6327
6328 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6329
6330 if !project.findings.iter().any(|f| f.id == target) {
6331 fail(&format!(
6332 "target finding '{target}' not present in frontier '{}'",
6333 frontier.display()
6334 ));
6335 }
6336
6337 let lower = conditions_text.to_lowercase();
6342 let conditions = crate::bundle::Conditions {
6343 text: conditions_text.to_string(),
6344 species_verified: Vec::new(),
6345 species_unverified: Vec::new(),
6346 in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
6347 in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
6348 human_data: lower.contains("human")
6349 || lower.contains("clinical")
6350 || lower.contains("patient"),
6351 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
6352 concentration_range: None,
6353 duration: None,
6354 age_group: None,
6355 cell_type: None,
6356 };
6357
6358 let evidence = crate::bundle::Evidence {
6359 evidence_type: "experimental".to_string(),
6360 model_system: String::new(),
6361 species: None,
6362 method: "replication_attempt".to_string(),
6363 sample_size: sample_size.map(|s| s.to_string()),
6364 effect_size: None,
6365 p_value: None,
6366 replicated: outcome == "replicated",
6367 replication_count: None,
6368 evidence_spans: Vec::new(),
6369 };
6370
6371 let provenance = crate::bundle::Provenance {
6372 source_type: "published_paper".to_string(),
6373 doi: doi.map(|s| s.to_string()),
6374 pmid: pmid.map(|s| s.to_string()),
6375 pmc: None,
6376 openalex_id: None,
6377 url: None,
6378 title: source_title.to_string(),
6379 authors: Vec::new(),
6380 year: None,
6381 journal: None,
6382 license: None,
6383 publisher: None,
6384 funders: Vec::new(),
6385 extraction: crate::bundle::Extraction {
6386 method: "manual_curation".to_string(),
6387 model: None,
6388 model_version: None,
6389 extracted_at: chrono::Utc::now().to_rfc3339(),
6390 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6391 },
6392 review: None,
6393 citation_count: None,
6394 };
6395
6396 let mut rep = crate::bundle::Replication::new(
6397 target.to_string(),
6398 attempted_by.to_string(),
6399 outcome.to_string(),
6400 evidence,
6401 conditions,
6402 provenance,
6403 note.to_string(),
6404 );
6405 rep.previous_attempt = previous_attempt.map(|s| s.to_string());
6406
6407 if project.replications.iter().any(|r| r.id == rep.id) {
6410 if json {
6411 println!(
6412 "{}",
6413 serde_json::to_string_pretty(&json!({
6414 "ok": false,
6415 "command": "replicate",
6416 "reason": "replication_already_exists",
6417 "id": rep.id,
6418 }))
6419 .expect("serialize")
6420 );
6421 } else {
6422 println!(
6423 "{} replication {} already exists in {}; skipping.",
6424 style::warn("replicate"),
6425 rep.id,
6426 frontier.display()
6427 );
6428 }
6429 return;
6430 }
6431
6432 let new_id = rep.id.clone();
6433 project.replications.push(rep);
6434
6435 let cascade_result = if no_cascade {
6442 None
6443 } else {
6444 let result = propagate::propagate_correction(
6445 &mut project,
6446 target,
6447 propagate::PropagationAction::ReplicationOutcome {
6448 outcome: outcome.to_string(),
6449 vrep_id: new_id.clone(),
6450 },
6451 );
6452 project.review_events.extend(result.events.clone());
6455 project::recompute_stats(&mut project);
6456 Some(result)
6457 };
6458
6459 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
6460
6461 if json {
6462 let cascade_json = cascade_result.as_ref().map(|r| {
6463 json!({
6464 "affected": r.affected,
6465 "events": r.events.len(),
6466 })
6467 });
6468 println!(
6469 "{}",
6470 serde_json::to_string_pretty(&json!({
6471 "ok": true,
6472 "command": "replicate",
6473 "id": new_id,
6474 "target": target,
6475 "outcome": outcome,
6476 "attempted_by": attempted_by,
6477 "cascade": cascade_json,
6478 "frontier": frontier.display().to_string(),
6479 }))
6480 .expect("failed to serialize replicate result")
6481 );
6482 } else {
6483 println!();
6484 println!(
6485 " {}",
6486 format!("VELA · REPLICATE · {}", new_id)
6487 .to_uppercase()
6488 .dimmed()
6489 );
6490 println!(" {}", style::tick_row(60));
6491 println!(" target: {target}");
6492 println!(" outcome: {outcome}");
6493 println!(" attempted by: {attempted_by}");
6494 println!(" conditions: {conditions_text}");
6495 println!(" source: {source_title}");
6496 if let Some(d) = doi {
6497 println!(" doi: {d}");
6498 }
6499 println!();
6500 println!(
6501 " {} replication recorded in {}",
6502 style::ok("ok"),
6503 frontier.display()
6504 );
6505 if let Some(result) = cascade_result {
6506 println!(
6507 " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
6508 style::ok("ok"),
6509 result.affected,
6510 result.events.len()
6511 );
6512 } else {
6513 println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
6514 }
6515 }
6516}
6517
6518fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
6520 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6521 let filtered: Vec<&crate::bundle::Replication> = project
6522 .replications
6523 .iter()
6524 .filter(|r| target.is_none_or(|t| r.target_finding == t))
6525 .collect();
6526
6527 if json {
6528 let payload = json!({
6529 "ok": true,
6530 "command": "replications",
6531 "frontier": frontier.display().to_string(),
6532 "filter_target": target,
6533 "count": filtered.len(),
6534 "replications": filtered,
6535 });
6536 println!(
6537 "{}",
6538 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
6539 );
6540 return;
6541 }
6542
6543 println!();
6544 let header = match target {
6545 Some(t) => format!("VELA · REPLICATIONS · {t}"),
6546 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
6547 };
6548 println!(" {}", header.to_uppercase().dimmed());
6549 println!(" {}", style::tick_row(60));
6550 if filtered.is_empty() {
6551 println!(" (no replications recorded)");
6552 return;
6553 }
6554 for rep in &filtered {
6555 let outcome_chip = match rep.outcome.as_str() {
6556 "replicated" => style::ok(&rep.outcome),
6557 "failed" => style::lost(&rep.outcome),
6558 "partial" => style::warn(&rep.outcome),
6559 _ => rep.outcome.clone().normal().to_string(),
6560 };
6561 println!(
6562 " · {} {} by {}",
6563 rep.id.dimmed(),
6564 outcome_chip,
6565 rep.attempted_by
6566 );
6567 println!(" target: {}", rep.target_finding);
6568 if !rep.conditions.text.is_empty() {
6569 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
6570 }
6571 if !rep.provenance.title.is_empty() {
6572 println!(" source: {}", truncate(&rep.provenance.title, 80));
6573 }
6574 }
6575}
6576
6577async fn cmd_ingest(
6590 path: &str,
6591 frontier: &Path,
6592 backend: Option<&str>,
6593 actor: Option<&str>,
6594 dry_run: bool,
6595 json: bool,
6596) {
6597 let lowered = path.trim().to_lowercase();
6599 if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
6600 cmd_source_fetch(path.trim(), None, None, false, json).await;
6601 return;
6602 }
6603
6604 let p = std::path::PathBuf::from(path);
6605 if !p.exists() {
6606 fail(&format!(
6607 "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
6608 ));
6609 }
6610
6611 let ext = p
6613 .extension()
6614 .and_then(|s| s.to_str())
6615 .map(|s| s.to_ascii_lowercase());
6616
6617 if p.is_file() {
6618 match ext.as_deref() {
6619 Some("pdf") => {
6620 cmd_scout(&p, frontier, backend, dry_run, json).await;
6624 }
6625 Some("md") | Some("markdown") => {
6626 cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
6629 }
6630 Some("csv") | Some("tsv") => {
6631 cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
6634 }
6635 Some("json") => {
6636 let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
6638 cmd_artifact_to_state(frontier, &p, actor_id, false, json);
6639 }
6640 other => {
6641 fail(&format!(
6642 "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
6643 other.unwrap_or("(none)")
6644 ));
6645 }
6646 }
6647 return;
6648 }
6649
6650 if p.is_dir() {
6651 let mut pdf_count = 0usize;
6653 let mut md_count = 0usize;
6654 let mut data_count = 0usize;
6655 if let Ok(entries) = std::fs::read_dir(&p) {
6656 for entry in entries.flatten() {
6657 if let Some(name) = entry.file_name().to_str()
6658 && let Some(dot) = name.rfind('.')
6659 {
6660 match name[dot + 1..].to_ascii_lowercase().as_str() {
6661 "pdf" => pdf_count += 1,
6662 "md" | "markdown" => md_count += 1,
6663 "csv" | "tsv" => data_count += 1,
6664 _ => {}
6665 }
6666 }
6667 }
6668 }
6669
6670 if pdf_count > 0 && pdf_count >= md_count && pdf_count >= data_count {
6671 cmd_scout(&p, frontier, backend, dry_run, json).await;
6672 } else if md_count > 0 && md_count >= data_count {
6673 cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
6674 } else if data_count > 0 {
6675 cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
6676 } else {
6677 cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
6679 }
6680 return;
6681 }
6682
6683 fail(&format!(
6684 "ingest: path '{path}' is neither a file nor a directory"
6685 ));
6686}
6687
6688#[allow(clippy::too_many_arguments)]
6689async fn cmd_compile_data(
6691 root: &Path,
6692 frontier: &Path,
6693 backend: Option<&str>,
6694 sample_rows: Option<usize>,
6695 dry_run: bool,
6696 json_out: bool,
6697) {
6698 match DATASETS_HANDLER.get() {
6699 Some(handler) => {
6700 handler(
6701 root.to_path_buf(),
6702 frontier.to_path_buf(),
6703 backend.map(String::from),
6704 sample_rows,
6705 dry_run,
6706 json_out,
6707 )
6708 .await;
6709 }
6710 None => {
6711 eprintln!(
6712 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
6713 style::err_prefix()
6714 );
6715 std::process::exit(1);
6716 }
6717 }
6718}
6719
6720async fn cmd_review_pending(
6723 frontier: &Path,
6724 backend: Option<&str>,
6725 max_proposals: Option<usize>,
6726 batch_size: usize,
6727 dry_run: bool,
6728 json_out: bool,
6729) {
6730 match REVIEWER_HANDLER.get() {
6731 Some(handler) => {
6732 handler(
6733 frontier.to_path_buf(),
6734 backend.map(String::from),
6735 max_proposals,
6736 batch_size,
6737 dry_run,
6738 json_out,
6739 )
6740 .await;
6741 }
6742 None => {
6743 eprintln!(
6744 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
6745 style::err_prefix()
6746 );
6747 std::process::exit(1);
6748 }
6749 }
6750}
6751
6752async fn cmd_find_tensions(
6755 frontier: &Path,
6756 backend: Option<&str>,
6757 max_findings: Option<usize>,
6758 dry_run: bool,
6759 json_out: bool,
6760) {
6761 match TENSIONS_HANDLER.get() {
6762 Some(handler) => {
6763 handler(
6764 frontier.to_path_buf(),
6765 backend.map(String::from),
6766 max_findings,
6767 dry_run,
6768 json_out,
6769 )
6770 .await;
6771 }
6772 None => {
6773 eprintln!(
6774 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
6775 style::err_prefix()
6776 );
6777 std::process::exit(1);
6778 }
6779 }
6780}
6781
6782async fn cmd_plan_experiments(
6785 frontier: &Path,
6786 backend: Option<&str>,
6787 max_findings: Option<usize>,
6788 dry_run: bool,
6789 json_out: bool,
6790) {
6791 match EXPERIMENTS_HANDLER.get() {
6792 Some(handler) => {
6793 handler(
6794 frontier.to_path_buf(),
6795 backend.map(String::from),
6796 max_findings,
6797 dry_run,
6798 json_out,
6799 )
6800 .await;
6801 }
6802 None => {
6803 eprintln!(
6804 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
6805 style::err_prefix()
6806 );
6807 std::process::exit(1);
6808 }
6809 }
6810}
6811
6812async fn cmd_compile_code(
6815 root: &Path,
6816 frontier: &Path,
6817 backend: Option<&str>,
6818 max_files: Option<usize>,
6819 dry_run: bool,
6820 json_out: bool,
6821) {
6822 match CODE_HANDLER.get() {
6823 Some(handler) => {
6824 handler(
6825 root.to_path_buf(),
6826 frontier.to_path_buf(),
6827 backend.map(String::from),
6828 max_files,
6829 dry_run,
6830 json_out,
6831 )
6832 .await;
6833 }
6834 None => {
6835 eprintln!(
6836 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
6837 style::err_prefix()
6838 );
6839 std::process::exit(1);
6840 }
6841 }
6842}
6843
6844async fn cmd_compile_notes(
6849 vault: &Path,
6850 frontier: &Path,
6851 backend: Option<&str>,
6852 max_files: Option<usize>,
6853 max_items_per_category: Option<usize>,
6854 dry_run: bool,
6855 json_out: bool,
6856) {
6857 match NOTES_HANDLER.get() {
6858 Some(handler) => {
6859 handler(
6860 vault.to_path_buf(),
6861 frontier.to_path_buf(),
6862 backend.map(String::from),
6863 max_files,
6864 max_items_per_category,
6865 dry_run,
6866 json_out,
6867 )
6868 .await;
6869 }
6870 None => {
6871 eprintln!(
6872 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
6873 style::err_prefix()
6874 );
6875 std::process::exit(1);
6876 }
6877 }
6878}
6879
6880async fn cmd_scout(
6887 folder: &Path,
6888 frontier: &Path,
6889 backend: Option<&str>,
6890 dry_run: bool,
6891 json_out: bool,
6892) {
6893 match SCOUT_HANDLER.get() {
6894 Some(handler) => {
6895 handler(
6896 folder.to_path_buf(),
6897 frontier.to_path_buf(),
6898 backend.map(String::from),
6899 dry_run,
6900 json_out,
6901 )
6902 .await;
6903 }
6904 None => {
6905 eprintln!(
6906 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
6907 style::err_prefix()
6908 );
6909 std::process::exit(1);
6910 }
6911 }
6912}
6913
#[allow(clippy::too_many_arguments)]
/// `vela check` — run validation over a frontier source.
///
/// `--json` short-circuits into `check_json_payload` and exits non-zero when
/// the payload's `ok` field is not `true`. Otherwise, with no specific check
/// flag set (or with `--all`), every check runs: schema validation, lint +
/// event-replay + signature + signal stats, and conformance. `strict` turns
/// signal warnings / non-ready proof state into a non-zero exit.
///
/// NOTE(review): `fix` is currently accepted but unused (`let _ = fix;` at the
/// bottom) — presumably reserved for a future auto-fix mode; confirm.
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        // Non-zero exit whenever the report did not conclude ok == true.
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // No explicit selection behaves like --all.
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        // Replay the canonical event log and surface any conflicts.
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Only print signature stats when at least one item is signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails; strict mode additionally fails on a
        // non-empty review queue or a proof state that is not "ready".
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        conformance::run(conformance_dir);
    }
    let _ = fix;
}
6986
/// Build the `vela check --json` report for the frontier at `src`.
///
/// Runs schema validation, optional methodology/frontier-graph lints
/// (skipped when `schema_only`), event replay, state-integrity analysis,
/// source/evidence/condition projections and signal analysis, then folds
/// everything into one JSON `Value`. `ok` is true when there are zero
/// errors and, under `strict`, also zero warnings and zero strict blockers.
fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
    let report = validate::validate(src);
    // `loaded` is None when the frontier fails to parse; downstream checks
    // that need the in-memory frontier are then skipped.
    let loaded = repo::load_from_path(src).ok();
    let (method_report, graph_report) = if schema_only {
        (None, None)
    } else if let Some(frontier) = loaded.as_ref() {
        (
            Some(lint::lint(frontier, None, None)),
            Some(lint::lint_frontier(frontier)),
        )
    } else {
        (None, None)
    };
    let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
    // All diagnostics (schema, lint, replay, projection, proposal) share one
    // flat list with a common shape: severity/rule_id/message/fixable/… .
    let mut diagnostics = Vec::new();
    diagnostics.extend(report.errors.iter().map(|e| {
        json!({
            "severity": "error",
            "rule_id": "schema",
            "finding_id": null,
            "file": &e.file,
            "field_path": null,
            "message": &e.error,
            "suggestion": schema_error_suggestion(&e.error),
            "fixable": schema_error_fix(&e.error),
            "normalize_action": schema_error_action(&e.error),
        })
    }));
    // Fold both lint reports into diagnostics, tagged by which check produced them.
    for (check_id, lint_report) in [
        ("methodology", method_report.as_ref()),
        ("frontier_graph", graph_report.as_ref()),
    ] {
        if let Some(lint_report) = lint_report {
            diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
                json!({
                    "severity": d.severity.to_string(),
                    "rule_id": &d.rule_id,
                    "check": check_id,
                    "finding_id": &d.finding_id,
                    "field_path": null,
                    "message": &d.message,
                    "suggestion": &d.suggestion,
                    "fixable": false,
                    "normalize_action": null,
                })
            }));
        }
    }
    // Per-check counters; absent reports count as zero.
    let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
    let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
    let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
    let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
    let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
    let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
    let replay_report = loaded.as_ref().map(events::replay_report);
    // NOTE(review): schema-only mode analyzes the already-loaded in-memory
    // frontier, while full mode re-reads from disk via `analyze_path` —
    // confirm this asymmetry is intentional.
    let state_integrity_report = if schema_only {
        loaded.as_ref().map(state_integrity::analyze)
    } else {
        state_integrity::analyze_path(src).ok()
    };
    if let Some(replay) = replay_report.as_ref()
        && !replay.ok
    {
        diagnostics.extend(replay.conflicts.iter().map(|conflict| {
            json!({
                "severity": "error",
                "rule_id": "event_replay",
                "check": "events",
                "finding_id": null,
                "field_path": null,
                "message": conflict,
                "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
                "fixable": false,
                "normalize_action": null,
            })
        }));
    }
    // A failed replay contributes exactly one error, regardless of conflict count.
    let event_errors = replay_report
        .as_ref()
        .map_or(0, |replay| usize::from(!replay.ok));
    let state_integrity_errors = state_integrity_report
        .as_ref()
        .map_or(0, |report| report.structural_errors.len());
    // Summaries of sources / evidence / conditions / proposals, defaulting to
    // empty summaries when the frontier did not load.
    let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
        .as_ref()
        .map(|frontier| {
            (
                sources::source_summary(frontier),
                sources::evidence_summary(frontier),
                sources::condition_summary(frontier),
                proposals::summary(frontier),
                proposals::proof_state_json(&frontier.proof_state),
            )
        })
        .unwrap_or_else(|| {
            (
                sources::SourceRegistrySummary::default(),
                sources::EvidenceAtomSummary::default(),
                sources::ConditionSummary::default(),
                proposals::ProposalSummary::default(),
                Value::Null,
            )
        });
    let signature_report = loaded
        .as_ref()
        .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
    // Projection-based diagnostics: warn about derivable-but-unmaterialized
    // sources / evidence atoms / condition records, missing evidence locators,
    // and accepted/applied proposals with placeholder reviewer identities.
    if let Some(frontier) = loaded.as_ref()
        && !schema_only
    {
        let projection = sources::derive_projection(frontier);
        let existing_sources = frontier
            .sources
            .iter()
            .map(|source| source.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_atoms = frontier
            .evidence_atoms
            .iter()
            .map(|atom| atom.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_conditions = frontier
            .condition_records
            .iter()
            .map(|record| record.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        for source in projection
            .sources
            .iter()
            .filter(|source| !existing_sources.contains(source.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_source_record",
                "check": "source_registry",
                "finding_id": source.finding_ids.first(),
                "field_path": "sources",
                "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
                "suggestion": "Run `vela normalize` to materialize source records before proof export.",
                "fixable": true,
                "normalize_action": "materialize_source_record",
            }));
        }
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_atom",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms",
                "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
                "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
                "fixable": true,
                "normalize_action": "materialize_evidence_atom",
            }));
        }
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| atom.locator.is_none())
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_locator",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms[].locator",
                "message": format!("Evidence atom {} has no source locator.", atom.id),
                "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
        for condition in projection
            .condition_records
            .iter()
            .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "condition_record_missing",
                "check": "conditions",
                "finding_id": condition.finding_id,
                "field_path": "condition_records",
                "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
                "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
                "fixable": true,
                "normalize_action": "materialize_condition_record",
            }));
        }
        for proposal in frontier.proposals.iter().filter(|proposal| {
            matches!(proposal.status.as_str(), "accepted" | "applied")
                && proposal
                    .reviewed_by
                    .as_deref()
                    .is_none_or(proposals::is_placeholder_reviewer)
        }) {
            diagnostics.push(json!({
                "severity": "error",
                "rule_id": "reviewer_identity_missing",
                "check": "proposals",
                "finding_id": proposal.target.id,
                "field_path": "proposals[].reviewed_by",
                "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
                "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
    }
    // Signal analysis consumes the accumulated diagnostics.
    let signal_report = loaded
        .as_ref()
        .map(|frontier| signals::analyze(frontier, &diagnostics))
        .unwrap_or_else(empty_signal_report);
    let errors =
        report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
    let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
    let infos = method_infos + graph_infos;
    // Signals that explicitly block "strict_check" fail strict mode.
    let strict_blockers = signal_report
        .signals
        .iter()
        .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
        .count();
    let fixable = diagnostics
        .iter()
        .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
        .count();
    let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));

    json!({
        "ok": ok,
        "command": "check",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": src.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "summary": {
            "status": if ok { "pass" } else { "fail" },
            "checked_findings": report.total_files,
            "valid_findings": report.valid,
            "invalid_findings": report.invalid,
            "errors": errors,
            "warnings": warnings,
            "info": infos,
            "fixable": fixable,
            "strict": strict,
            "schema_only": schema_only,
        },
        "checks": [
            {
                "id": "schema",
                "status": if report.invalid == 0 { "pass" } else { "fail" },
                "checked": report.total_files,
                "failed": report.invalid,
                "errors": report.errors.iter().map(|e| json!({
                    "file": e.file,
                    "message": e.error,
                })).collect::<Vec<_>>(),
            },
            {
                "id": "methodology",
                "status": if method_errors == 0 { "pass" } else { "fail" },
                "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": method_errors,
                "warnings": method_warnings,
                "info": method_infos,
                "skipped": schema_only,
            },
            {
                "id": "frontier_graph",
                "status": if graph_errors == 0 { "pass" } else { "fail" },
                "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": graph_errors,
                "warnings": graph_warnings,
                "info": graph_infos,
                "skipped": schema_only,
            },
            {
                "id": "signals",
                "status": if strict_blockers == 0 { "pass" } else { "fail" },
                "checked": signal_report.signals.len(),
                "failed": strict_blockers,
                "warnings": signal_report.proof_readiness.warnings,
                "skipped": loaded.is_none(),
                "blockers": signal_report.signals.iter()
                    .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
                    .map(|s| json!({
                        "id": s.id,
                        "kind": s.kind,
                        "severity": s.severity,
                        "reason": s.reason,
                    }))
                    .collect::<Vec<_>>(),
            },
            {
                "id": "events",
                "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
                "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
                "failed": event_errors,
                "skipped": schema_only || loaded.is_none(),
            },
            {
                "id": "state_integrity",
                "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
                "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
                "failed": state_integrity_errors,
                "skipped": schema_only || loaded.is_none(),
            }
        ],
        "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
        "replay": replay_report,
        "state_integrity": state_integrity_report,
        "source_registry": source_registry,
        "evidence_atoms": evidence_atoms,
        "conditions": conditions,
        "proposals": proposal_summary,
        "proof_state": proof_state,
        "signatures": signature_report,
        "diagnostics": diagnostics,
        "signals": signal_report.signals,
        "review_queue": signal_report.review_queue,
        "proof_readiness": signal_report.proof_readiness,
        "repair_plan": build_repair_plan(&diagnostics),
    })
}
7316
#[allow(clippy::too_many_arguments)]
/// `vela normalize` — canonicalize a frontier: fix entity types/names,
/// recompute confidence, optionally resync provenance and rewrite finding
/// ids to their content addresses, and materialize derivable source /
/// evidence-atom / condition records.
///
/// Writes in place with `--write`, to a new path with `--out`, and is a
/// report-only dry run otherwise. Refuses to write over a frontier that
/// already has canonical events beyond `frontier.created`, and refuses to
/// normalize a proof packet directory.
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag-combination guards; each `fail` exits the process.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Any event other than the initial `frontier.created` makes the frontier
    // append-only for normalization writes (dry run is still allowed).
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutation so `stats_changed` can be reported.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Counts taken before materialization; the deltas below are what got added.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // old id -> expected content-address id, only where they differ.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Abort on a collision: two findings must never share a new id.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Apply the rewrite, keeping the old id as `previous_version`.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Retarget links that pointed at rewritten ids.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // `wrote_to` is Some(path-string) only when --write or --out was given.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            "  entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            "  would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
7504
/// `vela proof`: export a proof packet for a frontier using a named template,
/// optionally benchmark it against a gold file, validate the resulting packet,
/// and record the export into the frontier's proof state.
///
/// Any failure aborts via `fail`. Output is a pretty-JSON report when
/// `json_output` is set, otherwise a short human-readable summary.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    // Only one proof template exists today; reject anything else up front.
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    // Export first; the record's hashes are written back into the frontier below.
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // With a gold file: run the benchmark suite, persist its summary into the
    // packet, and abort the whole command if the suite did not report ok=true.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // Validate the packet we just wrote before recording anything.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export's hashes into the in-memory frontier state.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Persist the updated frontier only when explicitly requested.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
7608
/// `vela status`: one-screen health summary for a frontier — inbox pressure,
/// causal-audit verdicts, replication outcomes, and federation activity.
/// Emits a JSON payload with `--json`, otherwise a styled text report.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Count proposals awaiting review, bucketed by kind.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Causal identifiability audit over all findings.
    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Scan the event log for the latest peer sync, the latest conflict, and
    // the total number of conflict events.
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication tally: distinct findings with at least one success, plus
    // the count of outright failures.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable report: banner, headline counts, then one section each
    // for inbox, causal audit, replications, and federation.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        // "lost" chip for genuinely underidentified findings; "warn" when only
        // conditional ones remain.
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
7806
7807fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
7809 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
7810 let mut events: Vec<&crate::events::StateEvent> = project
7811 .events
7812 .iter()
7813 .filter(|e| match kind_filter {
7814 Some(k) => e.kind.contains(k),
7815 None => true,
7816 })
7817 .collect();
7818 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
7819 events.truncate(limit);
7820
7821 if json {
7822 let payload: Vec<_> = events
7823 .iter()
7824 .map(|e| {
7825 json!({
7826 "id": e.id,
7827 "kind": e.kind,
7828 "actor": e.actor.id,
7829 "target": &e.target.id,
7830 "target_type": &e.target.r#type,
7831 "timestamp": e.timestamp,
7832 "reason": e.reason,
7833 })
7834 })
7835 .collect();
7836 println!(
7837 "{}",
7838 serde_json::to_string_pretty(&json!({
7839 "ok": true,
7840 "command": "log",
7841 "events": payload,
7842 }))
7843 .expect("serialize log")
7844 );
7845 return;
7846 }
7847
7848 println!();
7849 println!(
7850 " {}",
7851 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
7852 .to_uppercase()
7853 .dimmed()
7854 );
7855 println!(" {}", style::tick_row(60));
7856 if events.is_empty() {
7857 println!(" (no events)");
7858 return;
7859 }
7860 for e in &events {
7861 let when = fmt_timestamp(&e.timestamp);
7862 let target_short = if e.target.id.len() > 22 {
7863 format!("{}…", &e.target.id[..21])
7864 } else {
7865 e.target.id.clone()
7866 };
7867 let reason: String = e.reason.chars().take(70).collect();
7868 println!(
7869 " {:<19} {:<32} {:<24} {}",
7870 when, e.kind, target_short, reason
7871 );
7872 }
7873 println!();
7874}
7875
7876fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
7878 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
7879
7880 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
7883 std::collections::HashMap::new();
7884 for p in &project.proposals {
7885 if p.kind != "finding.note" {
7886 continue;
7887 }
7888 if p.actor.id != "agent:reviewer-agent" {
7889 continue;
7890 }
7891 let reason = &p.reason;
7892 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
7893 continue;
7894 };
7895 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
7896 let extract = |k: &str| -> f64 {
7897 let pat = format!("{k} ");
7898 text.find(&pat)
7899 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
7900 .and_then(|t| t.parse::<f64>().ok())
7901 .unwrap_or(0.0)
7902 };
7903 score_map.insert(
7904 target.to_string(),
7905 (
7906 extract("plausibility"),
7907 extract("evidence"),
7908 extract("scope"),
7909 extract("duplicate-risk"),
7910 ),
7911 );
7912 }
7913
7914 let mut pending: Vec<&crate::proposals::StateProposal> = project
7915 .proposals
7916 .iter()
7917 .filter(|p| {
7918 p.status == "pending_review"
7919 && match kind_filter {
7920 Some(k) => p.kind.contains(k),
7921 None => true,
7922 }
7923 })
7924 .collect();
7925 pending.sort_by(|a, b| {
7927 let sa = score_map
7928 .get(&a.id)
7929 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
7930 let sb = score_map
7931 .get(&b.id)
7932 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
7933 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
7934 });
7935 pending.truncate(limit);
7936
7937 if json {
7938 let payload: Vec<_> = pending
7939 .iter()
7940 .map(|p| {
7941 let assertion_text = p
7942 .payload
7943 .get("finding")
7944 .and_then(|f| f.get("assertion"))
7945 .and_then(|a| a.get("text"))
7946 .and_then(|t| t.as_str());
7947 let assertion_type = p
7948 .payload
7949 .get("finding")
7950 .and_then(|f| f.get("assertion"))
7951 .and_then(|a| a.get("type"))
7952 .and_then(|t| t.as_str());
7953 let composite = score_map
7954 .get(&p.id)
7955 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
7956 json!({
7957 "proposal_id": p.id,
7958 "kind": p.kind,
7959 "actor": p.actor,
7960 "reason": p.reason,
7961 "assertion_text": assertion_text,
7962 "assertion_type": assertion_type,
7963 "reviewer_composite": composite,
7964 })
7965 })
7966 .collect();
7967 println!(
7968 "{}",
7969 serde_json::to_string_pretty(&json!({
7970 "ok": true,
7971 "command": "inbox",
7972 "shown": pending.len(),
7973 "proposals": payload,
7974 }))
7975 .expect("serialize inbox")
7976 );
7977 return;
7978 }
7979
7980 println!();
7981 println!(
7982 " {}",
7983 format!(
7984 "VELA · INBOX · {} ({} pending shown)",
7985 path.display(),
7986 pending.len()
7987 )
7988 .to_uppercase()
7989 .dimmed()
7990 );
7991 println!(" {}", style::tick_row(60));
7992 if pending.is_empty() {
7993 println!(" (inbox clean)");
7994 return;
7995 }
7996 for p in &pending {
7997 let assertion_text = p
7998 .payload
7999 .get("finding")
8000 .and_then(|f| f.get("assertion"))
8001 .and_then(|a| a.get("text"))
8002 .and_then(|t| t.as_str())
8003 .unwrap_or("");
8004 let assertion_type = p
8005 .payload
8006 .get("finding")
8007 .and_then(|f| f.get("assertion"))
8008 .and_then(|a| a.get("type"))
8009 .and_then(|t| t.as_str())
8010 .unwrap_or("");
8011 let composite = score_map
8012 .get(&p.id)
8013 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8014 let score_str = composite
8015 .map(|c| format!("[{:.2}]", c))
8016 .unwrap_or_else(|| "[—] ".to_string());
8017 let kind_short = if p.kind.len() > 12 {
8018 format!("{}…", &p.kind[..11])
8019 } else {
8020 p.kind.clone()
8021 };
8022 let summary: String = if !assertion_text.is_empty() {
8023 assertion_text.chars().take(80).collect()
8024 } else {
8025 p.reason.chars().take(80).collect()
8026 };
8027 println!(
8028 " {} {} {:<13} {:<18} {}",
8029 score_str, p.id, kind_short, assertion_type, summary
8030 );
8031 }
8032 println!();
8033}
8034
8035fn cmd_ask(path: &Path, question: &str, json: bool) {
8040 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8041
8042 if question.trim().is_empty() {
8043 use std::io::{BufRead, Write};
8045 println!();
8046 println!(
8047 " {}",
8048 format!("VELA · ASK · {}", path.display())
8049 .to_uppercase()
8050 .dimmed()
8051 );
8052 println!(" {}", style::tick_row(60));
8053 println!(" Ask a question. Type `exit` to quit.");
8054 println!(" Examples:");
8055 println!(" · what's pending?");
8056 println!(" · what's underidentified?");
8057 println!(" · how many findings?");
8058 println!(" · what changed recently?");
8059 println!(" · who has what calibration?");
8060 println!();
8061 let stdin = std::io::stdin();
8062 let mut stdout = std::io::stdout();
8063 loop {
8064 print!(" ask> ");
8065 stdout.flush().ok();
8066 let mut line = String::new();
8067 if stdin.lock().read_line(&mut line).is_err() {
8068 break;
8069 }
8070 let q = line.trim();
8071 if q.is_empty() {
8072 continue;
8073 }
8074 if matches!(q, "exit" | "quit" | "q") {
8075 break;
8076 }
8077 answer(&project, q, false);
8078 }
8079 return;
8080 }
8081
8082 answer(&project, question, json);
8083}
8084
/// Keyword router behind `vela ask`: matches the lowercased question against a
/// fixed set of topics — pending proposals, causal audit, recent events,
/// counts, calibration, federation — and prints the first matching answer.
/// Unmatched questions get a hint listing the supported topics.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Topic: pending proposals / inbox triage.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Topic: causal identifiability audit.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                // Show at most 8 underidentified findings, 70 chars each.
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Topic: recent activity (newest 8 events).
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Topic: headline counts.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Topic: prediction calibration ledger (per-actor Brier scores).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Topic: federation peers and conflicts.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: no topic matched — tell the user what this router understands.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
8316
8317fn frontier_label(p: &crate::project::Project) -> String {
8318 if p.project.name.trim().is_empty() {
8319 "(unnamed)".to_string()
8320 } else {
8321 p.project.name.clone()
8322 }
8323}
8324
8325fn fmt_timestamp(ts: &str) -> String {
8326 chrono::DateTime::parse_from_rfc3339(ts)
8329 .map(|dt| dt.format("%m-%d %H:%M").to_string())
8330 .unwrap_or_else(|_| ts.chars().take(16).collect())
8331}
8332
/// `vela stats`: print the frontier's compiled statistics block — counts,
/// confidence, proof status, and per-category finding totals.
fn cmd_stats(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let s = &frontier.stats;
    println!();
    // NOTE(review): this banner hard-codes "V0.36.0", which can drift from the
    // crate version that clap reports via `--version` — confirm intentional.
    println!(" {}", "FRONTIER · V0.36.0".dimmed());
    println!(" {}", frontier.project.name.bold());
    println!(" {}", style::tick_row(60));
    println!(" id: {}", frontier.frontier_id());
    println!(" compiled: {}", frontier.project.compiled_at);
    println!(" papers: {}", frontier.project.papers_processed);
    println!(" findings: {}", s.findings);
    println!(" links: {}", s.links);
    println!(" replicated: {}", s.replicated);
    println!(" avg confidence: {}", s.avg_confidence);
    println!(" gaps: {}", s.gaps);
    println!(" contested: {}", s.contested);
    println!(" reviewed: {}", s.human_reviewed);
    println!(" proposals: {}", s.proposal_count);
    println!(
        " recorded proof: {}",
        frontier.proof_state.latest_packet.status
    );
    // Clarify that the recorded proof status is metadata only; the packet
    // files themselves are verified by a separate command.
    if frontier.proof_state.latest_packet.status != "never_exported" {
        println!(
            " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
        );
    }
    // Per-category finding counts, largest first.
    if !s.categories.is_empty() {
        println!();
        println!(" {}", "categories".dimmed());
        let mut categories = s.categories.iter().collect::<Vec<_>>();
        categories.sort_by(|a, b| b.1.cmp(a.1));
        for (category, count) in categories {
            println!(" {category}: {}", count);
        }
    }
    println!();
    println!(" {}", style::tick_row(60));
    println!();
}
8373
/// `vela proposals <action>`: dispatch for the proposal subcommands — list,
/// show, preview, import, validate, export, accept, reject. Every arm prints
/// either a pretty-JSON payload (`--json`) or a short human-readable summary,
/// and aborts via `fail_return` on any underlying error.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report before/after deltas without persisting.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal file; exits non-zero if any proposal is invalid.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Non-zero exit so CI and scripts can detect invalid input.
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the resulting event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal; no event id is produced.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
8648
8649fn cmd_artifact_to_state(
8650 frontier: &Path,
8651 packet: &Path,
8652 actor: &str,
8653 apply_artifacts: bool,
8654 json: bool,
8655) {
8656 let report =
8657 crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
8658 .unwrap_or_else(|e| fail_return(&e));
8659 if json {
8660 println!(
8661 "{}",
8662 serde_json::to_string_pretty(&report)
8663 .expect("failed to serialize artifact-to-state report")
8664 );
8665 } else {
8666 println!("vela artifact-to-state");
8667 println!(" packet: {}", report.packet_id);
8668 println!(" frontier: {}", report.frontier);
8669 println!(" artifact proposals: {}", report.artifact_proposals);
8670 println!(" finding proposals: {}", report.finding_proposals);
8671 println!(" gap proposals: {}", report.gap_proposals);
8672 println!(
8673 " applied artifact events: {}",
8674 report.applied_artifact_events
8675 );
8676 println!(
8677 " pending truth proposals: {}",
8678 report.pending_truth_proposals
8679 );
8680 }
8681}
8682
8683fn cmd_bridge_kit(action: BridgeKitAction) {
8684 match action {
8685 BridgeKitAction::Validate { source, json } => {
8686 let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
8687 if json {
8688 println!(
8689 "{}",
8690 serde_json::to_string_pretty(&report)
8691 .expect("failed to serialize bridge-kit validation report")
8692 );
8693 } else {
8694 println!("vela bridge-kit validate");
8695 println!(" source: {}", report.source);
8696 println!(" packets: {}", report.packet_count);
8697 println!(" valid: {}", report.valid_packet_count);
8698 println!(" invalid: {}", report.invalid_packet_count);
8699 for packet in &report.packets {
8700 if packet.ok {
8701 println!(
8702 " ok: {} · {} artifacts · {} claims · {} needs",
8703 packet
8704 .packet_id
8705 .as_deref()
8706 .unwrap_or("packet id unavailable"),
8707 packet.artifact_count,
8708 packet.candidate_claim_count,
8709 packet.open_need_count
8710 );
8711 } else {
8712 println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
8713 }
8714 }
8715 for error in &report.errors {
8716 println!(" error: {error}");
8717 }
8718 }
8719 if !report.ok {
8720 std::process::exit(1);
8721 }
8722 }
8723 }
8724}
8725
8726async fn cmd_source_adapter(action: SourceAdapterAction) {
8727 match action {
8728 SourceAdapterAction::Run {
8729 frontier,
8730 adapter,
8731 actor,
8732 entries,
8733 priority,
8734 include_excluded,
8735 allow_partial,
8736 dry_run,
8737 input_dir,
8738 apply_artifacts,
8739 json,
8740 } => {
8741 let report = crate::source_adapters::run(
8742 &frontier,
8743 crate::source_adapters::SourceAdapterRunOptions {
8744 adapter,
8745 actor,
8746 entries,
8747 priority,
8748 include_excluded,
8749 allow_partial,
8750 dry_run,
8751 input_dir,
8752 apply_artifacts,
8753 },
8754 )
8755 .await
8756 .unwrap_or_else(|e| fail_return(&e));
8757 if json {
8758 println!(
8759 "{}",
8760 serde_json::to_string_pretty(&report)
8761 .expect("failed to serialize source adapter report")
8762 );
8763 } else {
8764 println!("vela source-adapter run");
8765 println!(" adapter: {}", report.adapter);
8766 println!(" run: {}", report.run_id);
8767 println!(" frontier: {}", report.frontier);
8768 println!(" selected entries: {}", report.selected_entries);
8769 println!(" fetched records: {}", report.fetched_records);
8770 println!(" changed records: {}", report.changed_records);
8771 println!(" unchanged records: {}", report.unchanged_records);
8772 println!(" failed records: {}", report.failed_records.len());
8773 if let Some(packet_id) = report.packet_id {
8774 println!(" packet: {packet_id}");
8775 }
8776 println!(" artifact proposals: {}", report.artifact_proposals);
8777 println!(" review note proposals: {}", report.review_note_proposals);
8778 println!(" applied events: {}", report.applied_event_ids.len());
8779 }
8780 }
8781 }
8782}
8783
8784fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
8785 match action {
8786 RuntimeAdapterAction::Run {
8787 frontier,
8788 adapter,
8789 input,
8790 actor,
8791 dry_run,
8792 apply_artifacts,
8793 json,
8794 } => {
8795 let report = crate::runtime_adapters::run(
8796 &frontier,
8797 crate::runtime_adapters::RuntimeAdapterRunOptions {
8798 adapter,
8799 input,
8800 actor,
8801 dry_run,
8802 apply_artifacts,
8803 },
8804 )
8805 .unwrap_or_else(|e| fail_return(&e));
8806 if json {
8807 println!(
8808 "{}",
8809 serde_json::to_string_pretty(&report)
8810 .expect("failed to serialize runtime adapter report")
8811 );
8812 } else {
8813 println!("vela runtime-adapter run");
8814 println!(" adapter: {}", report.adapter);
8815 println!(" run: {}", report.run_id);
8816 println!(" frontier: {}", report.frontier);
8817 if let Some(packet_id) = report.packet_id {
8818 println!(" packet: {packet_id}");
8819 }
8820 println!(" artifact proposals: {}", report.artifact_proposals);
8821 println!(" finding proposals: {}", report.finding_proposals);
8822 println!(" gap proposals: {}", report.gap_proposals);
8823 println!(" review note proposals: {}", report.review_note_proposals);
8824 println!(
8825 " applied artifact events: {}",
8826 report.applied_artifact_events
8827 );
8828 println!(
8829 " pending truth proposals: {}",
8830 report.pending_truth_proposals
8831 );
8832 }
8833 }
8834 }
8835}
8836
/// Handle the `vela sign` subcommands: keypair generation, signing a
/// frontier's findings, verifying signatures, and setting per-finding
/// signature thresholds.
///
/// Every arm supports `--json` (machine-readable payload) with a styled
/// human-readable fallback; fatal errors exit through `fail`/`fail_return`.
fn cmd_sign(action: SignAction) {
    match action {
        SignAction::GenerateKeypair { out, json } => {
            // Writes the keypair under `out`; only the public half is echoed.
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            // Signs the frontier's findings; `count` is how many were signed.
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            // `public_key` is optional — presumably verification falls back to
            // keys registered in the frontier; TODO confirm in sign::verify_frontier.
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold stats only matter when at least one finding sets one.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A threshold of zero would be trivially met; reject it up front.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute joint-acceptance flags so `met` reflects the new
            // threshold before the project is persisted.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
8965
8966fn cmd_actor(action: ActorAction) {
8967 match action {
8968 ActorAction::Add {
8969 frontier,
8970 id,
8971 pubkey,
8972 tier,
8973 orcid,
8974 clearance,
8975 json,
8976 } => {
8977 let trimmed = pubkey.trim();
8979 if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
8980 fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
8981 }
8982 let orcid_normalized = orcid
8984 .as_deref()
8985 .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
8986 let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
8989 crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
8990 });
8991
8992 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
8993 if project.actors.iter().any(|actor| actor.id == id) {
8994 fail(&format!(
8995 "Actor '{id}' already registered in this frontier."
8996 ));
8997 }
8998 project.actors.push(sign::ActorRecord {
8999 id: id.clone(),
9000 public_key: trimmed.to_string(),
9001 algorithm: "ed25519".to_string(),
9002 created_at: chrono::Utc::now().to_rfc3339(),
9003 tier: tier.clone(),
9004 orcid: orcid_normalized.clone(),
9005 access_clearance: clearance,
9006 });
9007 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
9008 let payload = json!({
9009 "ok": true,
9010 "command": "actor.add",
9011 "frontier": frontier.display().to_string(),
9012 "actor_id": id,
9013 "public_key": trimmed,
9014 "tier": tier,
9015 "orcid": orcid_normalized,
9016 "registered_count": project.actors.len(),
9017 });
9018 if json {
9019 println!(
9020 "{}",
9021 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
9022 );
9023 } else {
9024 let tier_suffix = tier
9025 .as_deref()
9026 .map_or_else(String::new, |t| format!(" tier={t}"));
9027 println!(
9028 "{} actor {} (pubkey {}{tier_suffix})",
9029 style::ok("registered"),
9030 id,
9031 &trimmed[..16]
9032 );
9033 }
9034 }
9035 ActorAction::List { frontier, json } => {
9036 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9037 if json {
9038 let payload = json!({
9039 "ok": true,
9040 "command": "actor.list",
9041 "frontier": frontier.display().to_string(),
9042 "actors": project.actors,
9043 });
9044 println!(
9045 "{}",
9046 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
9047 );
9048 } else {
9049 println!();
9050 println!(
9051 " {}",
9052 format!("VELA · ACTOR · LIST · {}", frontier.display())
9053 .to_uppercase()
9054 .dimmed()
9055 );
9056 println!(" {}", style::tick_row(60));
9057 if project.actors.is_empty() {
9058 println!(" (no actors registered)");
9059 } else {
9060 for actor in &project.actors {
9061 println!(
9062 " {:<28} {}… registered {}",
9063 actor.id,
9064 &actor.public_key[..16],
9065 actor.created_at
9066 );
9067 }
9068 }
9069 }
9070 }
9071 }
9072}
9073
/// Handle the `vela causal` subcommands: audit identifiability of causal
/// claims across a frontier, identify the effect of one node on another,
/// dump the derived causal graph, and answer counterfactual queries.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let mut entries = causal_reasoning::audit_frontier(&project);
            // `--problems-only` keeps just the entries a reviewer must act on.
            if problems_only {
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // The summary is computed over the (possibly filtered) entries, so
            // with --problems-only the totals reflect only problem entries.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!(" (no entries to report)");
                return;
            }
            for e in &entries {
                // Map each identifiability verdict to a colored status chip.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Render the optional claim/grade enums via Debug, lowercased.
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
                // Truncate long assertions (78 chars) to keep the listing scannable.
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!(" {assertion_short}");
                println!(" {} {}", style::ok("why:"), e.rationale);
                // NOTE(review): Underdetermined is matched explicitly in
                // addition to needs_reviewer_attention() — confirm whether that
                // predicate already covers it (the second clause may be redundant).
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!(" {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        CausalAction::Effect {
            frontier,
            source,
            // CLI flag is `--on`; locally it names the effect's target node.
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            match verdict {
                // Back-door criterion satisfied: possibly with an empty
                // adjustment set (no confounding to block).
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    if adjustment_set.is_empty() {
                        println!(
                            " {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!(" {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!(" · {z}");
                        }
                    }
                    println!(
                        " back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        " {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!(" mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!(" · {m}");
                    }
                    println!(
                        " applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!(" {} no causal path: {reason}", style::warn("no_path"));
                }
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        " {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    println!(" open back-door paths:");
                    // Cap the listing at five paths to keep output readable.
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!(" · {}", path.join(" — "));
                    }
                    println!(
                        " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // With --node, restrict output to that single (validated) node;
            // otherwise report every finding in the project.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // Skip isolated nodes in the full listing, but still show an
                // explicitly requested single node even if isolated.
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue;
                }
                println!(" {n}");
                if !parents.is_empty() {
                    println!(" parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!(" children: {}", children.join(", "));
                }
            }
        }
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            // Wider rule (72) than the other headers: this banner is longer.
            println!(" {}", style::tick_row(72));
            match verdict {
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        " twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    // Show at most five paths to keep the output bounded.
                    for p in paths_used.iter().take(5) {
                        println!(" · {}", p.join(" → "));
                    }
                    println!(
                        " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
 instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        " {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!(" · {parent} → {child}");
                    }
                    println!(
                        " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                CounterfactualVerdict::NoCausalPath { factual } => {
                    println!(
                        " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
9428
9429fn cmd_bridges(action: BridgesAction) {
9432 use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
9433 use std::collections::HashMap;
9434
9435 fn bridges_dir(frontier: &Path) -> PathBuf {
9436 frontier.join(".vela/bridges")
9437 }
9438
9439 fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
9440 let path = bridges_dir(frontier).join(format!("{id}.json"));
9441 if !path.is_file() {
9442 return Err(format!("bridge not found: {id}"));
9443 }
9444 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
9445 serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
9446 }
9447
9448 fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
9449 let dir = bridges_dir(frontier);
9450 std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
9451 let path = dir.join(format!("{}.json", b.id));
9452 let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
9453 std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
9454 }
9455
9456 fn default_reviewer_id() -> String {
9459 std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
9460 }
9461
9462 fn emit_bridge_reviewed_event(
9473 frontier: &Path,
9474 bridge_id: &str,
9475 status: &str,
9476 reviewer_id: &str,
9477 note: Option<&str>,
9478 ) -> Result<(), String> {
9479 let mut payload = serde_json::json!({
9480 "bridge_id": bridge_id,
9481 "status": status,
9482 });
9483 if let Some(n) = note
9484 && !n.trim().is_empty()
9485 {
9486 payload["note"] = serde_json::Value::String(n.to_string());
9487 }
9488 let known_ids: Vec<String> = list_bridges(frontier)
9490 .unwrap_or_default()
9491 .into_iter()
9492 .map(|b| b.id)
9493 .collect();
9494 crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
9495 let event = crate::events::new_bridge_reviewed_event(
9496 bridge_id,
9497 reviewer_id,
9498 "human",
9499 &format!("Bridge {status} by {reviewer_id}"),
9500 payload,
9501 Vec::new(),
9502 );
9503 let events_dir = frontier.join(".vela/events");
9504 std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
9505 let event_path = events_dir.join(format!("{}.json", event.id));
9506 let data =
9507 serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
9508 std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
9509 }
9510
9511 fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
9512 let dir = bridges_dir(frontier);
9513 if !dir.is_dir() {
9514 return Ok(Vec::new());
9515 }
9516 let mut out = Vec::new();
9517 for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
9518 let entry = entry.map_err(|e| format!("read entry: {e}"))?;
9519 let path = entry.path();
9520 if path.extension().and_then(|s| s.to_str()) != Some("json") {
9521 continue;
9522 }
9523 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
9524 let b: Bridge =
9525 serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
9526 out.push(b);
9527 }
9528 out.sort_by(|a, b| {
9529 b.finding_refs
9530 .len()
9531 .cmp(&a.finding_refs.len())
9532 .then(a.entity_name.cmp(&b.entity_name))
9533 });
9534 Ok(out)
9535 }
9536
9537 match action {
9538 BridgesAction::Derive {
9539 frontier_a,
9540 label_a,
9541 frontier_b,
9542 label_b,
9543 json,
9544 } => {
9545 let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
9546 let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
9547 let now = chrono::Utc::now().to_rfc3339();
9548 let new_bridges =
9549 derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
9550
9551 let existing = list_bridges(&frontier_a).unwrap_or_default();
9555 let existing_by_id: HashMap<String, Bridge> =
9556 existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
9557 let mut written = 0;
9558 let mut preserved = 0;
9559 let mut new_ids = Vec::new();
9560 for mut bridge in new_bridges {
9561 if let Some(prev) = existing_by_id.get(&bridge.id)
9562 && prev.status != BridgeStatus::Derived
9563 {
9564 bridge.status = prev.status;
9566 bridge.derived_at = prev.derived_at.clone();
9567 preserved += 1;
9568 }
9569 save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
9570 new_ids.push(bridge.id.clone());
9571 written += 1;
9572 }
9573
9574 if json {
9575 println!(
9576 "{}",
9577 serde_json::to_string_pretty(&json!({
9578 "ok": true,
9579 "command": "bridges.derive",
9580 "frontier_a": frontier_a.display().to_string(),
9581 "frontier_b": frontier_b.display().to_string(),
9582 "bridges_written": written,
9583 "reviewer_judgments_preserved": preserved,
9584 "ids": new_ids,
9585 }))
9586 .expect("serialize bridges.derive")
9587 );
9588 return;
9589 }
9590
9591 println!();
9592 println!(
9593 " {}",
9594 format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
9595 .to_uppercase()
9596 .dimmed()
9597 );
9598 println!(" {}", style::tick_row(60));
9599 println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
9600 if preserved > 0 {
9601 println!(
9602 " {} {} reviewer judgment(s) preserved",
9603 style::ok("kept"),
9604 preserved
9605 );
9606 }
9607 for id in new_ids.iter().take(10) {
9608 println!(" · {id}");
9609 }
9610 if new_ids.len() > 10 {
9611 println!(" … and {} more", new_ids.len() - 10);
9612 }
9613 println!();
9614 }
9615 BridgesAction::List {
9616 frontier,
9617 status,
9618 json,
9619 } => {
9620 let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
9621 if let Some(s) = status.as_deref() {
9622 let want = match s.to_lowercase().as_str() {
9623 "derived" => BridgeStatus::Derived,
9624 "confirmed" => BridgeStatus::Confirmed,
9625 "refuted" => BridgeStatus::Refuted,
9626 other => fail_return(&format!(
9627 "unknown bridge status '{other}' (try derived|confirmed|refuted)"
9628 )),
9629 };
9630 bridges.retain(|b| b.status == want);
9631 }
9632 if json {
9633 println!(
9634 "{}",
9635 serde_json::to_string_pretty(&json!({
9636 "ok": true,
9637 "command": "bridges.list",
9638 "frontier": frontier.display().to_string(),
9639 "count": bridges.len(),
9640 "bridges": bridges,
9641 }))
9642 .expect("serialize bridges.list")
9643 );
9644 return;
9645 }
9646 println!();
9647 println!(
9648 " {}",
9649 format!("VELA · BRIDGES · LIST · {}", frontier.display())
9650 .to_uppercase()
9651 .dimmed()
9652 );
9653 println!(" {}", style::tick_row(60));
9654 println!(" {} bridge(s)", bridges.len());
9655 for b in &bridges {
9656 let chip = match b.status {
9657 BridgeStatus::Derived => style::warn("derived"),
9658 BridgeStatus::Confirmed => style::ok("confirmed"),
9659 BridgeStatus::Refuted => style::lost("refuted"),
9660 };
9661 println!();
9662 println!(
9663 " {chip} {} {} ↔ findings:{}",
9664 b.id,
9665 b.entity_name,
9666 b.finding_refs.len()
9667 );
9668 println!(" frontiers: {}", b.frontiers.join(", "));
9669 if let Some(t) = &b.tension {
9670 println!(" tension: {t}");
9671 }
9672 }
9673 println!();
9674 }
9675 BridgesAction::Show {
9676 frontier,
9677 bridge_id,
9678 json,
9679 } => {
9680 let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
9681 if json {
9682 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
9683 return;
9684 }
9685 println!();
9686 println!(
9687 " {}",
9688 format!("VELA · BRIDGES · SHOW · {}", b.id)
9689 .to_uppercase()
9690 .dimmed()
9691 );
9692 println!(" {}", style::tick_row(60));
9693 println!(" entity: {}", b.entity_name);
9694 println!(" status: {:?}", b.status);
9695 println!(" frontiers: {}", b.frontiers.join(", "));
9696 if !b.frontier_ids.is_empty() {
9697 println!(" frontier_ids: {}", b.frontier_ids.join(", "));
9698 }
9699 if let Some(t) = &b.tension {
9700 println!(" tension: {t}");
9701 }
9702 println!(" derived_at: {}", b.derived_at);
9703 println!(" finding refs ({}):", b.finding_refs.len());
9704 for r in &b.finding_refs {
9705 let dir = r.direction.as_deref().unwrap_or("—");
9706 let truncated: String = r.assertion_text.chars().take(72).collect();
9707 println!(
9708 " · [{}] {} (conf={:.2}, dir={})",
9709 r.frontier, r.finding_id, r.confidence, dir
9710 );
9711 println!(" {truncated}");
9712 }
9713 println!();
9714 }
9715 BridgesAction::Confirm {
9716 frontier,
9717 bridge_id,
9718 reviewer,
9719 note,
9720 json,
9721 } => {
9722 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
9723 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
9724 b.status = BridgeStatus::Confirmed;
9725 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
9726 let _ = emit_bridge_reviewed_event(
9730 &frontier,
9731 &bridge_id,
9732 "confirmed",
9733 &reviewer_id,
9734 note.as_deref(),
9735 );
9736 if json {
9737 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
9738 return;
9739 }
9740 println!();
9741 println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
9742 println!();
9743 }
9744 BridgesAction::Refute {
9745 frontier,
9746 bridge_id,
9747 reviewer,
9748 note,
9749 json,
9750 } => {
9751 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
9752 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
9753 b.status = BridgeStatus::Refuted;
9754 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
9755 let _ = emit_bridge_reviewed_event(
9756 &frontier,
9757 &bridge_id,
9758 "refuted",
9759 &reviewer_id,
9760 note.as_deref(),
9761 );
9762 if json {
9763 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
9764 return;
9765 }
9766 println!();
9767 println!(" {} {} now refuted", style::lost("refuted"), b.id);
9768 println!();
9769 }
9770 }
9771}
9772
/// Dispatch `vela federation …` subcommands: manage the peer-hub registry
/// stored in the frontier file (`PeerAdd`, `PeerList`, `PeerRemove`),
/// synchronize state with a peer (`Sync`), and forward a signed conflict
/// resolution (`PushResolution`).
///
/// All arms either print a JSON payload (when `--json`) or a styled
/// human-readable summary; failures go through `fail`/`fail_return`,
/// which terminate the process.
fn cmd_federation(action: FederationAction) {
    use crate::federation::PeerHub;

    match action {
        FederationAction::PeerAdd {
            frontier,
            id,
            url,
            pubkey,
            note,
            json,
        } => {
            // Build the registry entry up front; `validate()` gates it before
            // the frontier file is touched.
            let peer = PeerHub {
                id: id.clone(),
                url: url.clone(),
                public_key: pubkey.trim().to_string(),
                added_at: chrono::Utc::now().to_rfc3339(),
                note: note.clone(),
            };
            peer.validate().unwrap_or_else(|e| fail_return(&e));

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Peer ids are unique within a frontier's registry.
            if project.peers.iter().any(|p| p.id == id) {
                fail(&format!("peer '{id}' already in registry"));
            }
            project.peers.push(peer.clone());
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-add",
                        "frontier": frontier.display().to_string(),
                        "peer": peer,
                        "registered_count": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-add")
                );
            } else {
                // NOTE(review): `[..16]` panics if the public key is shorter
                // than 16 bytes; presumably `peer.validate()` enforces a full
                // hex key — TODO confirm, or use `[..16.min(len)]` like the
                // Sync arm below does.
                println!(
                    "{} peer {} (pubkey {}…) at {}",
                    style::ok("registered"),
                    id,
                    &peer.public_key[..16],
                    peer.url
                );
            }
        }
        FederationAction::PeerList { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-list",
                        "frontier": frontier.display().to_string(),
                        "peers": project.peers,
                    }))
                    .expect("serialize federation.peer-list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FEDERATION · PEERS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.peers.is_empty() {
                    println!(" (no peers registered)");
                } else {
                    for p in &project.peers {
                        let note_suffix = if p.note.is_empty() {
                            String::new()
                        } else {
                            format!(" · {}", p.note)
                        };
                        // NOTE(review): same `[..16]` slice — peers loaded
                        // from disk are not re-validated here, so a truncated
                        // key in the file would panic. TODO confirm.
                        println!(
                            " {:<24} {} {}…{note_suffix}",
                            p.id,
                            p.url,
                            &p.public_key[..16]
                        );
                    }
                }
            }
        }
        FederationAction::Sync {
            frontier,
            peer_id,
            url,
            via_hub,
            vfr_id,
            allow_cross_vfr,
            dry_run,
            json,
        } => {
            use crate::federation::{self, DiscoveryResult};

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
                fail(&format!(
                    "peer '{peer_id}' not in registry; run `vela federation peer add` first"
                ));
            };
            let local_frontier_id = project.frontier_id();

            // Cross-frontier sync is opt-in: syncing against a different
            // vfr_id records every peer finding as missing locally, so it is
            // refused unless --allow-cross-vfr was passed.
            if via_hub
                && let Some(target) = vfr_id.as_deref()
                && target != local_frontier_id
                && !allow_cross_vfr
            {
                fail(&format!(
                    "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
                    Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
                    missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
                ));
            }

            // Normalized discovery result: either a fetched peer frontier
            // (plus the URL it came from) or one of three failure shapes that
            // are each reported — and in two cases persisted — below.
            #[derive(Debug)]
            enum SyncOutcome {
                Resolved(crate::project::Project, String),
                BrokenLocator(String, String, u16),
                UnverifiedEntry(String, String),
                EntryNotFound(String, u16),
            }

            let outcome = if via_hub {
                // Hub path: resolve the frontier through the peer's registry
                // entry, verifying it against the peer's registered pubkey.
                let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
                match federation::discover_peer_frontier(
                    &peer.url,
                    &target_vfr,
                    Some(&peer.public_key),
                ) {
                    DiscoveryResult::Resolved(p) => {
                        let src =
                            format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
                        SyncOutcome::Resolved(p, src)
                    }
                    DiscoveryResult::BrokenLocator {
                        vfr_id,
                        locator,
                        status,
                    } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
                    DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
                        SyncOutcome::UnverifiedEntry(vfr_id, reason)
                    }
                    DiscoveryResult::EntryNotFound { vfr_id, status } => {
                        SyncOutcome::EntryNotFound(vfr_id, status)
                    }
                    DiscoveryResult::Unreachable { url, error } => {
                        fail(&format!("peer hub unreachable ({url}): {error}"));
                    }
                }
            } else {
                // Direct path: fetch the manifest from an explicit --url or
                // the conventional manifest location on the peer.
                let resolved_url = url.unwrap_or_else(|| {
                    let base = peer.url.trim_end_matches('/');
                    format!("{base}/manifest/{local_frontier_id}.json")
                });
                match federation::fetch_peer_frontier(&resolved_url) {
                    Ok(p) => SyncOutcome::Resolved(p, resolved_url),
                    Err(e) => fail(&format!("direct fetch failed: {e}")),
                }
            };

            let peer_source: String;
            let peer_state = match outcome {
                SyncOutcome::Resolved(p, src) => {
                    if !json {
                        println!(" · resolved via {src}");
                    }
                    peer_source = src;
                    p
                }
                SyncOutcomef::BrokenLocator(vfr, locator, status) => {
                    // dry-run: report only; otherwise record the failure as a
                    // conflict in the local frontier, then stop.
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "broken_locator",
                                    "vfr_id": vfr,
                                    "locator": locator,
                                    "http_status": status,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry resolved but locator dead",
                                style::warn("broken_locator")
                            );
                            println!(" vfr_id: {vfr}");
                            println!(" locator: {locator} (HTTP {status})");
                        }
                        return;
                    }
                    let report = federation::record_locator_failure(
                        &mut project,
                        &peer_id,
                        &vfr,
                        &locator,
                        status,
                    );
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "broken_locator",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync recorded broken-locator conflict against {peer_id}",
                            style::warn("broken_locator")
                        );
                        println!(" vfr_id: {vfr}");
                        println!(" locator: {locator} (HTTP {status})");
                        println!(" events appended: {}", report.events_appended);
                    }
                    return;
                }
                SyncOutcome::UnverifiedEntry(vfr, reason) => {
                    // Signature on the peer's registry entry did not verify:
                    // halt the sync; outside dry-run the incident is recorded.
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "unverified_peer_entry",
                                    "vfr_id": vfr,
                                    "reason": reason,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry signature did not verify",
                                style::lost("unverified_peer_entry")
                            );
                            println!(" vfr_id: {vfr}");
                            println!(" reason: {reason}");
                        }
                        return;
                    }
                    let report =
                        federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "unverified_peer_entry",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync halted; peer's registry entry signature did not verify",
                            style::lost("unverified_peer_entry")
                        );
                        println!(" vfr_id: {vfr}");
                        println!(" reason: {reason}");
                    }
                    return;
                }
                SyncOutcome::EntryNotFound(vfr, status) => {
                    // Nothing to record locally: the hub simply does not
                    // publish this vfr_id.
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": false,
                                "command": "federation.sync",
                                "outcome": "entry_not_found",
                                "vfr_id": vfr,
                                "http_status": status,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
                            style::warn("entry_not_found")
                        );
                    }
                    return;
                }
            };

            if dry_run {
                // Diff only; no events appended, no file written.
                let conflicts = federation::diff_frontiers(&project, &peer_state);
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "federation.sync",
                            "dry_run": true,
                            "peer_id": peer_id,
                            "peer_source": peer_source,
                            "conflicts": conflicts,
                        }))
                        .expect("serialize federation.sync (dry-run)")
                    );
                } else {
                    println!(
                        "{} dry-run vs {peer_id} ({}): {} conflict(s)",
                        style::ok("ok"),
                        peer_source,
                        conflicts.len()
                    );
                    for c in &conflicts {
                        println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                    }
                }
                return;
            }

            // Real sync: mutate the local project with the peer's state and
            // persist before reporting.
            let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.sync",
                        "peer_id": peer_id,
                        "peer_source": peer_source,
                        "report": report,
                    }))
                    .expect("serialize federation.sync")
                );
            } else {
                println!(
                    "{} synced with {} ({})",
                    style::ok("ok"),
                    peer_id,
                    peer_source
                );
                // Snapshot hashes are truncated defensively here (unlike the
                // pubkey slices above).
                println!(
                    " our: {}",
                    &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
                );
                println!(
                    " peer: {}",
                    &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
                );
                println!(
                    " conflicts: {} events appended: {}",
                    report.conflicts.len(),
                    report.events_appended
                );
                for c in &report.conflicts {
                    println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                }
            }
        }
        FederationAction::PushResolution {
            frontier,
            conflict_event_id,
            to,
            key,
            vfr_id,
            json,
        } => {
            // Delegated to its own function below.
            cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
        }
        FederationAction::PeerRemove { frontier, id, json } => {
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let before = project.peers.len();
            project.peers.retain(|p| p.id != id);
            // retain() removed nothing => the id was never registered.
            if project.peers.len() == before {
                fail(&format!("peer '{id}' not found in registry"));
            }
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-remove",
                        "frontier": frontier.display().to_string(),
                        "removed": id,
                        "remaining": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-remove")
                );
            } else {
                println!(
                    "{} peer {} ({} remaining)",
                    style::ok("removed"),
                    id,
                    project.peers.len()
                );
            }
        }
    }
}
10200
/// Push a signed `frontier.conflict_resolved` event to a registered peer hub.
///
/// Steps: look up the peer and the resolution event for `conflict_event_id`,
/// verify the resolution's actor is registered, load the actor's private key
/// (explicit `--key`, else `~/.config/vela/keys/<actor>.key`, else
/// `~/.config/vela/keys/private.key`), check it matches the actor's
/// registered public key, sign the event, and POST it to
/// `<peer>/entries/<vfr>/events` with signer headers. Exits non-zero when the
/// hub refuses the push.
fn cmd_federation_push_resolution(
    frontier: PathBuf,
    conflict_event_id: String,
    to: String,
    key: Option<PathBuf>,
    vfr_id: Option<String>,
    json: bool,
) {
    use crate::canonical;
    use crate::sign;

    let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

    let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
        fail(&format!(
            "peer '{to}' not in registry; run `vela federation peer-add` first"
        ));
    };

    // The resolution event references the conflict it resolves via its
    // payload's `conflict_event_id` field.
    let Some(resolution) = project
        .events
        .iter()
        .find(|e| {
            e.kind == "frontier.conflict_resolved"
                && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
                    == Some(conflict_event_id.as_str())
        })
        .cloned()
    else {
        fail(&format!(
            "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
            frontier.display()
        ));
    };

    // Only events by registered actors can be signed and pushed.
    let actor_id = resolution.actor.id.clone();
    let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
        fail(&format!(
            "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
            register the reviewer with `vela actor add` before pushing"
        ));
    };

    // Key resolution: per-actor key file wins over the shared private.key.
    let key_path = key.unwrap_or_else(|| {
        let home = std::env::var("HOME").unwrap_or_default();
        let base = PathBuf::from(home)
            .join(".config")
            .join("vela")
            .join("keys");
        let safe_id = actor.id.replace([':', '/'], "_");
        let by_actor = base.join(format!("{safe_id}.key"));
        if by_actor.exists() {
            by_actor
        } else {
            base.join("private.key")
        }
    });

    let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
        fail_return(&format!(
            "load private key from {}: {e}",
            key_path.display()
        ))
    });
    let pubkey_hex = sign::pubkey_hex(&signing_key);
    // Refuse to push with a key that does not belong to the event's actor.
    // NOTE(review): the `[..16]` slices below panic on keys shorter than 16
    // hex chars — presumably keys are always full-length; TODO confirm.
    if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
        fail(&format!(
            "private key at {} does not match actor {}'s registered public key. \
            Loaded pubkey {}, expected {}.",
            key_path.display(),
            actor.id,
            &pubkey_hex[..16],
            &actor.public_key[..16]
        ));
    }

    let signature_hex = sign::sign_event(&resolution, &signing_key)
        .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));

    // The POSTed body is the event without its local signature field; the
    // push signature travels in the X-Vela-Signature header instead.
    let mut body = resolution.clone();
    body.signature = None;
    let body_value =
        serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
    // Canonicalization is run purely as a sanity check that the body is
    // canonicalizable; the bytes themselves are discarded.
    let _canonical_check = canonical::to_canonical_bytes(&body_value)
        .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));

    let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
    let url = format!(
        "{}/entries/{}/events",
        peer.url.trim_end_matches('/'),
        target_vfr
    );

    // The blocking reqwest client is run on a dedicated thread — presumably
    // to keep it off any async runtime this CLI may be executing under
    // (blocking reqwest panics inside a tokio runtime). TODO confirm.
    let url_owned = url.clone();
    let pubkey_owned = pubkey_hex.clone();
    let signature_owned = signature_hex.clone();
    let body_owned = body_value.clone();
    let response: Result<(u16, String), String> = std::thread::spawn(move || {
        let client = reqwest::blocking::Client::new();
        let resp = client
            .post(&url_owned)
            .header("X-Vela-Signer-Pubkey", &pubkey_owned)
            .header("X-Vela-Signature", &signature_owned)
            .json(&body_owned)
            .send()
            .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
        let status = resp.status().as_u16();
        let text = resp.text().unwrap_or_default();
        Ok((status, text))
    })
    .join()
    .map_err(|_| "push thread panicked".to_string())
    .unwrap_or_else(|e| fail_return(&e));

    let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
    // Non-JSON responses are preserved verbatim under a "raw" key.
    let parsed: serde_json::Value =
        serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));

    let accepted = matches!(status, 200..=202);
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": accepted,
                "command": "federation.push-resolution",
                "frontier": frontier.display().to_string(),
                "peer_id": to,
                "url": url,
                "conflict_event_id": conflict_event_id,
                "event_id": resolution.id,
                "actor_id": actor.id,
                "http_status": status,
                "response": parsed,
            }))
            .expect("serialize federation.push-resolution")
        );
    } else if accepted {
        println!(
            "{} resolution {} pushed to {} (HTTP {})",
            style::ok("ok"),
            &resolution.id[..16.min(resolution.id.len())],
            to,
            status
        );
        println!(" url: {url}");
        println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
    } else {
        println!("{} push refused (HTTP {})", style::lost("rejected"), status);
        println!(" url: {url}");
        println!(" response: {text}");
        std::process::exit(1);
    }
}
10377
/// Dispatch `vela queue …` subcommands over the offline action queue:
/// `List` prints queued actions, `Clear` drops them, and `Sign` signs and
/// applies each action (interactively confirmed unless `--yes-to-all`),
/// writing back only the actions that were skipped or failed.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        QueueAction::List { queue_file, json } => {
            // An explicit --queue-file overrides the default location.
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            // `clear` reports how many actions it discarded.
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file holds the hex-encoded private key; trim handles a
            // trailing newline.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            // Actions the user declined or that failed stay in the queue.
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Keep the failed action queued so it can be retried.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
10519
10520fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
10521 let bytes = hex::decode(hex_str)
10522 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
10523 let key_bytes: [u8; 32] = bytes
10524 .try_into()
10525 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
10526 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
10527}
10528
10529fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
10530 use std::io::{self, BufRead, Write};
10531 let mut stdout = io::stdout().lock();
10532 let _ = writeln!(
10533 stdout,
10534 " sign {} on {}? [y/N] ",
10535 action.kind,
10536 action.frontier.display()
10537 );
10538 let _ = stdout.flush();
10539 drop(stdout);
10540 let stdin = io::stdin();
10541 let mut line = String::new();
10542 if stdin.lock().read_line(&mut line).is_err() {
10543 return false;
10544 }
10545 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
10546}
10547
/// Sign one queued action with `signing_key` on behalf of `actor` and apply
/// it to the action's frontier file.
///
/// Supported kinds: the four `propose_*` kinds (which build and create/apply
/// a proposal) and `accept_proposal`/`reject_proposal` (which review an
/// existing proposal). Returns a short human-readable report string for the
/// console, or an error message for unknown kinds / missing arguments /
/// failed application.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map queue-action kind -> proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            // All propose_* kinds require a target finding and a reason.
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Kind-specific payload pulled from the queued args.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued timestamp when present so offline queueing
            // preserves the original creation time.
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // The id is content-derived, so it must be recomputed after the
            // timestamp override above.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature is produced (and its failure
            // propagates via `?`) but the value is discarded — apparently it
            // is not attached to the proposal here. TODO confirm intended.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision; signed below.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): as above, the signature is computed but unused by
            // the accept/reject calls below. TODO confirm intended.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
10669
10670fn cmd_entity(action: EntityAction) {
10682 use crate::entity_resolve;
10683 match action {
10684 EntityAction::Resolve {
10685 frontier,
10686 force,
10687 json,
10688 } => {
10689 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10690 let report = entity_resolve::resolve_frontier(&mut p, force);
10691 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
10692 if json {
10693 println!(
10694 "{}",
10695 serde_json::to_string_pretty(&serde_json::json!({
10696 "ok": true,
10697 "command": "entity.resolve",
10698 "frontier_path": frontier.display().to_string(),
10699 "report": report,
10700 }))
10701 .expect("serialize")
10702 );
10703 } else {
10704 println!(
10705 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
10706 style::ok("entity"),
10707 report.resolved,
10708 report.total_entities,
10709 report.already_resolved,
10710 report.unresolved_count,
10711 report.findings_touched,
10712 );
10713 let unresolved_summary: std::collections::BTreeSet<&str> = report
10714 .per_finding
10715 .iter()
10716 .flat_map(|f| f.unresolved.iter().map(String::as_str))
10717 .collect();
10718 if !unresolved_summary.is_empty() {
10719 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
10720 println!(
10721 " unresolved (first {}): {}",
10722 take.len(),
10723 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
10724 );
10725 }
10726 }
10727 }
10728 EntityAction::List { json } => {
10729 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
10730 .map(|(name, etype, source, id)| {
10731 serde_json::json!({
10732 "canonical_name": name,
10733 "entity_type": etype,
10734 "source": source,
10735 "id": id,
10736 })
10737 })
10738 .collect();
10739 if json {
10740 println!(
10741 "{}",
10742 serde_json::to_string_pretty(&serde_json::json!({
10743 "ok": true,
10744 "command": "entity.list",
10745 "count": entries.len(),
10746 "entries": entries,
10747 }))
10748 .expect("serialize")
10749 );
10750 } else {
10751 println!("{} {} bundled entries", style::ok("entity"), entries.len());
10752 for e in &entries {
10753 println!(
10754 " {:32} {:18} {} {}",
10755 e["canonical_name"].as_str().unwrap_or("?"),
10756 e["entity_type"].as_str().unwrap_or("?"),
10757 e["source"].as_str().unwrap_or("?"),
10758 e["id"].as_str().unwrap_or("?"),
10759 );
10760 }
10761 }
10762 }
10763 }
10764}
10765
/// Dispatch `vela link …` subcommands. `Add` records a typed link from one
/// finding to another — either local (`vf_<hex>`) or cross-frontier
/// (`vf_<hex>@vfr_<hex>`) — after validating the link type, the inferred-by
/// attribution, and (for cross-frontier targets with an HTTP locator, unless
/// `--no-check-target`) best-effort probing the dep's published frontier for
/// the target finding.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // Local targets must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // Cross-frontier targets require a declared dep for the vfr.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check: fetch the dep's frontier over HTTP
            // and warn (never fail) when the target finding is missing or
            // superseded. Any network/parse failure silently skips the check.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
                                You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
                                Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
                            The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            // Stats are derived from links, so recompute before saving.
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Splice the optional warning into the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
10911
/// Handle `vela frontier <action>`.
///
/// Covers scaffolding a new frontier file (`new`), materializing its repo
/// layout (`materialize`), managing cross-frontier dependencies
/// (`add-dep` / `list-deps` / `remove-dep` / `refresh-deps`), and delegating
/// `diff` to `cmd_frontier_diff`. Every arm supports `--json` output.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new`: write an empty frontier skeleton to `path`.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Never clobber an existing file unless --force was given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // Fully-empty project, stamped with the current schema and
            // compiler versions so later tooling can validate it.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                // Human-readable onboarding checklist mirroring
                // payload.next_steps above.
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize`: expand the frontier into its repo layout.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep`: declare a cross-frontier dependency, pinned to
        // a specific snapshot hash.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Each vfr_id may be declared at most once.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                // Display name defaults to the vfr_id itself.
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        // `frontier list-deps`: print all declared dependencies (both
        // cross-frontier and compile-time kinds).
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        // Optional fields are printed only when present.
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep`: drop a declared cross-frontier dependency,
        // but only when no finding still links into that frontier.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Guard: any finding link whose target parses to a Cross ref on
            // this vfr_id blocks removal.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps`: re-pin each cross-frontier dep to the
        // hub's latest snapshot hash (or just report with --dry-run).
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to refresh: emit an all-zero summary and bail early.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal view of the hub's entry payload; only the latest
            // snapshot hash matters for refresh.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                // Classify each dep: missing (404), unreachable (transport /
                // HTTP / parse errors), unchanged, or refreshed.
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by vfr_id before mutating it.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Mutate the pin only outside dry-run.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Persist only when something actually changed.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Show only a 16-char prefix of each snapshot hash.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        // `frontier diff`: delegated to the dedicated diff reporter.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
11347
11348fn cmd_repo(action: RepoAction) {
11349 match action {
11350 RepoAction::Status { frontier, json } => {
11351 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
11352 if json {
11353 println!(
11354 "{}",
11355 serde_json::to_string_pretty(&payload)
11356 .expect("failed to serialize repo status")
11357 );
11358 } else {
11359 let summary = payload.get("summary").unwrap_or(&Value::Null);
11360 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
11361 println!("vela repo status");
11362 println!(" frontier: {}", frontier.display());
11363 println!(
11364 " events: {}",
11365 summary
11366 .get("accepted_events")
11367 .and_then(Value::as_u64)
11368 .unwrap_or_default()
11369 );
11370 println!(
11371 " open proposals: {}",
11372 summary
11373 .get("open_proposals")
11374 .and_then(Value::as_u64)
11375 .unwrap_or_default()
11376 );
11377 println!(
11378 " state: {}",
11379 freshness
11380 .get("materialized_state")
11381 .and_then(Value::as_str)
11382 .unwrap_or("unknown")
11383 );
11384 println!(
11385 " proof: {}",
11386 freshness
11387 .get("proof")
11388 .and_then(Value::as_str)
11389 .unwrap_or("unknown")
11390 );
11391 }
11392 }
11393 RepoAction::Doctor { frontier, json } => {
11394 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
11395 if json {
11396 println!(
11397 "{}",
11398 serde_json::to_string_pretty(&payload)
11399 .expect("failed to serialize repo doctor")
11400 );
11401 } else {
11402 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11403 let issues = payload
11404 .get("issues")
11405 .and_then(Value::as_array)
11406 .map_or(0, Vec::len);
11407 println!("vela repo doctor");
11408 println!(" frontier: {}", frontier.display());
11409 println!(" status: {}", if ok { "ok" } else { "needs attention" });
11410 println!(" issues: {issues}");
11411 }
11412 }
11413 }
11414}
11415
11416fn cmd_proof_verify(frontier: &Path, json_output: bool) {
11417 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
11418 if json_output {
11419 println!(
11420 "{}",
11421 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
11422 );
11423 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
11424 std::process::exit(1);
11425 }
11426 } else {
11427 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11428 println!("vela proof verify");
11429 println!(" frontier: {}", frontier.display());
11430 println!(" status: {}", if ok { "ok" } else { "failed" });
11431 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
11432 for issue in issues {
11433 if let Some(message) = issue.get("message").and_then(Value::as_str) {
11434 println!(" issue: {message}");
11435 }
11436 }
11437 }
11438 if !ok {
11439 std::process::exit(1);
11440 }
11441 }
11442}
11443
11444fn cmd_proof_explain(frontier: &Path) {
11445 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
11446 print!("{text}");
11447}
11448
/// Report what changed in a frontier over a time window.
///
/// The window is either `[--since, now)` or an ISO week (`--week YYYY-Www`,
/// defaulting to the current week). Classifies findings into added, updated,
/// and newly contested/tension ("contradiction") buckets, plus a cumulative
/// claim count, and prints either a JSON payload or a human report.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the reporting window: --since takes precedence over --week;
    // otherwise the current ISO week is used and its label kept for display.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are treated as absent rather than fatal.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        // Everything created before the window's end counts toward the
        // running cumulative total.
        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        // Created inside the window → "added" (and possibly a new
        // contradiction); `continue` so the same finding is not also
        // counted as "updated".
        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact JSON summary of a finding list for the payload.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable report: header, totals, then per-finding lists.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
11612
/// Shorten `s` to at most `n` visible characters.
///
/// Strings within the limit are returned unchanged; longer strings are cut
/// to `n - 1` characters (saturating at zero) with a trailing '…' so the
/// result still fits in `n` characters for any `n >= 1`.
fn truncate(s: &str, n: usize) -> String {
    match s.chars().count() {
        len if len <= n => s.to_string(),
        _ => {
            let keep = n.saturating_sub(1);
            let mut shortened: String = s.chars().take(keep).collect();
            shortened.push('…');
            shortened
        }
    }
}
11622
11623fn iso_week_key_for(d: chrono::NaiveDate) -> String {
11625 use chrono::Datelike;
11626 let iso = d.iso_week();
11627 format!("{:04}-W{:02}", iso.year(), iso.week())
11628}
11629
11630fn iso_week_bounds(
11633 key: &str,
11634) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
11635 let (year_str, week_str) = key
11636 .split_once("-W")
11637 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
11638 let year: i32 = year_str
11639 .parse()
11640 .map_err(|e| format!("bad year in '{key}': {e}"))?;
11641 let week: u32 = week_str
11642 .parse()
11643 .map_err(|e| format!("bad week in '{key}': {e}"))?;
11644 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
11645 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
11646 let next_monday = monday + chrono::Duration::days(7);
11647 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
11648 let end = next_monday
11649 .and_hms_opt(0, 0, 0)
11650 .expect("00:00 valid")
11651 .and_utc();
11652 Ok((start, end))
11653}
11654
11655fn cmd_registry(action: RegistryAction) {
11660 use crate::registry;
11661 let default_registry = || -> PathBuf {
11662 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
11663 PathBuf::from(home)
11664 .join(".vela")
11665 .join("registry")
11666 .join("entries.json")
11667 };
11668 match action {
11669 RegistryAction::DependsOn { vfr_id, from, json } => {
11670 let base = from.trim_end_matches('/');
11671 let url = format!("{base}/entries/{vfr_id}/depends-on");
11672 let client = reqwest::blocking::Client::builder()
11673 .timeout(std::time::Duration::from_secs(30))
11674 .build()
11675 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
11676 let resp = client
11677 .get(&url)
11678 .send()
11679 .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
11680 if !resp.status().is_success() {
11681 fail(&format!("GET {url}: HTTP {}", resp.status()));
11682 }
11683 let body: serde_json::Value = resp
11684 .json()
11685 .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
11686 if json {
11687 println!(
11688 "{}",
11689 serde_json::to_string_pretty(&body).expect("serialize")
11690 );
11691 } else {
11692 let dependents = body
11693 .get("dependents")
11694 .and_then(|v| v.as_array())
11695 .cloned()
11696 .unwrap_or_default();
11697 let count = dependents.len();
11698 println!(
11699 "{} {count} {} on {vfr_id}",
11700 style::ok("registry"),
11701 if count == 1 {
11702 "frontier depends"
11703 } else {
11704 "frontiers depend"
11705 },
11706 );
11707 for e in &dependents {
11708 let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
11709 let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
11710 let o = e
11711 .get("owner_actor_id")
11712 .and_then(|v| v.as_str())
11713 .unwrap_or("?");
11714 println!(" {v} {n} ({o})");
11715 }
11716 }
11717 }
11718 RegistryAction::Mirror {
11719 vfr_id,
11720 from,
11721 to,
11722 json,
11723 } => {
11724 let src_base = from.trim_end_matches('/');
11725 let dst_base = to.trim_end_matches('/');
11726 let src_url = format!("{src_base}/entries/{vfr_id}");
11727 let dst_url = format!("{dst_base}/entries");
11728 let client = reqwest::blocking::Client::builder()
11729 .timeout(std::time::Duration::from_secs(30))
11730 .build()
11731 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
11732
11733 let entry: serde_json::Value = client
11734 .get(&src_url)
11735 .send()
11736 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
11737 .error_for_status()
11738 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
11739 .json()
11740 .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
11741
11742 let resp = client
11743 .post(&dst_url)
11744 .header("content-type", "application/json")
11745 .body(
11746 serde_json::to_vec(&entry)
11747 .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
11748 )
11749 .send()
11750 .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
11751 let status = resp.status();
11752 if !status.is_success() {
11753 let body = resp.text().unwrap_or_default();
11754 fail(&format!(
11755 "POST {dst_url}: HTTP {status}: {}",
11756 body.chars().take(300).collect::<String>()
11757 ));
11758 }
11759 let body: serde_json::Value = resp
11760 .json()
11761 .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
11762 let duplicate = body
11763 .get("duplicate")
11764 .and_then(serde_json::Value::as_bool)
11765 .unwrap_or(false);
11766 let payload = json!({
11767 "ok": true,
11768 "command": "registry.mirror",
11769 "vfr_id": vfr_id,
11770 "from": src_base,
11771 "to": dst_base,
11772 "duplicate_on_destination": duplicate,
11773 "destination_response": body,
11774 });
11775 if json {
11776 println!(
11777 "{}",
11778 serde_json::to_string_pretty(&payload).expect("serialize")
11779 );
11780 } else {
11781 println!(
11782 "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
11783 style::ok("registry"),
11784 if duplicate {
11785 " (duplicate; signature already known)"
11786 } else {
11787 " (fresh insert)"
11788 }
11789 );
11790 }
11791 }
11792 RegistryAction::List { from, json } => {
11793 let (label, registry_data) = match &from {
11796 Some(loc) if loc.starts_with("http") => (
11797 loc.clone(),
11798 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
11799 ),
11800 Some(loc) => {
11801 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
11802 (
11803 p.display().to_string(),
11804 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
11805 )
11806 }
11807 None => {
11808 let p = default_registry();
11809 (
11810 p.display().to_string(),
11811 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
11812 )
11813 }
11814 };
11815 let r = registry_data;
11816 let path_label = label;
11817 if json {
11818 let payload = json!({
11819 "ok": true,
11820 "command": "registry.list",
11821 "registry": path_label,
11822 "entry_count": r.entries.len(),
11823 "entries": r.entries,
11824 });
11825 println!(
11826 "{}",
11827 serde_json::to_string_pretty(&payload)
11828 .expect("failed to serialize registry.list")
11829 );
11830 } else {
11831 println!();
11832 println!(
11833 " {}",
11834 format!("VELA · REGISTRY · LIST · {}", path_label)
11835 .to_uppercase()
11836 .dimmed()
11837 );
11838 println!(" {}", style::tick_row(60));
11839 if r.entries.is_empty() {
11840 println!(" (registry is empty)");
11841 } else {
11842 for entry in &r.entries {
11843 println!(
11844 " {} {} ({}) by {} published {}",
11845 entry.vfr_id,
11846 entry.name,
11847 entry.network_locator,
11848 entry.owner_actor_id,
11849 entry.signed_publish_at
11850 );
11851 }
11852 }
11853 }
11854 }
11855 RegistryAction::Publish {
11856 frontier,
11857 owner,
11858 key,
11859 locator,
11860 to,
11861 json,
11862 } => {
11863 let frontier_data = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11865 let snapshot_hash = events::snapshot_hash(&frontier_data);
11866 let event_log_hash = events::event_log_hash(&frontier_data.events);
11867 let vfr_id = frontier_data.frontier_id();
11868 let name = frontier_data.project.name.clone();
11869
11870 let pubkey = frontier_data
11872 .actors
11873 .iter()
11874 .find(|actor| actor.id == owner)
11875 .map(|actor| actor.public_key.clone())
11876 .unwrap_or_else(|| {
11877 fail_return(&format!(
11878 "owner '{owner}' is not registered in the frontier; run `vela actor add` first"
11879 ))
11880 });
11881
11882 let key_hex = std::fs::read_to_string(&key)
11884 .map(|s| s.trim().to_string())
11885 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
11886 let signing_key = parse_signing_key(&key_hex);
11887
11888 let derived = hex::encode(signing_key.verifying_key().to_bytes());
11890 if derived != pubkey {
11891 fail(&format!(
11892 "private key does not match registered pubkey for owner '{owner}'"
11893 ));
11894 }
11895
11896 let to_is_remote = matches!(
11904 to.as_deref(),
11905 Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
11906 );
11907 let resolved_locator = match locator {
11908 Some(l) => l,
11909 None => {
11910 if to_is_remote {
11911 let hub = to.as_deref().unwrap().trim_end_matches('/');
11912 let hub_root = hub.trim_end_matches("/entries");
11913 format!("{hub_root}/entries/{vfr_id}/snapshot")
11914 } else {
11915 fail_return(
11916 "--locator is required for local publishes; pass e.g. \
11917 --locator file:///path/to/frontier.json or an HTTPS URL.",
11918 )
11919 }
11920 }
11921 };
11922
11923 let mut entry = registry::RegistryEntry {
11924 schema: registry::ENTRY_SCHEMA.to_string(),
11925 vfr_id: vfr_id.clone(),
11926 name: name.clone(),
11927 owner_actor_id: owner.clone(),
11928 owner_pubkey: pubkey,
11929 latest_snapshot_hash: snapshot_hash,
11930 latest_event_log_hash: event_log_hash,
11931 network_locator: resolved_locator,
11932 signed_publish_at: chrono::Utc::now().to_rfc3339(),
11933 signature: String::new(),
11934 };
11935 entry.signature =
11936 registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
11937
11938 let (registry_label, duplicate) = if to_is_remote {
11939 let hub_url = to.clone().unwrap();
11940 let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
11944 .unwrap_or_else(|e| fail_return(&e));
11945 (hub_url, resp.duplicate)
11946 } else {
11947 let registry_path = match &to {
11948 Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
11949 None => default_registry(),
11950 };
11951 registry::publish_entry(®istry_path, entry.clone())
11952 .unwrap_or_else(|e| fail_return(&e));
11953 (registry_path.display().to_string(), false)
11954 };
11955
11956 let payload = json!({
11957 "ok": true,
11958 "command": "registry.publish",
11959 "registry": registry_label,
11960 "vfr_id": vfr_id,
11961 "name": name,
11962 "owner": owner,
11963 "snapshot_hash": entry.latest_snapshot_hash,
11964 "event_log_hash": entry.latest_event_log_hash,
11965 "signed_publish_at": entry.signed_publish_at,
11966 "signature": entry.signature,
11967 "duplicate": duplicate,
11968 });
11969 if json {
11970 println!(
11971 "{}",
11972 serde_json::to_string_pretty(&payload)
11973 .expect("failed to serialize registry.publish")
11974 );
11975 } else {
11976 let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
11977 println!(
11978 "{} published {vfr_id} → {}{}",
11979 style::ok("registry"),
11980 registry_label,
11981 dup_suffix
11982 );
11983 println!(" snapshot: {}", entry.latest_snapshot_hash);
11984 println!(" event_log: {}", entry.latest_event_log_hash);
11985 println!(" signature: {}…", &entry.signature[..16]);
11986 }
11987 }
11988 RegistryAction::Pull {
11989 vfr_id,
11990 from,
11991 out,
11992 transitive,
11993 depth,
11994 json,
11995 } => {
11996 let (registry_label, registry_data) = match &from {
12000 Some(loc) if loc.starts_with("http") => (
12001 loc.clone(),
12002 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12003 ),
12004 Some(loc) => {
12005 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12006 (
12007 p.display().to_string(),
12008 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12009 )
12010 }
12011 None => {
12012 let p = default_registry();
12013 (
12014 p.display().to_string(),
12015 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12016 )
12017 }
12018 };
12019 let entry = registry::find_latest(®istry_data, &vfr_id)
12020 .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
12021
12022 if transitive {
12023 let result = registry::pull_transitive(®istry_data, &vfr_id, &out, depth)
12027 .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
12028
12029 let dep_paths_json: serde_json::Value = serde_json::Value::Object(
12030 result
12031 .deps
12032 .iter()
12033 .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
12034 .collect(),
12035 );
12036 let payload = json!({
12037 "ok": true,
12038 "command": "registry.pull",
12039 "registry": registry_label,
12040 "vfr_id": vfr_id,
12041 "transitive": true,
12042 "depth": depth,
12043 "out_dir": out.display().to_string(),
12044 "primary": result.primary_path.display().to_string(),
12045 "verified": result.verified,
12046 "deps": dep_paths_json,
12047 });
12048 if json {
12049 println!(
12050 "{}",
12051 serde_json::to_string_pretty(&payload)
12052 .expect("failed to serialize registry.pull")
12053 );
12054 } else {
12055 println!(
12056 "{} pulled {vfr_id} (transitive) → {}",
12057 style::ok("registry"),
12058 out.display()
12059 );
12060 println!(" verified {} frontier(s):", result.verified.len());
12061 for v in &result.verified {
12062 println!(" · {v}");
12063 }
12064 println!(" every cross-frontier dependency's pinned snapshot hash matched");
12065 }
12066 return;
12067 }
12068
12069 registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
12072 .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
12073 registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
12074 let _ = std::fs::remove_file(&out);
12075 fail_return(&format!("pull verification failed: {e}"))
12076 });
12077
12078 let payload = json!({
12079 "ok": true,
12080 "command": "registry.pull",
12081 "registry": registry_label,
12082 "vfr_id": vfr_id,
12083 "out": out.display().to_string(),
12084 "snapshot_hash": entry.latest_snapshot_hash,
12085 "event_log_hash": entry.latest_event_log_hash,
12086 "verified": true,
12087 });
12088 if json {
12089 println!(
12090 "{}",
12091 serde_json::to_string_pretty(&payload)
12092 .expect("failed to serialize registry.pull")
12093 );
12094 } else {
12095 println!(
12096 "{} pulled {vfr_id} → {}",
12097 style::ok("registry"),
12098 out.display()
12099 );
12100 println!(" verified snapshot+event_log hashes match registry; signature ok");
12101 }
12102 }
12103 }
12104}
12105
12106fn print_stats_json(path: &Path) {
12107 let frontier = load_frontier_or_fail(path);
12108 let source_hash = hash_path_or_fail(path);
12109 let payload = json!({
12110 "ok": true,
12111 "command": "stats",
12112 "schema_version": project::VELA_SCHEMA_VERSION,
12113 "frontier": {
12114 "name": &frontier.project.name,
12115 "description": &frontier.project.description,
12116 "source": path.display().to_string(),
12117 "hash": format!("sha256:{source_hash}"),
12118 "compiled_at": &frontier.project.compiled_at,
12119 "compiler": &frontier.project.compiler,
12120 "papers_processed": frontier.project.papers_processed,
12121 "errors": frontier.project.errors,
12122 },
12123 "stats": frontier.stats,
12124 "proposals": proposals::summary(&frontier),
12125 "proof_state": frontier.proof_state,
12126 });
12127 println!(
12128 "{}",
12129 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
12130 );
12131}
12132
12133fn cmd_search(
12134 source: Option<&Path>,
12135 query: &str,
12136 entity: Option<&str>,
12137 assertion_type: Option<&str>,
12138 all: Option<&Path>,
12139 limit: usize,
12140 json_output: bool,
12141) {
12142 if let Some(dir) = all {
12143 search::run_all(dir, query, entity, assertion_type, limit);
12144 return;
12145 }
12146 let Some(src) = source else {
12147 fail("Provide --source <frontier> or --all <directory>.");
12148 };
12149 if json_output {
12150 let results = search::search(src, query, entity, assertion_type, limit);
12151 let loaded = load_frontier_or_fail(src);
12152 let source_hash = hash_path_or_fail(src);
12153 let payload = json!({
12154 "ok": true,
12155 "command": "search",
12156 "schema_version": project::VELA_SCHEMA_VERSION,
12157 "query": query,
12158 "frontier": {
12159 "name": &loaded.project.name,
12160 "source": src.display().to_string(),
12161 "hash": format!("sha256:{source_hash}"),
12162 },
12163 "filters": {
12164 "entity": entity,
12165 "assertion_type": assertion_type,
12166 "limit": limit,
12167 },
12168 "count": results.len(),
12169 "results": results.iter().map(|result| json!({
12170 "id": &result.id,
12171 "score": result.score,
12172 "assertion": &result.assertion,
12173 "assertion_type": &result.assertion_type,
12174 "confidence": result.confidence,
12175 "entities": &result.entities,
12176 "doi": &result.doi,
12177 })).collect::<Vec<_>>()
12178 });
12179 println!(
12180 "{}",
12181 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
12182 );
12183 } else {
12184 search::run(src, query, entity, assertion_type, limit);
12185 }
12186}
12187
12188fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
12189 let frontier = load_frontier_or_fail(source);
12190 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
12191 if json_output {
12192 let source_hash = hash_path_or_fail(source);
12193 let payload = json!({
12194 "ok": true,
12195 "command": "tensions",
12196 "schema_version": project::VELA_SCHEMA_VERSION,
12197 "frontier": {
12198 "name": &frontier.project.name,
12199 "source": source.display().to_string(),
12200 "hash": format!("sha256:{source_hash}"),
12201 },
12202 "filters": {
12203 "both_high": both_high,
12204 "cross_domain": cross_domain,
12205 "top": top,
12206 },
12207 "count": result.len(),
12208 "tensions": result.iter().map(|t| json!({
12209 "score": t.score,
12210 "resolved": t.resolved,
12211 "superseding_id": &t.superseding_id,
12212 "finding_a": {
12213 "id": &t.finding_a.id,
12214 "assertion": &t.finding_a.assertion,
12215 "confidence": t.finding_a.confidence,
12216 "assertion_type": &t.finding_a.assertion_type,
12217 "citation_count": t.finding_a.citation_count,
12218 "contradicts_count": t.finding_a.contradicts_count,
12219 },
12220 "finding_b": {
12221 "id": &t.finding_b.id,
12222 "assertion": &t.finding_b.assertion,
12223 "confidence": t.finding_b.confidence,
12224 "assertion_type": &t.finding_b.assertion_type,
12225 "citation_count": t.finding_b.citation_count,
12226 "contradicts_count": t.finding_b.contradicts_count,
12227 }
12228 })).collect::<Vec<_>>()
12229 });
12230 println!(
12231 "{}",
12232 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
12233 );
12234 } else {
12235 tensions::print_tensions(&result);
12236 }
12237}
12238
12239fn cmd_gaps(action: GapsAction) {
12240 match action {
12241 GapsAction::Rank {
12242 frontier,
12243 top,
12244 domain,
12245 json,
12246 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
12247 }
12248}
12249
12250fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
12251 let frontier = load_frontier_or_fail(frontier_path);
12252 let mut ranked = frontier
12253 .findings
12254 .iter()
12255 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
12256 .filter(|finding| {
12257 domain.is_none_or(|domain| {
12258 finding
12259 .assertion
12260 .text
12261 .to_lowercase()
12262 .contains(&domain.to_lowercase())
12263 || finding
12264 .assertion
12265 .entities
12266 .iter()
12267 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
12268 })
12269 })
12270 .map(|finding| {
12271 let dependency_count = frontier
12272 .findings
12273 .iter()
12274 .flat_map(|candidate| candidate.links.iter())
12275 .filter(|link| link.target == finding.id)
12276 .count();
12277 let score = dependency_count as f64 + finding.confidence.score;
12278 json!({
12279 "id": &finding.id,
12280 "kind": "candidate_gap_review_lead",
12281 "assertion": &finding.assertion.text,
12282 "score": score,
12283 "dependency_count": dependency_count,
12284 "confidence": finding.confidence.score,
12285 "evidence_type": &finding.evidence.evidence_type,
12286 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
12287 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
12288 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
12289 })
12290 })
12291 .collect::<Vec<_>>();
12292 ranked.sort_by(|a, b| {
12293 b.get("score")
12294 .and_then(Value::as_f64)
12295 .partial_cmp(&a.get("score").and_then(Value::as_f64))
12296 .unwrap_or(std::cmp::Ordering::Equal)
12297 });
12298 ranked.truncate(top);
12299 if json_output {
12300 let source_hash = hash_path_or_fail(frontier_path);
12301 let payload = json!({
12302 "ok": true,
12303 "command": "gaps rank",
12304 "schema_version": project::VELA_SCHEMA_VERSION,
12305 "frontier": {
12306 "name": &frontier.project.name,
12307 "source": frontier_path.display().to_string(),
12308 "hash": format!("sha256:{source_hash}"),
12309 },
12310 "filters": {
12311 "top": top,
12312 "domain": domain,
12313 },
12314 "count": ranked.len(),
12315 "ranking_label": "candidate gap review leads",
12316 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
12317 "review_leads": ranked.clone(),
12318 "gaps": ranked,
12319 });
12320 println!(
12321 "{}",
12322 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
12323 );
12324 } else {
12325 println!();
12326 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
12327 println!(" {}", style::tick_row(60));
12328 println!(" review source scope; these are not guaranteed experiment targets.");
12329 println!();
12330 for (idx, gap) in ranked.iter().enumerate() {
12331 println!(
12332 " {}. [{}] score={} {}",
12333 idx + 1,
12334 gap["id"].as_str().unwrap_or("?"),
12335 gap["score"].as_f64().unwrap_or(0.0),
12336 gap["assertion"].as_str().unwrap_or("")
12337 );
12338 }
12339 }
12340}
12341
/// Detect cross-frontier "bridge" entities over two or more frontier files
/// and print a report. With `check_novelty`, the top `top_n` bridges first
/// get a rough PubMed prior-art hit count attached.
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        // Each frontier's display name is derived from its file stem.
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    // detect_bridges takes (&str, &Project) pairs borrowed from the owned list.
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        // Only the first `top_n` bridges are checked, to limit API traffic.
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            match bridge::check_novelty(&client, &query).await {
                Ok(count) => bridge_item.pubmed_count = Some(count),
                // A failed check is reported but does not abort the run.
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // Crude rate limit between PubMed requests.
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
12387
/// Parsed arguments for the `bench` command (consumed by `cmd_bench`).
struct BenchArgs {
    /// Frontier to benchmark; defaults to frontiers/bbb-alzheimer.json.
    frontier: Option<PathBuf>,
    /// Finding-level gold file; pretty mode uses `benchmark::run`.
    gold: Option<PathBuf>,
    /// Entity-level gold file (always emits a JSON task envelope).
    entity_gold: Option<PathBuf>,
    /// Link-level gold file (always emits a JSON task envelope).
    link_gold: Option<PathBuf>,
    /// Benchmark suite definition; defaults to benchmarks/suites/bbb-core.json.
    suite: Option<PathBuf>,
    /// Only report whether the suite is runnable, then exit.
    suite_ready: bool,
    /// Minimum F1 gate (defaults to 0.05 unless `no_thresholds`).
    min_f1: Option<f64>,
    /// Minimum precision gate (no default).
    min_precision: Option<f64>,
    /// Minimum recall gate (no default).
    min_recall: Option<f64>,
    /// Disable every threshold gate.
    no_thresholds: bool,
    /// Emit machine-readable JSON instead of pretty output.
    json: bool,
}
12401
12402fn cmd_agent_bench(
12407 gold: &Path,
12408 candidate: &Path,
12409 sources: Option<&Path>,
12410 threshold: Option<f64>,
12411 report_path: Option<&Path>,
12412 json_out: bool,
12413) {
12414 let input = crate::agent_bench::BenchInput {
12415 gold_path: gold.to_path_buf(),
12416 candidate_path: candidate.to_path_buf(),
12417 sources: sources.map(Path::to_path_buf),
12418 threshold: threshold.unwrap_or(0.0),
12419 };
12420 let report = match crate::agent_bench::run(input) {
12421 Ok(r) => r,
12422 Err(e) => {
12423 eprintln!("{} bench failed: {e}", style::err_prefix());
12424 std::process::exit(1);
12425 }
12426 };
12427
12428 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
12429 if let Some(path) = report_path
12430 && let Err(e) = std::fs::write(path, &json)
12431 {
12432 eprintln!(
12433 "{} failed to write report to {}: {e}",
12434 style::err_prefix(),
12435 path.display()
12436 );
12437 }
12438
12439 if json_out {
12440 println!("{json}");
12441 } else {
12442 println!();
12443 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
12444 println!(" {}", style::tick_row(60));
12445 print!("{}", crate::agent_bench::render_pretty(&report));
12446 println!();
12447 }
12448
12449 if !report.pass {
12450 std::process::exit(1);
12451 }
12452}
12453
12454fn cmd_bench(args: BenchArgs) {
12455 if args.suite_ready {
12456 let suite_path = args
12457 .suite
12458 .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
12459 let payload =
12460 benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
12461 println!(
12462 "{}",
12463 serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
12464 );
12465 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12466 std::process::exit(1);
12467 }
12468 return;
12469 }
12470 if let Some(suite_path) = args.suite {
12471 let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
12472 if args.json {
12473 println!(
12474 "{}",
12475 serde_json::to_string_pretty(&payload)
12476 .expect("failed to serialize benchmark suite")
12477 );
12478 } else {
12479 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12480 let metrics = payload.get("metrics").unwrap_or(&Value::Null);
12481 println!();
12482 println!(" {}", "VELA · BENCH · SUITE".dimmed());
12483 println!(" {}", style::tick_row(60));
12484 println!(" suite: {}", suite_path.display());
12485 println!(
12486 " status: {}",
12487 if ok {
12488 style::ok("pass")
12489 } else {
12490 style::lost("fail")
12491 }
12492 );
12493 println!(
12494 " tasks: {}/{} passed",
12495 metrics
12496 .get("tasks_passed")
12497 .and_then(Value::as_u64)
12498 .unwrap_or(0),
12499 metrics
12500 .get("tasks_total")
12501 .and_then(Value::as_u64)
12502 .unwrap_or(0)
12503 );
12504 }
12505 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12506 std::process::exit(1);
12507 }
12508 return;
12509 }
12510
12511 let frontier = args
12512 .frontier
12513 .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
12514 let thresholds = benchmark::BenchmarkThresholds {
12515 min_f1: if args.no_thresholds {
12516 None
12517 } else {
12518 args.min_f1.or(Some(0.05))
12519 },
12520 min_precision: if args.no_thresholds {
12521 None
12522 } else {
12523 args.min_precision
12524 },
12525 min_recall: if args.no_thresholds {
12526 None
12527 } else {
12528 args.min_recall
12529 },
12530 ..Default::default()
12531 };
12532 if let Some(path) = args.link_gold {
12533 print_benchmark_or_exit(benchmark::task_envelope(
12534 &frontier,
12535 None,
12536 benchmark::BenchmarkMode::Link,
12537 Some(&path),
12538 &thresholds,
12539 None,
12540 ));
12541 } else if let Some(path) = args.entity_gold {
12542 print_benchmark_or_exit(benchmark::task_envelope(
12543 &frontier,
12544 None,
12545 benchmark::BenchmarkMode::Entity,
12546 Some(&path),
12547 &thresholds,
12548 None,
12549 ));
12550 } else if let Some(path) = args.gold {
12551 if args.json {
12552 print_benchmark_or_exit(benchmark::task_envelope(
12553 &frontier,
12554 None,
12555 benchmark::BenchmarkMode::Finding,
12556 Some(&path),
12557 &thresholds,
12558 None,
12559 ));
12560 } else {
12561 benchmark::run(&frontier, &path, false);
12562 }
12563 } else {
12564 fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
12565 }
12566}
12567
12568fn print_benchmark_or_exit(result: Result<Value, String>) {
12569 let payload = result.unwrap_or_else(|e| fail_return(&e));
12570 println!(
12571 "{}",
12572 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
12573 );
12574 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12575 std::process::exit(1);
12576 }
12577}
12578
12579fn cmd_packet(action: PacketAction) {
12580 let (result, json_output) = match action {
12581 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
12582 PacketAction::Validate { path, json } => (packet::validate(&path), json),
12583 };
12584 match result {
12585 Ok(output) if json_output => {
12586 println!(
12587 "{}",
12588 serde_json::to_string_pretty(&json!({
12589 "ok": true,
12590 "command": "packet",
12591 "result": output,
12592 }))
12593 .expect("failed to serialize packet response")
12594 );
12595 }
12596 Ok(output) => println!("{output}"),
12597 Err(e) => fail(&e),
12598 }
12599}
12600
12601fn cmd_verify(path: &Path, json_output: bool) {
12606 let result = packet::validate(path);
12607 match result {
12608 Ok(output) if json_output => {
12609 println!(
12610 "{}",
12611 serde_json::to_string_pretty(&json!({
12612 "ok": true,
12613 "command": "verify",
12614 "result": output,
12615 }))
12616 .expect("failed to serialize verify response")
12617 );
12618 }
12619 Ok(output) => {
12620 println!("{output}");
12621 println!(
12622 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
12623 );
12624 }
12625 Err(e) => fail(&e),
12626 }
12627}
12628
12629fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
12630 if path.join(".vela").exists() {
12631 fail(&format!(
12632 "already initialized: {} exists",
12633 path.join(".vela").display()
12634 ));
12635 }
12636 let payload = frontier_repo::initialize(
12637 path,
12638 frontier_repo::InitOptions {
12639 name,
12640 template,
12641 initialize_git,
12642 },
12643 )
12644 .unwrap_or_else(|e| fail_return(&e));
12645 if json_output {
12646 println!(
12647 "{}",
12648 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
12649 );
12650 } else {
12651 println!(
12652 "{} initialized frontier repository in {}",
12653 style::ok("ok"),
12654 path.display()
12655 );
12656 }
12657}
12658
12659fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
12660 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
12661 let target = into
12662 .map(Path::to_path_buf)
12663 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
12664 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
12665 println!(
12666 "{} {} findings · {}",
12667 style::ok("imported"),
12668 frontier.findings.len(),
12669 target.display()
12670 );
12671}
12672
12673fn cmd_locator_repair(
12674 path: &Path,
12675 atom_id: &str,
12676 locator_override: Option<&str>,
12677 reviewer: &str,
12678 reason: &str,
12679 apply: bool,
12680 json_output: bool,
12681) {
12682 let report = state::repair_evidence_atom_locator(
12683 path,
12684 atom_id,
12685 locator_override,
12686 reviewer,
12687 reason,
12688 apply,
12689 )
12690 .unwrap_or_else(|e| fail_return(&e));
12691 print_state_report(&report, json_output);
12692}
12693
12694async fn cmd_source_fetch(
12699 identifier: &str,
12700 cache_root: Option<&Path>,
12701 out_path: Option<&Path>,
12702 refresh: bool,
12703 _json_output: bool,
12704) {
12705 use sha2::{Digest, Sha256};
12706
12707 let normalized = normalize_source_identifier(identifier);
12708 let cache_path = cache_root.map(|root| {
12709 let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
12710 root.join("sources")
12711 .join("cache")
12712 .join(format!("{hash}.json"))
12713 });
12714
12715 if !refresh
12716 && let Some(p) = cache_path.as_ref()
12717 && p.is_file()
12718 {
12719 let body = std::fs::read_to_string(p)
12720 .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
12721 emit_source_fetch_result(&body, out_path);
12722 return;
12723 }
12724
12725 let result = fetch_source_metadata(&normalized).await;
12726 let json = match result {
12727 Ok(value) => serde_json::to_string_pretty(&value)
12728 .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
12729 Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
12730 };
12731
12732 if let Some(p) = cache_path.as_ref() {
12733 if let Some(parent) = p.parent() {
12734 std::fs::create_dir_all(parent)
12735 .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
12736 }
12737 std::fs::write(p, &json)
12738 .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
12739 }
12740 emit_source_fetch_result(&json, out_path);
12741}
12742
12743fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
12744 if let Some(p) = out_path {
12745 if let Some(parent) = p.parent() {
12746 let _ = std::fs::create_dir_all(parent);
12747 }
12748 std::fs::write(p, body)
12749 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
12750 } else {
12751 println!("{body}");
12752 }
12753}
12754
/// Normalize a raw source identifier into canonical `doi:` / `pmid:` /
/// `nct:` form.
///
/// Already-prefixed identifiers (`doi:`, `pmid:`, `nct:`, `pmc:`) pass
/// through untouched; bare DOIs ("10.…"), all-digit PubMed ids, and NCT
/// numbers get the matching prefix. Anything else is returned trimmed.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // BUG FIX: the previous code ended with `.split_at(0).0`, which always
        // yields "", so every bare NCT id normalized to just "nct:". Keep the
        // digits after the "NCT" marker instead.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    // Guard against the empty string, which previously normalized to "pmid:".
    if !trimmed.is_empty() && trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
12783
/// Dispatch a normalized identifier (`doi:` / `pmid:` / `nct:`) to the
/// matching metadata backend and return a `vela.source_fetch.v0.1` record.
///
/// DOIs go to Crossref first; when Crossref returns no abstract, the DOI is
/// resolved to a unique PMID and the abstract (plus an `abstract_source`
/// provenance field) is backfilled from PubMed.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Backfill: only when Crossref had no abstract AND the DOI resolves
        // to exactly one PMID AND PubMed's fetch succeeds.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                // Record where the borrowed abstract came from.
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
12833
12834async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
12838 let url = format!(
12839 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
12840 urlencoding::encode(doi)
12841 );
12842 let resp = client.get(&url).send().await.ok()?;
12843 if !resp.status().is_success() {
12844 return None;
12845 }
12846 let body: Value = resp.json().await.ok()?;
12847 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
12848 if id_list.len() != 1 {
12849 return None;
12852 }
12853 id_list.first()?.as_str().map(|s| s.to_string())
12854}
12855
12856async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
12857 let url = format!("https://api.crossref.org/works/{doi}");
12858 let resp = client
12859 .get(&url)
12860 .send()
12861 .await
12862 .map_err(|e| format!("crossref get: {e}"))?;
12863 if !resp.status().is_success() {
12864 return Err(format!("crossref returned {}", resp.status()));
12865 }
12866 let body: Value = resp
12867 .json()
12868 .await
12869 .map_err(|e| format!("crossref json: {e}"))?;
12870 let work = body.get("message").cloned().unwrap_or(Value::Null);
12871 let title = work
12872 .get("title")
12873 .and_then(|v| v.as_array())
12874 .and_then(|a| a.first())
12875 .and_then(|v| v.as_str())
12876 .unwrap_or("")
12877 .to_string();
12878 let abstract_html = work
12879 .get("abstract")
12880 .and_then(|v| v.as_str())
12881 .unwrap_or("")
12882 .to_string();
12883 let abstract_text = strip_jats_tags(&abstract_html);
12884 let year = work
12885 .get("issued")
12886 .and_then(|v| v.get("date-parts"))
12887 .and_then(|v| v.as_array())
12888 .and_then(|a| a.first())
12889 .and_then(|v| v.as_array())
12890 .and_then(|a| a.first())
12891 .and_then(|v| v.as_i64());
12892 let journal = work
12893 .get("container-title")
12894 .and_then(|v| v.as_array())
12895 .and_then(|a| a.first())
12896 .and_then(|v| v.as_str())
12897 .unwrap_or("")
12898 .to_string();
12899 let authors = work
12900 .get("author")
12901 .and_then(|v| v.as_array())
12902 .map(|arr| {
12903 arr.iter()
12904 .filter_map(|a| {
12905 let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
12906 let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
12907 let combined = format!("{given} {family}").trim().to_string();
12908 if combined.is_empty() {
12909 None
12910 } else {
12911 Some(combined)
12912 }
12913 })
12914 .collect::<Vec<_>>()
12915 })
12916 .unwrap_or_default();
12917 Ok(json!({
12918 "schema": "vela.source_fetch.v0.1",
12919 "identifier": format!("doi:{doi}"),
12920 "source": "crossref",
12921 "title": title,
12922 "abstract": abstract_text,
12923 "year": year,
12924 "journal": journal,
12925 "authors": authors,
12926 "retrieved_at": chrono::Utc::now().to_rfc3339(),
12927 }))
12928}
12929
12930async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
12931 let url = format!(
12932 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
12933 );
12934 let resp = client
12935 .get(&url)
12936 .send()
12937 .await
12938 .map_err(|e| format!("pubmed get: {e}"))?;
12939 if !resp.status().is_success() {
12940 return Err(format!("pubmed returned {}", resp.status()));
12941 }
12942 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
12943 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
12944 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
12945 let year = extract_xml_text(&xml, "<Year>", "</Year>")
12946 .parse::<i64>()
12947 .ok();
12948 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
12949 Ok(json!({
12950 "schema": "vela.source_fetch.v0.1",
12951 "identifier": format!("pmid:{pmid}"),
12952 "source": "pubmed",
12953 "title": title,
12954 "abstract": abstract_text,
12955 "year": year,
12956 "journal": journal,
12957 "authors": Vec::<String>::new(),
12958 "retrieved_at": chrono::Utc::now().to_rfc3339(),
12959 }))
12960}
12961
12962async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
12963 let nct_clean = nct.trim();
12964 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
12965 nct_clean.to_uppercase()
12966 } else {
12967 format!("NCT{nct_clean}")
12968 };
12969 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
12970 let resp = client
12971 .get(&url)
12972 .send()
12973 .await
12974 .map_err(|e| format!("ctgov get: {e}"))?;
12975 if !resp.status().is_success() {
12976 return Err(format!("ctgov returned {}", resp.status()));
12977 }
12978 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
12979 let title = body
12980 .pointer("/protocolSection/identificationModule/briefTitle")
12981 .and_then(|v| v.as_str())
12982 .unwrap_or("")
12983 .to_string();
12984 let abstract_text = body
12985 .pointer("/protocolSection/descriptionModule/briefSummary")
12986 .and_then(|v| v.as_str())
12987 .unwrap_or("")
12988 .to_string();
12989 let phase = body
12990 .pointer("/protocolSection/designModule/phases")
12991 .and_then(|v| v.as_array())
12992 .and_then(|a| a.first())
12993 .and_then(|v| v.as_str())
12994 .unwrap_or("")
12995 .to_string();
12996 Ok(json!({
12997 "schema": "vela.source_fetch.v0.1",
12998 "identifier": format!("nct:{nct_id}"),
12999 "source": "clinicaltrials.gov",
13000 "title": title,
13001 "abstract": abstract_text,
13002 "year": Value::Null,
13003 "journal": phase,
13004 "authors": Vec::<String>::new(),
13005 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13006 }))
13007}
13008
/// Return the trimmed text between the first `open` marker and the next
/// `close` marker in `xml`, or an empty string when either is absent.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.split_once(open)
        .and_then(|(_, tail)| tail.split_once(close))
        .map(|(inner, _)| inner.trim().to_string())
        .unwrap_or_default()
}
13018
/// Strip JATS/XML markup from an abstract: drop everything between '<' and
/// '>' and collapse whitespace runs into single spaces.
fn strip_jats_tags(html: &str) -> String {
    let mut inside_tag = false;
    let text: String = html
        .chars()
        .filter(|&c| match c {
            '<' => {
                inside_tag = true;
                false
            }
            '>' => {
                inside_tag = false;
                false
            }
            _ => !inside_tag,
        })
        .collect();
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
13032
13033fn cmd_span_repair(
13034 path: &Path,
13035 finding_id: &str,
13036 section: &str,
13037 text: &str,
13038 reviewer: &str,
13039 reason: &str,
13040 apply: bool,
13041 json_output: bool,
13042) {
13043 let report =
13044 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
13045 .unwrap_or_else(|e| fail_return(&e));
13046 print_state_report(&report, json_output);
13047}
13048
13049#[allow(clippy::too_many_arguments)]
13050fn cmd_entity_resolve(
13051 path: &Path,
13052 finding_id: &str,
13053 entity_name: &str,
13054 source: &str,
13055 id: &str,
13056 confidence: f64,
13057 matched_name: Option<&str>,
13058 resolution_method: &str,
13059 reviewer: &str,
13060 reason: &str,
13061 apply: bool,
13062 json_output: bool,
13063) {
13064 let report = state::resolve_finding_entity(
13065 path,
13066 finding_id,
13067 entity_name,
13068 source,
13069 id,
13070 confidence,
13071 matched_name,
13072 resolution_method,
13073 reviewer,
13074 reason,
13075 apply,
13076 )
13077 .unwrap_or_else(|e| fail_return(&e));
13078 print_state_report(&report, json_output);
13079}
13080
/// Simulate and persist propagation of a correction across dependency links.
///
/// Exactly one of `retract` / `reduce_confidence` selects the target finding
/// and the kind of correction; `reduce_confidence` additionally requires `to`
/// (the replacement confidence score, clamped to [0.0, 1.0] by validation).
/// The updated frontier is written to `output` when given, otherwise back to
/// `path` in place.
fn cmd_propagate(
    path: &Path,
    retract: Option<String>,
    reduce_confidence: Option<String>,
    to: Option<f64>,
    output: Option<&Path>,
) {
    let mut frontier = load_frontier_or_fail(path);
    // Decide which correction was requested. `fail`/`fail_return` diverge
    // (the code only typechecks if they never return normally), so the tuple
    // is produced only for a valid flag combination.
    let (finding_id, action, label) = if let Some(id) = retract {
        (id, propagate::PropagationAction::Retracted, "retraction")
    } else if let Some(id) = reduce_confidence {
        let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
        if !(0.0..=1.0).contains(&score) {
            fail("--to must be between 0.0 and 1.0");
        }
        (
            id,
            propagate::PropagationAction::ConfidenceReduced { new_score: score },
            "confidence reduction",
        )
    } else {
        fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
    };
    // Validate the target before mutating anything.
    if !frontier.findings.iter().any(|f| f.id == finding_id) {
        fail(&format!("finding not found: {finding_id}"));
    }
    let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
    // Propagation events become part of the frontier's review history.
    frontier.review_events.extend(result.events.clone());
    project::recompute_stats(&mut frontier);
    propagate::print_result(&result, label, &finding_id);
    // Default to overwriting the input frontier unless --output was given.
    let out = output.unwrap_or(path);
    repo::save_to_path(out, &frontier).expect("Failed to save frontier");
    println!(" output: {}", out.display());
}
13119
/// Print a ready-to-paste MCP server configuration for the chosen source.
///
/// A single frontier file (`source`) takes precedence over a `--frontiers`
/// directory; with neither given, the default `frontier.json` is assumed.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    let (args, source_desc) = match (source, frontiers) {
        (Some(path), _) => (
            format!(r#""serve", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, Some(path)) => (
            format!(r#""serve", "--frontiers", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, None) => (
            r#""serve", "frontier.json""#.to_string(),
            "frontier.json".to_string(),
        ),
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
13147
13148fn parse_entities(input: &str) -> Vec<(String, String)> {
13149 if input.trim().is_empty() {
13150 return Vec::new();
13151 }
13152 input
13153 .split(',')
13154 .filter_map(|pair| {
13155 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
13156 if parts.len() == 2 {
13157 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
13158 } else {
13159 eprintln!(
13160 "{} skipping malformed entity '{}'",
13161 style::warn("warn"),
13162 pair.trim()
13163 );
13164 None
13165 }
13166 })
13167 .collect()
13168}
13169
13170fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
13171 inputs
13172 .iter()
13173 .filter_map(|input| {
13174 let trimmed = input.trim();
13175 if trimmed.is_empty() {
13176 return None;
13177 }
13178 if trimmed.starts_with('{') {
13179 match serde_json::from_str::<Value>(trimmed) {
13180 Ok(value @ Value::Object(_)) => return Some(value),
13181 Ok(_) | Err(_) => {
13182 eprintln!(
13183 "{} evidence span JSON should be an object; storing as text",
13184 style::warn("warn")
13185 );
13186 }
13187 }
13188 }
13189 Some(json!({
13190 "section": "curator_source",
13191 "text": trimmed,
13192 }))
13193 })
13194 .collect()
13195}
13196
13197fn hash_path(path: &Path) -> Result<String, String> {
13198 let mut hasher = Sha256::new();
13199 if path.is_file() {
13200 let bytes = std::fs::read(path)
13201 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
13202 hasher.update(&bytes);
13203 } else if path.is_dir() {
13204 let mut files = Vec::new();
13205 collect_hash_files(path, path, &mut files)?;
13206 files.sort();
13207 for rel in files {
13208 hasher.update(rel.to_string_lossy().as_bytes());
13209 let bytes = std::fs::read(path.join(&rel))
13210 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
13211 hasher.update(bytes);
13212 }
13213 } else {
13214 return Err(format!("Cannot hash missing path {}", path.display()));
13215 }
13216 Ok(format!("{:x}", hasher.finalize()))
13217}
13218
13219fn load_frontier_or_fail(path: &Path) -> project::Project {
13220 repo::load_from_path(path).unwrap_or_else(|e| {
13221 fail_return(&format!(
13222 "Failed to load frontier '{}': {e}",
13223 path.display()
13224 ))
13225 })
13226}
13227
13228fn hash_path_or_fail(path: &Path) -> String {
13229 hash_path(path).unwrap_or_else(|e| {
13230 fail_return(&format!(
13231 "Failed to hash frontier '{}': {e}",
13232 path.display()
13233 ))
13234 })
13235}
13236
/// Recursively gather every file under `dir`, recorded relative to `root`.
///
/// Directories are descended into; entries that are neither files nor
/// directories (e.g. broken symlinks) are skipped. Output order follows the
/// OS directory listing, so callers sort before hashing.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
13255
13256fn schema_error_suggestion(error: &str) -> &'static str {
13257 if schema_error_action(error).is_some() {
13258 "Run `vela normalize` to repair deterministic frontier state."
13259 } else {
13260 "Inspect and correct the referenced frontier field."
13261 }
13262}
13263
13264fn schema_error_fix(error: &str) -> bool {
13265 schema_error_action(error).is_some()
13266}
13267
/// Map a schema error message to the normalize action that repairs it.
///
/// Metadata/stats errors normalize in place; content-address mismatches
/// need an id rewrite. Anything else is not mechanically repairable and
/// yields `None`.
fn schema_error_action(error: &str) -> Option<&'static str> {
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
13282
13283fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
13284 let mut actions = std::collections::BTreeMap::<String, usize>::new();
13285 for diagnostic in diagnostics {
13286 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
13287 *actions.entry(action.to_string()).or_default() += 1;
13288 }
13289 }
13290 actions
13291 .into_iter()
13292 .map(|(action, count)| {
13293 let command = if action == "rewrite_ids" {
13294 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
13295 } else {
13296 "vela normalize <frontier> --write"
13297 };
13298 json!({
13299 "action": action,
13300 "count": count,
13301 "command": command,
13302 })
13303 })
13304 .collect()
13305}
13306
13307fn cmd_integrity(frontier: &Path, json: bool) {
13308 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
13309 if json {
13310 println!(
13311 "{}",
13312 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
13313 );
13314 } else {
13315 println!("vela integrity");
13316 println!(" frontier: {}", frontier.display());
13317 println!(" status: {}", report.status);
13318 println!(" proof freshness: {}", report.proof_freshness);
13319 println!(" structural errors: {}", report.structural_errors.len());
13320 for error in report.structural_errors.iter().take(8) {
13321 println!(" - {}: {}", error.rule_id, error.message);
13322 }
13323 }
13324}
13325
13326fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
13327 let report =
13328 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
13329 if json {
13330 println!(
13331 "{}",
13332 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
13333 );
13334 } else {
13335 println!("vela impact");
13336 println!(" finding: {}", report.target.id);
13337 println!(" frontier: {}", report.frontier.vfr_id);
13338 println!(" direct dependents: {}", report.summary.direct_dependents);
13339 println!(" downstream: {}", report.summary.total_downstream);
13340 println!(" open proposals: {}", report.summary.open_proposals);
13341 println!(" accepted events: {}", report.summary.accepted_events);
13342 println!(" proof: {}", report.summary.proof_status);
13343 }
13344}
13345
13346fn empty_signal_report() -> signals::SignalReport {
13347 signals::SignalReport {
13348 schema: "vela.signals.v0".to_string(),
13349 frontier: "unavailable".to_string(),
13350 signals: Vec::new(),
13351 review_queue: Vec::new(),
13352 proof_readiness: signals::ProofReadiness {
13353 status: "unavailable".to_string(),
13354 blockers: 0,
13355 warnings: 0,
13356 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
13357 },
13358 }
13359}
13360
13361fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
13362 println!();
13363 println!(" {}", "SIGNALS".dimmed());
13364 println!(" {}", style::tick_row(60));
13365 println!(" total signals: {}", report.signals.len());
13366 println!(" proof readiness: {}", report.proof_readiness.status);
13367 if !report.review_queue.is_empty() {
13368 println!(" review queue: {} items", report.review_queue.len());
13369 }
13370 if strict && report.proof_readiness.status != "ready" {
13371 println!(
13372 " {} proof readiness has blocking signals.",
13373 style::lost("strict check failed")
13374 );
13375 }
13376}
13377
13378fn append_packet_json_file(
13379 packet_dir: &Path,
13380 relative_path: &str,
13381 value: &Value,
13382) -> Result<(), String> {
13383 let content = serde_json::to_vec_pretty(value)
13384 .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
13385 let path = packet_dir.join(relative_path);
13386 if let Some(parent) = path.parent() {
13387 std::fs::create_dir_all(parent)
13388 .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
13389 }
13390 std::fs::write(&path, &content)
13391 .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
13392 let entry = json!({
13393 "path": relative_path,
13394 "sha256": hex::encode(Sha256::digest(&content)),
13395 "bytes": content.len(),
13396 });
13397
13398 for manifest_name in ["manifest.json", "packet.lock.json"] {
13399 let manifest_path = packet_dir.join(manifest_name);
13400 let data = std::fs::read_to_string(&manifest_path)
13401 .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
13402 let mut manifest: Value = serde_json::from_str(&data)
13403 .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
13404 let array_key = if manifest_name == "manifest.json" {
13405 "included_files"
13406 } else {
13407 "files"
13408 };
13409 let files = manifest
13410 .get_mut(array_key)
13411 .and_then(Value::as_array_mut)
13412 .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
13413 files.retain(|file| {
13414 file.get("path")
13415 .and_then(Value::as_str)
13416 .is_none_or(|path| path != relative_path)
13417 });
13418 files.push(entry.clone());
13419 std::fs::write(
13420 &manifest_path,
13421 serde_json::to_vec_pretty(&manifest)
13422 .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
13423 )
13424 .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
13425 }
13426
13427 let lock_path = packet_dir.join("packet.lock.json");
13428 let lock_content = std::fs::read(&lock_path)
13429 .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
13430 let lock_entry = json!({
13431 "path": "packet.lock.json",
13432 "sha256": hex::encode(Sha256::digest(&lock_content)),
13433 "bytes": lock_content.len(),
13434 });
13435 let manifest_path = packet_dir.join("manifest.json");
13436 let data = std::fs::read_to_string(&manifest_path)
13437 .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
13438 let mut manifest: Value = serde_json::from_str(&data)
13439 .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
13440 let files = manifest
13441 .get_mut("included_files")
13442 .and_then(Value::as_array_mut)
13443 .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
13444 files.retain(|file| {
13445 file.get("path")
13446 .and_then(Value::as_str)
13447 .is_none_or(|path| path != "packet.lock.json")
13448 });
13449 files.push(lock_entry);
13450 std::fs::write(
13451 &manifest_path,
13452 serde_json::to_vec_pretty(&manifest)
13453 .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
13454 )
13455 .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
13456 Ok(())
13457}
13458
13459fn print_tool_check_report(report: &Value) {
13460 let summary = report.get("summary").unwrap_or(&Value::Null);
13461 let frontier = report.get("frontier").unwrap_or(&Value::Null);
13462 println!();
13463 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
13464 println!(" {}", style::tick_row(60));
13465 println!(
13466 "frontier: {}",
13467 frontier
13468 .get("name")
13469 .and_then(Value::as_str)
13470 .unwrap_or("unknown")
13471 );
13472 println!(
13473 "findings: {}",
13474 frontier
13475 .get("findings")
13476 .and_then(Value::as_u64)
13477 .unwrap_or_default()
13478 );
13479 println!(
13480 "checks: {} passed, {} failed",
13481 summary
13482 .get("passed")
13483 .and_then(Value::as_u64)
13484 .unwrap_or_default(),
13485 summary
13486 .get("failed")
13487 .and_then(Value::as_u64)
13488 .unwrap_or_default()
13489 );
13490 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
13491 let names = tools
13492 .iter()
13493 .filter_map(Value::as_str)
13494 .collect::<Vec<_>>()
13495 .join(", ");
13496 println!("tools: {names}");
13497 }
13498 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
13499 for check in checks {
13500 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
13501 style::ok("ok")
13502 } else {
13503 style::lost("lost")
13504 };
13505 println!(
13506 " {} {}",
13507 status,
13508 check
13509 .get("tool")
13510 .and_then(Value::as_str)
13511 .unwrap_or("unknown")
13512 );
13513 }
13514 }
13515}
13516
13517fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
13518 if json_output {
13519 println!(
13520 "{}",
13521 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
13522 );
13523 } else {
13524 println!("{}", report.message);
13525 println!(" frontier: {}", report.frontier);
13526 println!(" finding: {}", report.finding_id);
13527 println!(" proposal: {}", report.proposal_id);
13528 println!(" status: {}", report.proposal_status);
13529 if let Some(event_id) = &report.applied_event_id {
13530 println!(" event: {}", event_id);
13531 }
13532 println!(" wrote: {}", report.wrote_to);
13533 }
13534}
13535
13536fn print_history(payload: &Value) {
13537 let finding = payload.get("finding").unwrap_or(&Value::Null);
13538 println!("vela history");
13539 println!(
13540 " finding: {}",
13541 finding
13542 .get("id")
13543 .and_then(Value::as_str)
13544 .unwrap_or("unknown")
13545 );
13546 println!(
13547 " assertion: {}",
13548 finding
13549 .get("assertion")
13550 .and_then(Value::as_str)
13551 .unwrap_or("")
13552 );
13553 println!(
13554 " confidence: {:.3}",
13555 finding
13556 .get("confidence")
13557 .and_then(Value::as_f64)
13558 .unwrap_or_default()
13559 );
13560 let reviews = payload
13561 .get("review_events")
13562 .and_then(Value::as_array)
13563 .map_or(0, Vec::len);
13564 let updates = payload
13565 .get("confidence_updates")
13566 .and_then(Value::as_array)
13567 .map_or(0, Vec::len);
13568 let annotations = finding
13569 .get("annotations")
13570 .and_then(Value::as_array)
13571 .map_or(0, Vec::len);
13572 let sources = payload
13573 .get("sources")
13574 .and_then(Value::as_array)
13575 .map_or(0, Vec::len);
13576 let atoms = payload
13577 .get("evidence_atoms")
13578 .and_then(Value::as_array)
13579 .map_or(0, Vec::len);
13580 let conditions = payload
13581 .get("condition_records")
13582 .and_then(Value::as_array)
13583 .map_or(0, Vec::len);
13584 let proposals = payload
13585 .get("proposals")
13586 .and_then(Value::as_array)
13587 .map_or(0, Vec::len);
13588 let events = payload
13589 .get("events")
13590 .and_then(Value::as_array)
13591 .map_or(0, Vec::len);
13592 println!(" review events: {reviews}");
13593 println!(" confidence updates: {updates}");
13594 println!(" annotations: {annotations}");
13595 println!(" sources: {sources}");
13596 println!(" evidence atoms: {atoms}");
13597 println!(" condition records: {conditions}");
13598 println!(" proposals: {proposals}");
13599 println!(" canonical events: {events}");
13600 if let Some(status) = payload
13601 .get("proof_state")
13602 .and_then(|value| value.get("latest_packet"))
13603 .and_then(|value| value.get("status"))
13604 .and_then(Value::as_str)
13605 {
13606 println!(" proof state: {status}");
13607 }
13608 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
13609 for event in events.iter().take(8) {
13610 println!(
13611 " - {} {} {}",
13612 event
13613 .get("reviewed_at")
13614 .and_then(Value::as_str)
13615 .unwrap_or(""),
13616 event.get("id").and_then(Value::as_str).unwrap_or(""),
13617 event.get("reason").and_then(Value::as_str).unwrap_or("")
13618 );
13619 }
13620 }
13621}
13622
/// Serializable provenance record emitted by proof-related commands.
///
/// NOTE(review): field semantics below are inferred from names and sibling
/// code in this file — confirm against the code that populates the trace.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag of the trace format itself.
    pub trace_version: String,
    /// The CLI invocation that produced this trace.
    pub command: Vec<String>,
    /// Frontier source (path or identifier) the trace refers to.
    pub source: String,
    /// Content hash of `source` (presumably from `hash_path` — confirm).
    pub source_hash: String,
    /// Schema version of the frontier being traced.
    pub schema_version: String,
    /// Artifacts that were checked during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results, present only when benchmarks ran.
    pub benchmark: Option<Value>,
    /// Status/description of the packet manifest step.
    pub packet_manifest: String,
    /// Status/description of the packet validation step.
    pub packet_validation: String,
    /// Caveats that qualify the trace's claims.
    pub caveats: Vec<String>,
    /// Overall outcome of the proof run.
    pub status: String,
    /// Filesystem location where this trace was written.
    pub trace_path: String,
}
13638
/// Every subcommand name that belongs to the science CLI surface.
///
/// Used by the dispatcher to decide whether a bare first argument is a
/// known subcommand (as opposed to, say, a session query).
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
];

/// True when `name` exactly matches one of the science subcommands.
pub fn is_science_subcommand(name: &str) -> bool {
    SCIENCE_SUBCOMMANDS.iter().any(|&candidate| candidate == name)
}
13766
/// Print the full (advanced) help text: every subcommand grouped by
/// workflow, plus quick-start, substrate-health, and publishing recipes.
/// The version placeholder is filled from the crate version at compile time.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
  vela <COMMAND>

Core flow (v0.74):
  init              Initialize a split frontier repo
  ingest            Ingest a paper, dataset, or Carina packet (dispatches by file type)
  propose           Create a finding.review proposal
  diff              Preview a `vpr_*` proposal, or compare two frontier files
  accept            Apply a proposal under reviewer authority
  attest            Sign findings under your private key
  log               Recent canonical state events
  lineage           State-transition replay for one finding
  serve             Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
  check             Validate a frontier, repo, or proof packet
  integrity         Check accepted frontier state integrity
  impact            Report downstream finding impact
  normalize         Apply deterministic frontier-state repairs
  proof             Export and validate a proof packet
  repo              Inspect split frontier repository status and shape
  stats             Show frontier statistics
  search            Search findings
  tensions          List candidate contradictions and tensions
  gaps              Inspect and rank candidate gap review leads
  bridge            Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
  scout             Run Literature Scout against a folder of PDFs (writes proposals)
  compile-notes     Run Notes Compiler against a Markdown vault (writes proposals)
  compile-code      Run Code & Notebook Analyst against a research repo (writes proposals)
  compile-data      Run Datasets agent against a folder of CSV/TSV data (writes proposals)
  review-pending    Run Reviewer Agent: score every pending proposal (writes notes)
  find-tensions     Run Contradiction Finder: surface real contradictions among findings
  plan-experiments  Run Experiment Planner: propose experiments for open questions / hypotheses
  export            Export frontier artifacts
  packet            Inspect or validate proof packets
  bench             Run deterministic benchmark gates
  conformance       Run protocol conformance vectors
  sign              Optional signing and signature verification
  runtime-adapter
                    Normalize external runtime exports into reviewable proposals
  version           Show version information
  import            Import frontier.json into a .vela repo
  proposals         Inspect, validate, export, import, accept, or reject write proposals
  artifact-to-state
                    Import a Carina artifact packet as reviewable proposals
  bridge-kit
                    Validate Carina artifact packets before importing runtime output
  source-adapter
                    Run reviewed source adapters into artifact-to-state proposals
  finding           Add or manage finding bundles as frontier state
  link              Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
  entity            Resolve unresolved entities against a bundled common-entity table (v0.19)
  frontier          Scaffold (`new`), materialize, and manage frontier metadata + deps
  actor             Register Ed25519 publisher identities in a frontier
  registry          Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
  review            Create a review proposal or review interactively
  note              Add a lightweight note to a finding
  caveat            Create an explicit caveat proposal
  revise            Create a confidence revision proposal
  reject            Create a rejection proposal
  history           Show state-transition history for one finding (v0.74 alias: `lineage`)
  import-events     Import review/state events from a packet or JSON file
  retract           Create a retraction proposal
  propagate         Simulate impact over declared dependency links
  artifact-add      Register a content-addressed artifact
  artifacts         List content-addressed artifacts
  artifact-audit    Audit artifact locators, hashes, references, and profiles
  decision-brief    Show the validated decision brief projection
  trial-summary     Show the validated trial outcome projection
  source-verification Show the validated source verification projection
  source-ingest-plan Show the validated source ingest plan
  clinical-trial-import Import a ClinicalTrials.gov record as an artifact
  locator-repair    Mechanically repair an evidence atom's missing source locator
  span-repair       Mechanically repair a finding's missing evidence span
  entity-resolve    Resolve a finding entity to a canonical id
  source-fetch      Fetch metadata + abstract for a doi:/pmid:/nct: source

Quick start (the demo):
  vela init demo --name "Your bounded question"
  vela ingest paper.pdf --frontier demo
  vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
  vela diff <vpr_id> --frontier demo
  vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
  vela serve --path demo

Substrate health:
  vela frontier materialize my-frontier --json
  vela repo status my-frontier --json
  vela proof verify my-frontier --json
  vela check my-frontier --strict --json

Monolithic frontier file:
  vela frontier new frontier.json --name "Your bounded question"
  vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
  vela check frontier.json --json
  FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
  vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
  vela frontier new ./frontier.json --name "Your bounded question"
  vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
  vela sign generate-keypair --out keys
  vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
  vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
    --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
13883
/// Async entry point for the Literature Scout agent (`scout` subcommand).
/// Returns a boxed future so the dispatcher can await it without knowing
/// the concrete implementation.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// NOTE(review): presumably set once at startup by whichever binary links in
// the agent implementation — confirm against the caller.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Register the scout handler. Only the first registration takes effect;
/// the `OnceLock::set` error from any later call is deliberately discarded.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
13908
/// Async entry point for the Notes Compiler agent (`compile-notes`).
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Register the notes handler. First registration wins; later calls are
/// silently ignored (`OnceLock::set` errors are discarded).
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}
13928
/// Async entry point for the Code & Notebook Analyst agent (`compile-code`).
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Register the code handler. First registration wins; later calls are
/// silently ignored (`OnceLock::set` errors are discarded).
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}
13945
/// Async entry point for the Datasets agent (`compile-data`).
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Register the datasets handler. First registration wins; later calls are
/// silently ignored (`OnceLock::set` errors are discarded).
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}
13962
/// Async entry point for the Reviewer agent (`review-pending`).
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Register the reviewer handler. First registration wins; later calls are
/// silently ignored (`OnceLock::set` errors are discarded).
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}
13979
/// Async entry point for the Contradiction Finder agent (`find-tensions`).
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Register the tensions handler. First registration wins; later calls are
/// silently ignored (`OnceLock::set` errors are discarded).
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}
13995
/// Async entry point for the Experiment Planner agent (`plan-experiments`).
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Register the experiments handler. First registration wins; later calls
/// are silently ignored (`OnceLock::set` errors are discarded).
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
14011
/// Walk up from the current directory to the nearest `.vela/` repository.
///
/// Checks the current directory first, then each ancestor up to the
/// filesystem root; returns `None` when no ancestor contains a `.vela`
/// directory (or the current directory cannot be determined).
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
14038
/// Print the short session-mode help screen: core flow, daily commands,
/// reasoning/composition/publish groups, and the in-session key hints.
fn print_session_help() {
    println!();
    println!(
        "  Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!("  USAGE");
    println!("    vela                 Open a session against the nearest .vela/ repo");
    println!("    vela <command>       Run a specific subcommand");
    println!("    vela help advanced   Full subcommand list (30+ commands)");
    println!();
    println!("  CORE FLOW (v0.74)");
    println!("    init                 Initialize a split frontier repo");
    println!("    ingest <path>        Ingest a paper, dataset, or Carina packet");
    println!("    propose              Create a finding.review proposal");
    println!("    diff <vpr_id>        Preview a pending proposal vs current frontier");
    println!("    accept <vpr_id>      Apply a proposal under reviewer authority");
    println!("    attest               Sign findings under your private key");
    println!("    log                  Recent canonical state events");
    println!("    lineage <vf_id>      State-transition replay for one finding");
    println!("    serve                Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!("  DAILY ALSO-RANS");
    println!("    status               One-screen frontier health");
    println!("    inbox                Pending review proposals");
    println!("    review               Review a proposal interactively");
    println!("    ask <question>       Plain-text query against the frontier");
    println!();
    println!("  REASONING (Pearl 1 → 2 → 3)");
    println!("    causal audit                     Per-finding identifiability");
    println!("    causal effect <src> --on <tgt>   Pairwise back-door / front-door");
    println!("    causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!("  COMPOSITION");
    println!("    bridge <a> <b>       Cross-frontier hypotheses");
    println!("    consensus <vf>       Field consensus over similar claims");
    println!();
    println!("  PUBLISH");
    println!("    registry publish     Push a signed manifest to the hub");
    println!("    federation peer-add  Federate with another hub");
    println!();
    println!("  In session, type a single letter for a quick verb, or any");
    println!("  question in plain text. `q` or `exit` quits.");
    println!();
}
14085
14086fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
14087 use crate::causal_reasoning::{audit_frontier, summarize_audit};
14088
14089 let label = frontier_label(project);
14090 let vfr = project.frontier_id();
14091 let vfr_short = vfr.chars().take(16).collect::<String>();
14092
14093 let mut pending = 0usize;
14094 let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
14095 for p in &project.proposals {
14096 if p.status == "pending_review" {
14097 pending += 1;
14098 *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
14099 }
14100 }
14101
14102 let audit = audit_frontier(project);
14103 let audit_summary = summarize_audit(&audit);
14104
14105 let bridges_dir = repo_path.join(".vela/bridges");
14106 let mut bridge_total = 0usize;
14107 let mut bridge_confirmed = 0usize;
14108 let mut bridge_derived = 0usize;
14109 if bridges_dir.is_dir()
14110 && let Ok(entries) = std::fs::read_dir(&bridges_dir)
14111 {
14112 for entry in entries.flatten() {
14113 let path = entry.path();
14114 if path.extension().and_then(|s| s.to_str()) != Some("json") {
14115 continue;
14116 }
14117 bridge_total += 1;
14118 if let Ok(data) = std::fs::read_to_string(&path)
14119 && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
14120 {
14121 match b.status {
14122 crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
14123 crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
14124 _ => {}
14125 }
14126 }
14127 }
14128 }
14129
14130 let mut targets_with_success = std::collections::HashSet::new();
14131 let mut failed_replications = 0usize;
14132 for r in &project.replications {
14133 if r.outcome == "replicated" {
14134 targets_with_success.insert(r.target_finding.clone());
14135 } else if r.outcome == "failed" {
14136 failed_replications += 1;
14137 }
14138 }
14139
14140 println!();
14141 let version = crate::project::VELA_COMPILER_VERSION
14142 .strip_prefix("vela/")
14143 .unwrap_or(crate::project::VELA_COMPILER_VERSION);
14144 println!(
14145 " {}",
14146 format!("VELA · {version} · {label}")
14147 .to_uppercase()
14148 .dimmed()
14149 );
14150 println!(" {}", style::tick_row(60));
14151 println!(
14152 " vfr_id {}… repo {}",
14153 vfr_short,
14154 repo_path.display()
14155 );
14156 println!(
14157 " findings {:>4} events {} proposals pending {}",
14158 project.findings.len(),
14159 project.events.len(),
14160 pending
14161 );
14162
14163 if pending > 0 {
14164 let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
14165 println!(" {} · {}", style::warn("inbox"), parts.join(" "));
14166 }
14167 if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
14168 println!(
14169 " {} · {} underidentified · {} conditional",
14170 if audit_summary.underidentified > 0 {
14171 style::lost("audit")
14172 } else {
14173 style::warn("audit")
14174 },
14175 audit_summary.underidentified,
14176 audit_summary.conditional,
14177 );
14178 }
14179 if bridge_total > 0 {
14180 println!(
14181 " {} · {} total · {} confirmed · {} awaiting review",
14182 style::ok("bridges"),
14183 bridge_total,
14184 bridge_confirmed,
14185 bridge_derived
14186 );
14187 }
14188 if !project.replications.is_empty() {
14189 println!(
14190 " {} · {} records · {} findings replicated · {} failed",
14191 style::ok("replications"),
14192 project.replications.len(),
14193 targets_with_success.len(),
14194 failed_replications,
14195 );
14196 }
14197
14198 println!();
14199 println!(" type a verb or ask anything:");
14200 println!(" a audit problems i inbox (pending) b bridges");
14201 println!(" g causal graph l log (recent) c counterfactuals");
14202 println!(" s refresh status h help (more verbs) q quit");
14203 println!();
14204}
14205
14206fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
14208 match verb {
14209 "a" | "audit" => {
14210 let action = CausalAction::Audit {
14211 frontier: repo_path.to_path_buf(),
14212 problems_only: true,
14213 json: false,
14214 };
14215 cmd_causal(action);
14216 true
14217 }
14218 "i" | "inbox" => {
14219 let action = ProposalAction::List {
14220 frontier: repo_path.to_path_buf(),
14221 status: Some("pending_review".into()),
14222 json: false,
14223 };
14224 cmd_proposals(action);
14225 true
14226 }
14227 "b" | "bridges" => {
14228 let action = BridgesAction::List {
14229 frontier: repo_path.to_path_buf(),
14230 status: None,
14231 json: false,
14232 };
14233 cmd_bridges(action);
14234 true
14235 }
14236 "g" | "graph" => {
14237 let action = CausalAction::Graph {
14238 frontier: repo_path.to_path_buf(),
14239 node: None,
14240 json: false,
14241 };
14242 cmd_causal(action);
14243 true
14244 }
14245 "l" | "log" => {
14246 cmd_log(repo_path, 10, None, false);
14247 true
14248 }
14249 "c" | "counterfactual" | "counterfactuals" => {
14250 let project = match repo::load_from_path(repo_path) {
14253 Ok(p) => p,
14254 Err(e) => {
14255 eprintln!("{} {e}", style::err_prefix());
14256 return true;
14257 }
14258 };
14259 println!();
14260 println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
14261 println!(" {}", style::tick_row(60));
14262 let mut pairs = 0usize;
14266 for child in &project.findings {
14267 for link in &child.links {
14268 if !matches!(link.link_type.as_str(), "depends" | "supports") {
14269 continue;
14270 }
14271 if link.mechanism.is_none() {
14272 continue;
14273 }
14274 let parent = link
14275 .target
14276 .split_once(':')
14277 .map_or(link.target.as_str(), |(_, r)| r);
14278 pairs += 1;
14279 if pairs <= 10 {
14280 println!(" · do({parent}) → {}", child.id);
14281 }
14282 }
14283 }
14284 if pairs == 0 {
14285 println!(" no mechanism-annotated edges found.");
14286 println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
14287 } else {
14288 println!();
14289 println!(" {pairs} live pair(s). Run with:");
14290 println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
14291 }
14292 println!();
14293 true
14294 }
14295 "s" | "status" | "refresh" => {
14296 match repo::load_from_path(repo_path) {
14298 Ok(p) => print_session_dashboard(&p, repo_path),
14299 Err(e) => eprintln!("{} {e}", style::err_prefix()),
14300 }
14301 true
14302 }
14303 "h" | "help" | "?" => {
14304 print_session_help();
14305 true
14306 }
14307 _ => false,
14308 }
14309}
14310
14311fn run_session() {
14312 let repo_path = match find_vela_repo() {
14313 Some(p) => p,
14314 None => {
14315 println!();
14316 println!(
14317 " {}",
14318 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
14319 );
14320 println!(" {}", style::tick_row(60));
14321 println!(" Run `vela init` here to create a frontier, or cd into one.");
14322 println!(" Or run `vela help` for the command list.");
14323 println!();
14324 return;
14325 }
14326 };
14327
14328 let project = match repo::load_from_path(&repo_path) {
14329 Ok(p) => p,
14330 Err(e) => {
14331 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
14332 std::process::exit(1);
14333 }
14334 };
14335
14336 print_session_dashboard(&project, &repo_path);
14337
14338 use std::io::{BufRead, Write};
14339 let stdin = std::io::stdin();
14340 let mut stdout = std::io::stdout();
14341 loop {
14342 print!(" > ");
14343 stdout.flush().ok();
14344 let mut line = String::new();
14345 if stdin.lock().read_line(&mut line).is_err() {
14346 break;
14347 }
14348 let input = line.trim();
14349 if input.is_empty() {
14350 continue;
14351 }
14352 if matches!(input, "q" | "quit" | "exit") {
14353 break;
14354 }
14355 if run_session_verb(input, &repo_path) {
14356 continue;
14357 }
14358 let project = match repo::load_from_path(&repo_path) {
14360 Ok(p) => p,
14361 Err(e) => {
14362 eprintln!("{} {e}", style::err_prefix());
14363 continue;
14364 }
14365 };
14366 answer(&project, input, false);
14367 }
14368}
14369
14370pub fn run_from_args() {
14371 style::init();
14372 let args = std::env::args().collect::<Vec<_>>();
14373 match args.get(1).map(String::as_str) {
14374 None => {
14378 run_session();
14379 return;
14380 }
14381 Some("-h" | "--help" | "help") => {
14382 if args.get(2).map(String::as_str) == Some("advanced") {
14385 print_strict_help();
14386 } else {
14387 print_session_help();
14388 }
14389 return;
14390 }
14391 Some("-V" | "--version" | "version") => {
14392 println!("vela {}", env!("CARGO_PKG_VERSION"));
14393 return;
14394 }
14395 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
14396 let json = args.iter().any(|arg| arg == "--json");
14397 let frontier = args
14398 .iter()
14399 .skip(3)
14400 .find(|arg| !arg.starts_with('-'))
14401 .map(PathBuf::from)
14402 .unwrap_or_else(|| {
14403 eprintln!(
14404 "{} proof verify requires a frontier repo",
14405 style::err_prefix()
14406 );
14407 std::process::exit(2);
14408 });
14409 cmd_proof_verify(&frontier, json);
14410 return;
14411 }
14412 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
14413 let frontier = args
14414 .iter()
14415 .skip(3)
14416 .find(|arg| !arg.starts_with('-'))
14417 .map(PathBuf::from)
14418 .unwrap_or_else(|| {
14419 eprintln!(
14420 "{} proof explain requires a frontier repo",
14421 style::err_prefix()
14422 );
14423 std::process::exit(2);
14424 });
14425 cmd_proof_explain(&frontier);
14426 return;
14427 }
14428 Some(cmd) if !is_science_subcommand(cmd) => {
14429 eprintln!(
14430 "{} unknown or non-release command: {cmd}",
14431 style::err_prefix()
14432 );
14433 eprintln!("run `vela --help` for the strict v0 command surface.");
14434 std::process::exit(2);
14435 }
14436 Some(_) => {}
14437 }
14438 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
14439 runtime.block_on(run_command());
14440}
14441
14442fn fail(message: &str) -> ! {
14443 eprintln!("{} {message}", style::err_prefix());
14444 std::process::exit(1);
14445}
14446
14447fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
14452 if !valid.contains(&value) {
14453 fail(&format!(
14454 "invalid {flag} '{value}'. Valid: {}",
14455 valid.join(", ")
14456 ));
14457 }
14458}
14459
/// Typed wrapper around [`fail`] for call sites that need an expression of
/// some type `T` (e.g. the arm of a `match`): `fail` diverges (`!`), so
/// this never actually produces a `T` — it terminates the process.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}