1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Top-level argument parser for the `vela` binary. clap derives the whole
// CLI from this struct: binary name "vela", `--version` from Cargo metadata,
// and one required subcommand drawn from the `Commands` enum below.
// NOTE(review): `///` doc comments on clap items become --help text; the
// original help strings appear stripped from this view, so comments here are
// plain `//` to leave generated help output untouched.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
29
// All top-level `vela` subcommands. Attribute conventions throughout:
//   #[arg(long)]            -> `--flag` option
//   #[arg(short, long)]     -> `-x` / `--flag`
//   #[command(subcommand)]  -> nested action enum (its own subcommand level)
// Nearly every command takes a frontier path and a `--json` switch for
// machine-readable output (presumably; handlers live elsewhere in the crate).
// NOTE(review): comments are `//`, not `///`, so clap's --help is unchanged.
#[derive(Subcommand)]
enum Commands {
    // --- ingestion / compilation pipelines (optionally backed by an
    // external `--backend`, and all supporting `--dry-run`) ---
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        // proposals reviewed per batch; defaults to one at a time
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // --- validation / inspection ---
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    Serve {
        // positional frontier may be omitted only when --frontiers or
        // --setup is supplied instead (enforced by clap)
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Ask {
        frontier: PathBuf,
        // everything after the frontier path is swallowed as the free-form
        // question, so no quoting is required on the shell
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        // ArgAction::Set makes this an explicit `--novelty true|false`
        // value rather than a presence flag; it defaults to on
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Benchmarking against gold data; every input is optional so the
    // handler decides which evaluation mode applies.
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    Version,
    // --- nested subcommand groups ---
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    // --- project setup ---
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    Quickstart {
        #[arg(default_value = "demo")]
        path: PathBuf,
        #[arg(long, default_value = "Quickstart frontier")]
        name: String,
        #[arg(long, default_value = "reviewer:you")]
        reviewer: String,
        #[arg(long)]
        assertion: Option<String>,
        #[arg(long)]
        keys_out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Lock {
        path: PathBuf,
        #[arg(long)]
        check: bool,
        #[arg(long)]
        json: bool,
    },
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    Diff {
        // `target` vs optional positional `frontier_b`, with an optional
        // `--frontier` override — exact dispatch is decided by the handler
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // --- curation verbs on a single finding; `--apply` gates whether the
    // change is committed (presumably vs. preview — TODO confirm in handler) ---
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // --- replication / provenance records ---
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        // repeatable flags: each occurrence appends one element
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    // Records a negative/null result; most fields are optional study
    // metadata and which subset applies presumably depends on `--kind`.
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // --- prediction market / calibration commands ---
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    PredictionsExpire {
        frontier: PathBuf,
        // override "now" for deterministic expiry runs (RFC3339, presumably)
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Ingest {
        // free-form source locator (a String, not PathBuf — may be a URL
        // or identifier; handler decides)
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        // either a precomputed --signature or a --key file to sign with
        // (mutual-exclusion, if any, is enforced in the handler)
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1774
// Actions under `vela atlas …`. An atlas aggregates multiple frontiers and
// is stored under an `--atlases-root` directory (default "atlases").
#[derive(Subcommand)]
enum AtlasAction {
    Init {
        name: String,
        // comma-separated and/or repeated; at least one value per occurrence
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1851
// Actions under `vela constellation …`. A constellation aggregates atlases,
// mirroring the atlas commands one level up (root default "constellations",
// serve port 3849 vs. the atlas server's 3848).
#[derive(Subcommand)]
enum ConstellationAction {
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1896
// Actions under `vela carina …` (handled by the `carina_validate` module,
// per the crate imports): validate a file against a primitive, list
// primitives, or print one primitive's schema.
#[derive(Subcommand)]
enum CarinaAction {
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
    Schema { primitive: String },
}
1926
// Actions under `vela packet …`: inspect or validate a packet file.
#[derive(Subcommand)]
enum PacketAction {
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1942
// Actions under `vela sign …`: keypair generation, signing a frontier,
// verifying signatures, and setting a per-finding signature threshold.
#[derive(Subcommand)]
enum SignAction {
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        // required signature count (u32), set via --to
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
1983
// Actions under `vela actor …`: register an actor (id + public key, with
// optional tier/ORCID/clearance metadata) or list actors.
#[derive(Subcommand)]
enum ActorAction {
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2020
// Actions under `vela causal …`: audit causal annotations, query an effect
// of `source` on a target (via --on), dump the causal graph, or run a
// counterfactual intervention.
#[derive(Subcommand)]
enum CausalAction {
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    Effect {
        frontier: PathBuf,
        source: String,
        // the affected variable: `vela causal effect SRC --on TARGET`
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2087
// Actions under `vela bridges …`: derive candidate bridges between two
// frontiers, then list/show and confirm or refute individual bridges.
#[derive(Subcommand)]
enum BridgesAction {
    Derive {
        frontier_a: PathBuf,
        // display labels for the two sides of the comparison
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2155
// Actions under `vela federation …`: manage peers, sync state with a peer
// (directly or via hub), and push a conflict resolution to a peer.
#[derive(Subcommand)]
enum FederationAction {
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    Sync {
        frontier: PathBuf,
        peer_id: String,
        // direct URL override; --via-hub routes through the hub instead
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        // escape hatch: permit syncing across distinct VFR identities
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        // explicit long name: field `to` surfaces as `--to <PEER>`
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2272
// Actions under `vela frontier …`: create/materialize a frontier and manage
// its dependencies on other frontiers (by VFR id), plus a time-windowed diff.
#[derive(Subcommand)]
enum FrontierAction {
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    RefreshDeps {
        frontier: PathBuf,
        // registry/hub to refresh dependency snapshots from
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2383
// Actions under `vela repo …`: report repository status or run diagnostics.
#[derive(Subcommand)]
enum RepoAction {
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2403
// Actions under `vela queue …`: list, sign, or clear a pending-signature
// queue. `--queue-file` overrides the default queue location everywhere.
#[derive(Subcommand)]
enum QueueAction {
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        // `--all` kept as a back-compat alias for `--yes-to-all`
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2443
// Actions under `vela registry …`: interact with a frontier registry/hub —
// list entries, publish, query dependents, mirror between hubs, and pull
// (optionally with transitive dependencies).
#[derive(Subcommand)]
enum RegistryAction {
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        // max dependency depth when pulling transitively
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2536
// Actions under `vela gaps …`: rank knowledge gaps, top-N (default 10),
// optionally filtered by domain.
#[derive(Subcommand)]
enum GapsAction {
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2550
// Actions under `vela link …`: add a typed link between two findings.
// `r#type` is a raw identifier because `type` is a Rust keyword; it still
// surfaces as `--type` on the CLI (default "supports").
#[derive(Subcommand)]
enum LinkAction {
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        // skip existence validation of the link target
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2589
// Actions under `vela entity …`: run entity resolution over a frontier
// (with --force to redo existing resolutions, presumably) or list entities.
#[derive(Subcommand)]
enum EntityAction {
    Resolve {
        frontier: PathBuf,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
}
2612
#[derive(Subcommand)]
enum FindingCommands {
    /// Add a new finding to a frontier.
    Add {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Assertion text of the finding.
        #[arg(long)]
        assertion: String,
        /// Assertion type (validated against bundle::VALID_ASSERTION_TYPES).
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        /// Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        /// Provenance source type (validated against bundle::VALID_PROVENANCE_SOURCE_TYPES).
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        /// Author of the finding.
        #[arg(long)]
        author: String,
        /// Initial confidence score.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        /// Evidence type (validated against bundle::VALID_EVIDENCE_TYPES).
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        /// Entities mentioned by the finding; parsed into (name, type) pairs
        /// whose types must be in bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        /// Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        /// Evidence spans (repeatable).
        #[arg(long)]
        evidence_span: Vec<String>,
        /// Mark the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        /// Mark the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        /// DOI of the source publication.
        #[arg(long)]
        doi: Option<String>,
        /// PubMed id of the source publication.
        #[arg(long)]
        pmid: Option<String>,
        /// Publication year of the source.
        #[arg(long)]
        year: Option<i32>,
        /// Journal of the source publication.
        #[arg(long)]
        journal: Option<String>,
        /// URL of the source.
        #[arg(long)]
        url: Option<String>,
        /// Semicolon-separated list of source authors.
        #[arg(long)]
        source_authors: Option<String>,
        /// Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        /// Semicolon-separated list of species.
        #[arg(long)]
        species: Option<String>,
        /// Evidence comes from in vivo work.
        #[arg(long)]
        in_vivo: bool,
        /// Evidence comes from in vitro work.
        #[arg(long)]
        in_vitro: bool,
        /// Evidence includes human data.
        #[arg(long)]
        human_data: bool,
        /// Evidence comes from a clinical trial.
        #[arg(long)]
        clinical_trial: bool,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
        /// Apply the change immediately instead of drafting it.
        #[arg(long)]
        apply: bool,
    },
    /// Replace an existing finding with a new one, recording the reason.
    Supersede {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Id of the finding being superseded.
        old_id: String,
        /// Assertion text of the replacement finding.
        #[arg(long)]
        assertion: String,
        /// Assertion type (validated against bundle::VALID_ASSERTION_TYPES).
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        /// Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        /// Provenance source type (validated against bundle::VALID_PROVENANCE_SOURCE_TYPES).
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        /// Author of the replacement finding.
        #[arg(long)]
        author: String,
        /// Reason the old finding is superseded.
        #[arg(long)]
        reason: String,
        /// Confidence of the replacement finding.
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        /// Evidence type (validated against bundle::VALID_EVIDENCE_TYPES).
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        /// Entities mentioned by the replacement finding; parsed into
        /// (name, type) pairs validated against bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        /// DOI of the source publication.
        #[arg(long)]
        doi: Option<String>,
        /// PubMed id of the source publication.
        #[arg(long)]
        pmid: Option<String>,
        /// Publication year of the source.
        #[arg(long)]
        year: Option<i32>,
        /// Journal of the source publication.
        #[arg(long)]
        journal: Option<String>,
        /// URL of the source.
        #[arg(long)]
        url: Option<String>,
        /// Semicolon-separated list of source authors.
        #[arg(long)]
        source_authors: Option<String>,
        /// Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        /// Semicolon-separated list of species.
        #[arg(long)]
        species: Option<String>,
        /// Evidence comes from in vivo work.
        #[arg(long)]
        in_vivo: bool,
        /// Evidence comes from in vitro work.
        #[arg(long)]
        in_vitro: bool,
        /// Evidence includes human data.
        #[arg(long)]
        human_data: bool,
        /// Evidence comes from a clinical trial.
        #[arg(long)]
        clinical_trial: bool,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
        /// Apply the change immediately instead of drafting it.
        #[arg(long)]
        apply: bool,
    },
    /// Set the causal claim (and optional evidence grade) on a finding.
    CausalSet {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Id of the finding to annotate.
        finding_id: String,
        /// Causal claim (validated against bundle::VALID_CAUSAL_CLAIMS).
        #[arg(long)]
        claim: String,
        /// Evidence grade (validated against bundle::VALID_CAUSAL_EVIDENCE_GRADES).
        #[arg(long)]
        grade: Option<String>,
        /// Actor making the change.
        #[arg(long)]
        actor: String,
        /// Reason for the change.
        #[arg(long)]
        reason: String,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
}
2803
#[derive(Subcommand)]
enum ProposalAction {
    /// List proposals in a frontier, optionally filtered by status.
    List {
        /// Path to the frontier to read.
        frontier: PathBuf,
        /// Only show proposals with this status.
        #[arg(long)]
        status: Option<String>,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Show a single proposal in detail.
    Show {
        /// Path to the frontier to read.
        frontier: PathBuf,
        /// Id of the proposal to show.
        proposal_id: String,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Preview the effect of applying a proposal without committing it.
    Preview {
        /// Path to the frontier to read.
        frontier: PathBuf,
        /// Id of the proposal to preview.
        proposal_id: String,
        /// Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Import proposals from a file into a frontier.
    Import {
        /// Path to the destination frontier.
        frontier: PathBuf,
        /// Path to the proposal source file.
        source: PathBuf,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Validate a proposal file without importing it.
    Validate {
        /// Path to the proposal source file.
        source: PathBuf,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Export proposals from a frontier to a file.
    Export {
        /// Path to the frontier to read.
        frontier: PathBuf,
        /// Output file path.
        output: PathBuf,
        /// Only export proposals with this status.
        #[arg(long)]
        status: Option<String>,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Accept a proposal, recording the reviewer and reason.
    Accept {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Id of the proposal to accept.
        proposal_id: String,
        /// Reviewer identity making the decision.
        #[arg(long)]
        reviewer: String,
        /// Reason for accepting.
        #[arg(long)]
        reason: String,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Reject a proposal, recording the reviewer and reason.
    Reject {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Id of the proposal to reject.
        proposal_id: String,
        /// Reviewer identity making the decision.
        #[arg(long)]
        reviewer: String,
        /// Reason for rejecting.
        #[arg(long)]
        reason: String,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
}
2875
#[derive(Subcommand)]
enum SourceAdapterAction {
    /// Run a source adapter against a frontier.
    Run {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Name of the source adapter to run.
        adapter: String,
        /// Actor identity recorded for the run.
        #[arg(long)]
        actor: String,
        /// Specific entries to process (repeatable via --entry).
        #[arg(long = "entry")]
        entries: Vec<String>,
        /// Priority filter for which entries are processed.
        // NOTE(review): accepted priority values live in the adapter layer — confirm.
        #[arg(long)]
        priority: Option<String>,
        /// Also process entries that are normally excluded.
        #[arg(long)]
        include_excluded: bool,
        /// Continue on per-entry failures instead of aborting the run.
        // NOTE(review): exact partial-failure semantics live in the handler — confirm.
        #[arg(long)]
        allow_partial: bool,
        /// Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        /// Directory of pre-fetched inputs for the adapter.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        /// Apply produced artifacts to the frontier state.
        #[arg(long)]
        apply_artifacts: bool,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
}
2913
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    /// Run a runtime adapter over an input file against a frontier.
    Run {
        /// Path to the frontier to modify.
        frontier: PathBuf,
        /// Name of the runtime adapter to run.
        adapter: String,
        /// Path to the adapter's input file.
        #[arg(long)]
        input: PathBuf,
        /// Actor identity recorded for the run.
        #[arg(long)]
        actor: String,
        /// Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        /// Apply produced artifacts to the frontier state.
        #[arg(long)]
        apply_artifacts: bool,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
}
2939
#[derive(Subcommand)]
enum BridgeKitAction {
    /// Validate a bridge-kit source file.
    Validate {
        /// Path to the source file to validate.
        source: PathBuf,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    /// Verify the provenance of a packet.
    VerifyProvenance {
        /// Path to the packet to verify.
        packet: PathBuf,
        /// Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
}
2964
2965pub async fn run_command() {
2966 dotenvy::dotenv().ok();
2967
2968 match Cli::parse().command {
2969 Commands::Scout {
2970 folder,
2971 frontier,
2972 backend,
2973 dry_run,
2974 json,
2975 } => {
2976 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
2977 }
2978 Commands::CompileNotes {
2979 vault,
2980 frontier,
2981 backend,
2982 max_files,
2983 max_items_per_category,
2984 dry_run,
2985 json,
2986 } => {
2987 cmd_compile_notes(
2988 &vault,
2989 &frontier,
2990 backend.as_deref(),
2991 max_files,
2992 max_items_per_category,
2993 dry_run,
2994 json,
2995 )
2996 .await;
2997 }
2998 Commands::CompileCode {
2999 root,
3000 frontier,
3001 backend,
3002 max_files,
3003 dry_run,
3004 json,
3005 } => {
3006 cmd_compile_code(
3007 &root,
3008 &frontier,
3009 backend.as_deref(),
3010 max_files,
3011 dry_run,
3012 json,
3013 )
3014 .await;
3015 }
3016 Commands::CompileData {
3017 root,
3018 frontier,
3019 backend,
3020 sample_rows,
3021 dry_run,
3022 json,
3023 } => {
3024 cmd_compile_data(
3025 &root,
3026 &frontier,
3027 backend.as_deref(),
3028 sample_rows,
3029 dry_run,
3030 json,
3031 )
3032 .await;
3033 }
3034 Commands::ReviewPending {
3035 frontier,
3036 backend,
3037 max_proposals,
3038 batch_size,
3039 dry_run,
3040 json,
3041 } => {
3042 cmd_review_pending(
3043 &frontier,
3044 backend.as_deref(),
3045 max_proposals,
3046 batch_size,
3047 dry_run,
3048 json,
3049 )
3050 .await;
3051 }
3052 Commands::FindTensions {
3053 frontier,
3054 backend,
3055 max_findings,
3056 dry_run,
3057 json,
3058 } => {
3059 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3060 }
3061 Commands::PlanExperiments {
3062 frontier,
3063 backend,
3064 max_findings,
3065 dry_run,
3066 json,
3067 } => {
3068 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3069 }
3070 Commands::Check {
3071 source,
3072 schema,
3073 stats,
3074 conformance,
3075 conformance_dir,
3076 all,
3077 schema_only,
3078 strict,
3079 fix,
3080 json,
3081 } => cmd_check(
3082 source.as_deref(),
3083 schema,
3084 stats,
3085 conformance,
3086 &conformance_dir,
3087 all,
3088 schema_only,
3089 strict,
3090 fix,
3091 json,
3092 ),
3093 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3094 Commands::Impact {
3095 frontier,
3096 finding_id,
3097 depth,
3098 json,
3099 } => cmd_impact(&frontier, &finding_id, depth, json),
3100 Commands::Discord {
3101 frontier,
3102 json,
3103 kind,
3104 } => cmd_discord(&frontier, json, kind.as_deref()),
3105 Commands::Normalize {
3106 source,
3107 out,
3108 write,
3109 dry_run,
3110 rewrite_ids,
3111 id_map,
3112 resync_provenance,
3113 json,
3114 } => cmd_normalize(
3115 &source,
3116 out.as_deref(),
3117 write,
3118 dry_run,
3119 rewrite_ids,
3120 id_map.as_deref(),
3121 resync_provenance,
3122 json,
3123 ),
3124 Commands::Proof {
3125 frontier,
3126 out,
3127 template,
3128 gold,
3129 record_proof_state,
3130 json,
3131 } => cmd_proof(
3132 &frontier,
3133 &out,
3134 &template,
3135 gold.as_deref(),
3136 record_proof_state,
3137 json,
3138 ),
3139 Commands::Repo { action } => cmd_repo(action),
3140 Commands::Serve {
3141 frontier,
3142 frontiers,
3143 backend,
3144 http,
3145 setup,
3146 check_tools,
3147 json,
3148 workbench,
3149 } => {
3150 if setup {
3151 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3152 } else if check_tools {
3153 let source =
3154 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3155 match serve::check_tools(source) {
3156 Ok(report) => {
3157 if json {
3158 println!(
3159 "{}",
3160 serde_json::to_string_pretty(&report)
3161 .expect("failed to serialize tool check report")
3162 );
3163 } else {
3164 print_tool_check_report(&report);
3165 }
3166 }
3167 Err(e) => fail(&format!("Tool check failed: {e}")),
3168 }
3169 } else {
3170 let source =
3171 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3172 let resolved_port = if workbench {
3174 Some(http.unwrap_or(3848))
3175 } else {
3176 http
3177 };
3178 if let Some(port) = resolved_port {
3179 serve::run_http(source, backend.as_deref(), port, workbench).await;
3180 } else {
3181 serve::run(source, backend.as_deref()).await;
3182 }
3183 }
3184 }
3185 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3186 Commands::Log {
3187 frontier,
3188 limit,
3189 kind,
3190 json,
3191 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3192 Commands::Inbox {
3193 frontier,
3194 kind,
3195 limit,
3196 json,
3197 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3198 Commands::Ask {
3199 frontier,
3200 question,
3201 json,
3202 } => cmd_ask(&frontier, &question.join(" "), json),
3203 Commands::Stats { frontier, json } => {
3204 if json {
3205 print_stats_json(&frontier);
3206 } else {
3207 cmd_stats(&frontier);
3208 }
3209 }
3210 Commands::Search {
3211 source,
3212 query,
3213 entity,
3214 r#type,
3215 all,
3216 limit,
3217 json,
3218 } => cmd_search(
3219 source.as_deref(),
3220 &query,
3221 entity.as_deref(),
3222 r#type.as_deref(),
3223 all.as_deref(),
3224 limit,
3225 json,
3226 ),
3227 Commands::Tensions {
3228 source,
3229 both_high,
3230 cross_domain,
3231 top,
3232 json,
3233 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3234 Commands::Gaps { action } => cmd_gaps(action),
3235 Commands::Bridge {
3236 inputs,
3237 novelty,
3238 top,
3239 } => cmd_bridge(&inputs, novelty, top).await,
3240 Commands::Export {
3241 frontier,
3242 format,
3243 output,
3244 } => export::run(&frontier, &format, output.as_deref()),
3245 Commands::Packet { action } => cmd_packet(action),
3246 Commands::Verify { path, json } => cmd_verify(&path, json),
3247 Commands::Bench {
3248 frontier,
3249 gold,
3250 candidate,
3251 sources,
3252 threshold,
3253 report,
3254 entity_gold,
3255 link_gold,
3256 suite,
3257 suite_ready,
3258 min_f1,
3259 min_precision,
3260 min_recall,
3261 no_thresholds,
3262 json,
3263 } => {
3264 if let Some(cand) = candidate.clone() {
3269 let Some(g) = gold.clone() else {
3270 eprintln!(
3271 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3272 style::err_prefix()
3273 );
3274 std::process::exit(2);
3275 };
3276 cmd_agent_bench(
3277 &g,
3278 &cand,
3279 sources.as_deref(),
3280 threshold,
3281 report.as_deref(),
3282 json,
3283 );
3284 } else {
3285 cmd_bench(BenchArgs {
3286 frontier,
3287 gold,
3288 entity_gold,
3289 link_gold,
3290 suite,
3291 suite_ready,
3292 min_f1,
3293 min_precision,
3294 min_recall,
3295 no_thresholds,
3296 json,
3297 });
3298 }
3299 }
3300 Commands::Conformance { dir } => {
3301 let _ = conformance::run(&dir);
3302 }
3303 Commands::Version => println!("vela 0.36.0"),
3304 Commands::Sign { action } => cmd_sign(action),
3305 Commands::Actor { action } => cmd_actor(action),
3306 Commands::Federation { action } => cmd_federation(action),
3307 Commands::Causal { action } => cmd_causal(action),
3308 Commands::Frontier { action } => cmd_frontier(action),
3309 Commands::Queue { action } => cmd_queue(action),
3310 Commands::Registry { action } => cmd_registry(action),
3311 Commands::Init {
3312 path,
3313 name,
3314 template,
3315 no_git,
3316 json,
3317 } => cmd_init(&path, &name, &template, !no_git, json),
3318 Commands::Quickstart {
3319 path,
3320 name,
3321 reviewer,
3322 assertion,
3323 keys_out,
3324 json,
3325 } => cmd_quickstart(
3326 &path,
3327 &name,
3328 &reviewer,
3329 assertion.as_deref(),
3330 keys_out.as_deref(),
3331 json,
3332 ),
3333 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3334 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3335 Commands::Diff {
3336 target,
3337 frontier_b,
3338 frontier,
3339 reviewer,
3340 json,
3341 quiet,
3342 } => {
3343 if target.starts_with("vpr_") {
3348 let frontier_root = frontier
3349 .clone()
3350 .or_else(|| frontier_b.clone())
3351 .unwrap_or_else(|| std::path::PathBuf::from("."));
3352 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3353 .unwrap_or_else(|e| fail_return(&e));
3354 let payload = json!({
3355 "ok": true,
3356 "command": "diff.proposal",
3357 "frontier": frontier_root.display().to_string(),
3358 "proposal_id": target,
3359 "preview": preview,
3360 });
3361 if json {
3362 println!(
3363 "{}",
3364 serde_json::to_string_pretty(&payload)
3365 .expect("failed to serialize diff preview")
3366 );
3367 } else {
3368 println!("vela diff · proposal preview");
3369 println!(" proposal: {}", target);
3370 println!(" kind: {}", preview.kind);
3371 println!(
3372 " findings: {} -> {}",
3373 preview.findings_before, preview.findings_after
3374 );
3375 println!(
3376 " artifacts: {} -> {}",
3377 preview.artifacts_before, preview.artifacts_after
3378 );
3379 println!(
3380 " events: {} -> {}",
3381 preview.events_before, preview.events_after
3382 );
3383 if !preview.changed_findings.is_empty() {
3384 println!(
3385 " findings changed: {}",
3386 preview.changed_findings.join(", ")
3387 );
3388 }
3389 }
3390 } else {
3391 let frontier_a = std::path::PathBuf::from(&target);
3392 let b = frontier_b.unwrap_or_else(|| {
3393 fail_return(
3394 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3395 )
3396 });
3397 diff::run(&frontier_a, &b, json, quiet);
3398 }
3399 }
3400 Commands::Proposals { action } => cmd_proposals(action),
3401 Commands::ArtifactToState {
3402 frontier,
3403 packet,
3404 actor,
3405 apply_artifacts,
3406 json,
3407 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3408 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
3409 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3410 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3411 Commands::Link { action } => cmd_link(action),
3412 Commands::Workbench {
3413 path,
3414 port,
3415 no_open,
3416 } => {
3417 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3418 fail(&e);
3419 }
3420 }
3421 Commands::Bridges { action } => cmd_bridges(action),
3422 Commands::Entity { action } => cmd_entity(action),
3423 Commands::Finding { command } => match command {
3424 FindingCommands::Add {
3425 frontier,
3426 assertion,
3427 r#type,
3428 source,
3429 source_type,
3430 author,
3431 confidence,
3432 evidence_type,
3433 entities,
3434 entities_reviewed,
3435 evidence_span,
3436 gap,
3437 negative_space,
3438 doi,
3439 pmid,
3440 year,
3441 journal,
3442 url,
3443 source_authors,
3444 conditions_text,
3445 species,
3446 in_vivo,
3447 in_vitro,
3448 human_data,
3449 clinical_trial,
3450 json,
3451 apply,
3452 } => {
3453 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3454 validate_enum_arg(
3455 "--evidence-type",
3456 &evidence_type,
3457 bundle::VALID_EVIDENCE_TYPES,
3458 );
3459 validate_enum_arg(
3460 "--source-type",
3461 &source_type,
3462 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3463 );
3464 let parsed_entities = parse_entities(&entities);
3465 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3466 for (name, etype) in &parsed_entities {
3467 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3468 fail(&format!(
3469 "invalid entity type '{}' for '{}'. Valid: {}",
3470 etype,
3471 name,
3472 bundle::VALID_ENTITY_TYPES.join(", "),
3473 ));
3474 }
3475 }
3476 let parsed_source_authors = source_authors
3477 .map(|s| {
3478 s.split(';')
3479 .map(|a| a.trim().to_string())
3480 .filter(|a| !a.is_empty())
3481 .collect()
3482 })
3483 .unwrap_or_default();
3484 let parsed_species = species
3485 .map(|s| {
3486 s.split(';')
3487 .map(|a| a.trim().to_string())
3488 .filter(|a| !a.is_empty())
3489 .collect()
3490 })
3491 .unwrap_or_default();
3492 let report = state::add_finding(
3493 &frontier,
3494 state::FindingDraftOptions {
3495 text: assertion,
3496 assertion_type: r#type,
3497 source,
3498 source_type,
3499 author,
3500 confidence,
3501 evidence_type,
3502 entities: parsed_entities,
3503 doi,
3504 pmid,
3505 year,
3506 journal,
3507 url,
3508 source_authors: parsed_source_authors,
3509 conditions_text,
3510 species: parsed_species,
3511 in_vivo,
3512 in_vitro,
3513 human_data,
3514 clinical_trial,
3515 entities_reviewed,
3516 evidence_spans: parsed_evidence_spans,
3517 gap,
3518 negative_space,
3519 },
3520 apply,
3521 )
3522 .unwrap_or_else(|e| fail_return(&e));
3523 print_state_report(&report, json);
3524 }
3525 FindingCommands::Supersede {
3526 frontier,
3527 old_id,
3528 assertion,
3529 r#type,
3530 source,
3531 source_type,
3532 author,
3533 reason,
3534 confidence,
3535 evidence_type,
3536 entities,
3537 doi,
3538 pmid,
3539 year,
3540 journal,
3541 url,
3542 source_authors,
3543 conditions_text,
3544 species,
3545 in_vivo,
3546 in_vitro,
3547 human_data,
3548 clinical_trial,
3549 json,
3550 apply,
3551 } => {
3552 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3553 validate_enum_arg(
3554 "--evidence-type",
3555 &evidence_type,
3556 bundle::VALID_EVIDENCE_TYPES,
3557 );
3558 validate_enum_arg(
3559 "--source-type",
3560 &source_type,
3561 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3562 );
3563 let parsed_entities = parse_entities(&entities);
3564 for (name, etype) in &parsed_entities {
3565 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3566 fail(&format!(
3567 "invalid entity type '{}' for '{}'. Valid: {}",
3568 etype,
3569 name,
3570 bundle::VALID_ENTITY_TYPES.join(", "),
3571 ));
3572 }
3573 }
3574 let parsed_source_authors = source_authors
3575 .map(|s| {
3576 s.split(';')
3577 .map(|a| a.trim().to_string())
3578 .filter(|a| !a.is_empty())
3579 .collect()
3580 })
3581 .unwrap_or_default();
3582 let parsed_species = species
3583 .map(|s| {
3584 s.split(';')
3585 .map(|a| a.trim().to_string())
3586 .filter(|a| !a.is_empty())
3587 .collect()
3588 })
3589 .unwrap_or_default();
3590 let report = state::supersede_finding(
3591 &frontier,
3592 &old_id,
3593 &reason,
3594 state::FindingDraftOptions {
3595 text: assertion,
3596 assertion_type: r#type,
3597 source,
3598 source_type,
3599 author,
3600 confidence,
3601 evidence_type,
3602 entities: parsed_entities,
3603 doi,
3604 pmid,
3605 year,
3606 journal,
3607 url,
3608 source_authors: parsed_source_authors,
3609 conditions_text,
3610 species: parsed_species,
3611 in_vivo,
3612 in_vitro,
3613 human_data,
3614 clinical_trial,
3615 entities_reviewed: false,
3616 evidence_spans: Vec::new(),
3617 gap: false,
3618 negative_space: false,
3619 },
3620 apply,
3621 )
3622 .unwrap_or_else(|e| fail_return(&e));
3623 print_state_report(&report, json);
3624 }
3625 FindingCommands::CausalSet {
3626 frontier,
3627 finding_id,
3628 claim,
3629 grade,
3630 actor,
3631 reason,
3632 json,
3633 } => {
3634 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3635 fail(&format!(
3636 "invalid --claim '{claim}'; valid: {:?}",
3637 bundle::VALID_CAUSAL_CLAIMS
3638 ));
3639 }
3640 if let Some(g) = grade.as_deref()
3641 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3642 {
3643 fail(&format!(
3644 "invalid --grade '{g}'; valid: {:?}",
3645 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3646 ));
3647 }
3648 let report = state::set_causal(
3649 &frontier,
3650 &finding_id,
3651 &claim,
3652 grade.as_deref(),
3653 &actor,
3654 &reason,
3655 )
3656 .unwrap_or_else(|e| fail_return(&e));
3657 print_state_report(&report, json);
3658 }
3659 },
3660 Commands::Review {
3661 frontier,
3662 finding_id,
3663 status,
3664 reason,
3665 reviewer,
3666 apply,
3667 json,
3668 } => {
3669 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3670 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3671 let report = state::review_finding(
3672 &frontier,
3673 &finding_id,
3674 state::ReviewOptions {
3675 status,
3676 reason,
3677 reviewer,
3678 },
3679 apply,
3680 )
3681 .unwrap_or_else(|e| fail_return(&e));
3682 print_state_report(&report, json);
3683 }
3684 Commands::Note {
3685 frontier,
3686 finding_id,
3687 text,
3688 author,
3689 apply,
3690 json,
3691 } => {
3692 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3693 .unwrap_or_else(|e| fail_return(&e));
3694 print_state_report(&report, json);
3695 }
3696 Commands::Caveat {
3697 frontier,
3698 finding_id,
3699 text,
3700 author,
3701 apply,
3702 json,
3703 } => {
3704 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3705 .unwrap_or_else(|e| fail_return(&e));
3706 print_state_report(&report, json);
3707 }
3708 Commands::Revise {
3709 frontier,
3710 finding_id,
3711 confidence,
3712 reason,
3713 reviewer,
3714 apply,
3715 json,
3716 } => {
3717 let report = state::revise_confidence(
3718 &frontier,
3719 &finding_id,
3720 state::ReviseOptions {
3721 confidence,
3722 reason,
3723 reviewer,
3724 },
3725 apply,
3726 )
3727 .unwrap_or_else(|e| fail_return(&e));
3728 print_state_report(&report, json);
3729 }
3730 Commands::Reject {
3731 frontier,
3732 finding_id,
3733 reason,
3734 reviewer,
3735 apply,
3736 json,
3737 } => {
3738 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3739 .unwrap_or_else(|e| fail_return(&e));
3740 print_state_report(&report, json);
3741 }
3742 Commands::History {
3743 frontier,
3744 finding_id,
3745 json,
3746 as_of,
3747 } => {
3748 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3749 .unwrap_or_else(|e| fail_return(&e));
3750 if json {
3751 println!(
3752 "{}",
3753 serde_json::to_string_pretty(&payload)
3754 .expect("failed to serialize history response")
3755 );
3756 } else {
3757 print_history(&payload);
3758 }
3759 }
3760 Commands::ImportEvents { source, into, json } => {
3761 let report =
3762 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3763 if json {
3764 println!(
3765 "{}",
3766 serde_json::to_string_pretty(&json!({
3767 "ok": true,
3768 "command": "import-events",
3769 "source": report.source,
3770 "target": into.display().to_string(),
3771 "summary": {
3772 "imported": report.imported,
3773 "new": report.new,
3774 "duplicate": report.duplicate,
3775 "canonical_events_imported": report.events_imported,
3776 "canonical_events_new": report.events_new,
3777 "canonical_events_duplicate": report.events_duplicate,
3778 }
3779 }))
3780 .expect("failed to serialize import-events response")
3781 );
3782 } else {
3783 println!("{report}");
3784 }
3785 }
3786 Commands::Retract {
3787 source,
3788 finding_id,
3789 reason,
3790 reviewer,
3791 apply,
3792 json,
3793 } => {
3794 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3795 .unwrap_or_else(|e| fail_return(&e));
3796 print_state_report(&report, json);
3797 }
3798 Commands::LocatorRepair {
3799 frontier,
3800 atom_id,
3801 locator,
3802 reviewer,
3803 reason,
3804 apply,
3805 json,
3806 } => {
3807 cmd_locator_repair(
3808 &frontier,
3809 &atom_id,
3810 locator.as_deref(),
3811 &reviewer,
3812 &reason,
3813 apply,
3814 json,
3815 );
3816 }
3817 Commands::SourceFetch {
3818 identifier,
3819 cache,
3820 out,
3821 refresh,
3822 json,
3823 } => {
3824 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3825 }
3826 Commands::SpanRepair {
3827 frontier,
3828 finding_id,
3829 section,
3830 text,
3831 reviewer,
3832 reason,
3833 apply,
3834 json,
3835 } => {
3836 cmd_span_repair(
3837 &frontier,
3838 &finding_id,
3839 §ion,
3840 &text,
3841 &reviewer,
3842 &reason,
3843 apply,
3844 json,
3845 );
3846 }
3847 Commands::EntityAdd {
3848 frontier,
3849 finding_id,
3850 entity,
3851 entity_type,
3852 reviewer,
3853 reason,
3854 apply,
3855 json,
3856 } => {
3857 let report = state::add_finding_entity(
3858 &frontier,
3859 &finding_id,
3860 &entity,
3861 &entity_type,
3862 &reviewer,
3863 &reason,
3864 apply,
3865 )
3866 .unwrap_or_else(|e| fail_return(&e));
3867 print_state_report(&report, json);
3868 }
3869 Commands::EntityResolve {
3870 frontier,
3871 finding_id,
3872 entity,
3873 source,
3874 id,
3875 confidence,
3876 matched_name,
3877 resolution_method,
3878 reviewer,
3879 reason,
3880 apply,
3881 json,
3882 } => {
3883 cmd_entity_resolve(
3884 &frontier,
3885 &finding_id,
3886 &entity,
3887 &source,
3888 &id,
3889 confidence,
3890 matched_name.as_deref(),
3891 &resolution_method,
3892 &reviewer,
3893 &reason,
3894 apply,
3895 json,
3896 );
3897 }
3898 Commands::Propagate {
3899 frontier,
3900 retract,
3901 reduce_confidence,
3902 to,
3903 output,
3904 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3905 Commands::Replicate {
3906 frontier,
3907 target,
3908 outcome,
3909 by,
3910 conditions,
3911 source_title,
3912 doi,
3913 pmid,
3914 sample_size,
3915 note,
3916 previous_attempt,
3917 no_cascade,
3918 json,
3919 } => cmd_replicate(
3920 &frontier,
3921 &target,
3922 &outcome,
3923 &by,
3924 &conditions,
3925 &source_title,
3926 doi.as_deref(),
3927 pmid.as_deref(),
3928 sample_size.as_deref(),
3929 ¬e,
3930 previous_attempt.as_deref(),
3931 no_cascade,
3932 json,
3933 ),
3934 Commands::Replications {
3935 frontier,
3936 target,
3937 json,
3938 } => cmd_replications(&frontier, target.as_deref(), json),
3939 Commands::DatasetAdd {
3940 frontier,
3941 name,
3942 version,
3943 content_hash,
3944 url,
3945 license,
3946 source_title,
3947 doi,
3948 row_count,
3949 json,
3950 } => cmd_dataset_add(
3951 &frontier,
3952 &name,
3953 version.as_deref(),
3954 &content_hash,
3955 url.as_deref(),
3956 license.as_deref(),
3957 &source_title,
3958 doi.as_deref(),
3959 row_count,
3960 json,
3961 ),
3962 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
3963 Commands::CodeAdd {
3964 frontier,
3965 language,
3966 repo_url,
3967 commit,
3968 path,
3969 content_hash,
3970 line_start,
3971 line_end,
3972 entry_point,
3973 json,
3974 } => cmd_code_add(
3975 &frontier,
3976 &language,
3977 repo_url.as_deref(),
3978 commit.as_deref(),
3979 &path,
3980 &content_hash,
3981 line_start,
3982 line_end,
3983 entry_point.as_deref(),
3984 json,
3985 ),
3986 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
3987 Commands::ArtifactAdd {
3988 frontier,
3989 kind,
3990 name,
3991 file,
3992 url,
3993 content_hash,
3994 media_type,
3995 license,
3996 source_title,
3997 source_url,
3998 doi,
3999 target,
4000 metadata,
4001 access_tier,
4002 deposited_by,
4003 reason,
4004 json,
4005 } => cmd_artifact_add(
4006 &frontier,
4007 &kind,
4008 &name,
4009 file.as_deref(),
4010 url.as_deref(),
4011 content_hash.as_deref(),
4012 media_type.as_deref(),
4013 license.as_deref(),
4014 source_title.as_deref(),
4015 source_url.as_deref(),
4016 doi.as_deref(),
4017 target,
4018 metadata,
4019 &access_tier,
4020 &deposited_by,
4021 &reason,
4022 json,
4023 ),
4024 Commands::Artifacts {
4025 frontier,
4026 target,
4027 json,
4028 } => cmd_artifacts(&frontier, target.as_deref(), json),
4029 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4030 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4031 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4032 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4033 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4034 Commands::ClinicalTrialImport {
4035 frontier,
4036 nct_id,
4037 input_json,
4038 target,
4039 deposited_by,
4040 reason,
4041 license,
4042 json,
4043 } => {
4044 cmd_clinical_trial_import(
4045 &frontier,
4046 &nct_id,
4047 input_json.as_deref(),
4048 target,
4049 &deposited_by,
4050 &reason,
4051 &license,
4052 json,
4053 )
4054 .await
4055 }
4056 Commands::NegativeResultAdd {
4057 frontier,
4058 kind,
4059 deposited_by,
4060 reason,
4061 conditions_text,
4062 notes,
4063 target,
4064 endpoint,
4065 intervention,
4066 comparator,
4067 population,
4068 n_enrolled,
4069 power,
4070 ci_lower,
4071 ci_upper,
4072 effect_size_threshold,
4073 registry_id,
4074 reagent,
4075 observation,
4076 attempts,
4077 source_title,
4078 doi,
4079 url,
4080 year,
4081 json,
4082 } => cmd_negative_result_add(
4083 &frontier,
4084 &kind,
4085 &deposited_by,
4086 &reason,
4087 &conditions_text,
4088 ¬es,
4089 target,
4090 endpoint.as_deref(),
4091 intervention.as_deref(),
4092 comparator.as_deref(),
4093 population.as_deref(),
4094 n_enrolled,
4095 power,
4096 ci_lower,
4097 ci_upper,
4098 effect_size_threshold,
4099 registry_id.as_deref(),
4100 reagent.as_deref(),
4101 observation.as_deref(),
4102 attempts,
4103 &source_title,
4104 doi.as_deref(),
4105 url.as_deref(),
4106 year,
4107 json,
4108 ),
4109 Commands::NegativeResults {
4110 frontier,
4111 target,
4112 json,
4113 } => cmd_negative_results(&frontier, target.as_deref(), json),
4114 Commands::TrajectoryCreate {
4115 frontier,
4116 deposited_by,
4117 reason,
4118 target,
4119 notes,
4120 json,
4121 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4122 Commands::TrajectoryStep {
4123 frontier,
4124 trajectory_id,
4125 kind,
4126 description,
4127 actor,
4128 reason,
4129 reference,
4130 json,
4131 } => cmd_trajectory_step(
4132 &frontier,
4133 &trajectory_id,
4134 &kind,
4135 &description,
4136 &actor,
4137 &reason,
4138 reference,
4139 json,
4140 ),
4141 Commands::Trajectories {
4142 frontier,
4143 target,
4144 json,
4145 } => cmd_trajectories(&frontier, target.as_deref(), json),
4146 Commands::TierSet {
4147 frontier,
4148 object_type,
4149 object_id,
4150 tier,
4151 actor,
4152 reason,
4153 json,
4154 } => cmd_tier_set(
4155 &frontier,
4156 &object_type,
4157 &object_id,
4158 &tier,
4159 &actor,
4160 &reason,
4161 json,
4162 ),
4163 Commands::Predict {
4164 frontier,
4165 by,
4166 claim,
4167 criterion,
4168 resolves_by,
4169 confidence,
4170 target,
4171 outcome,
4172 conditions,
4173 json,
4174 } => cmd_predict(
4175 &frontier,
4176 &by,
4177 &claim,
4178 &criterion,
4179 resolves_by.as_deref(),
4180 confidence,
4181 &target,
4182 &outcome,
4183 &conditions,
4184 json,
4185 ),
4186 Commands::Resolve {
4187 frontier,
4188 prediction,
4189 outcome,
4190 matched,
4191 by,
4192 confidence,
4193 source_title,
4194 doi,
4195 json,
4196 } => cmd_resolve(
4197 &frontier,
4198 &prediction,
4199 &outcome,
4200 matched,
4201 &by,
4202 confidence,
4203 &source_title,
4204 doi.as_deref(),
4205 json,
4206 ),
4207 Commands::Predictions {
4208 frontier,
4209 by,
4210 open,
4211 json,
4212 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4213 Commands::Calibration {
4214 frontier,
4215 actor,
4216 json,
4217 } => cmd_calibration(&frontier, actor.as_deref(), json),
4218 Commands::PredictionsExpire {
4219 frontier,
4220 now,
4221 dry_run,
4222 json,
4223 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4224 Commands::Consensus {
4225 frontier,
4226 target,
4227 weighting,
4228 causal_claim,
4229 causal_grade_min,
4230 json,
4231 } => cmd_consensus(
4232 &frontier,
4233 &target,
4234 &weighting,
4235 causal_claim.as_deref(),
4236 causal_grade_min.as_deref(),
4237 json,
4238 ),
4239
4240 Commands::Ingest {
4243 path,
4244 frontier,
4245 backend,
4246 actor,
4247 dry_run,
4248 json,
4249 } => {
4250 cmd_ingest(
4251 &path,
4252 &frontier,
4253 backend.as_deref(),
4254 actor.as_deref(),
4255 dry_run,
4256 json,
4257 )
4258 .await
4259 }
4260
4261 Commands::Propose {
4262 frontier,
4263 finding_id,
4264 status,
4265 reason,
4266 reviewer,
4267 apply,
4268 json,
4269 } => {
4270 let options = state::ReviewOptions {
4273 status: status.clone(),
4274 reason: reason.clone(),
4275 reviewer: reviewer.clone(),
4276 };
4277 let report = state::review_finding(&frontier, &finding_id, options, apply)
4278 .unwrap_or_else(|e| fail_return(&e));
4279 print_state_report(&report, json);
4280 }
4281
4282 Commands::Accept {
4283 frontier,
4284 proposal_id,
4285 reviewer,
4286 reason,
4287 json,
4288 } => {
4289 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4291 .unwrap_or_else(|e| fail_return(&e));
4292 let payload = json!({
4293 "ok": true,
4294 "command": "accept",
4295 "frontier": frontier.display().to_string(),
4296 "proposal_id": proposal_id,
4297 "reviewer": reviewer,
4298 "applied_event_id": event_id,
4299 });
4300 if json {
4301 println!(
4302 "{}",
4303 serde_json::to_string_pretty(&payload)
4304 .expect("failed to serialize accept response")
4305 );
4306 } else {
4307 println!(
4308 "{} accepted and applied proposal {}",
4309 style::ok("ok"),
4310 proposal_id
4311 );
4312 println!(" event: {}", event_id);
4313 }
4314 }
4315
4316 Commands::Attest {
4317 frontier,
4318 event,
4319 attester,
4320 scope_note,
4321 proof_id,
4322 signature,
4323 key,
4324 json,
4325 } => {
4326 if let Some(target_event_id) = event {
4330 let attester_id = attester.unwrap_or_else(|| {
4331 fail_return("attest: --attester is required in per-event mode")
4332 });
4333 let scope = scope_note.unwrap_or_else(|| {
4334 fail_return("attest: --scope-note is required in per-event mode")
4335 });
4336 let attestation_event_id = state::record_attestation(
4337 &frontier,
4338 &target_event_id,
4339 &attester_id,
4340 &scope,
4341 proof_id.as_deref(),
4342 signature.as_deref(),
4343 )
4344 .unwrap_or_else(|e| fail_return(&e));
4345 if json {
4346 let payload = json!({
4347 "ok": true,
4348 "command": "attest.event",
4349 "frontier": frontier.display().to_string(),
4350 "target_event_id": target_event_id,
4351 "attestation_event_id": attestation_event_id,
4352 "attester_id": attester_id,
4353 });
4354 println!(
4355 "{}",
4356 serde_json::to_string_pretty(&payload)
4357 .expect("failed to serialize attest.event response")
4358 );
4359 } else {
4360 println!(
4361 "{} attested {} by {} ({})",
4362 style::ok("ok"),
4363 target_event_id,
4364 attester_id,
4365 attestation_event_id
4366 );
4367 }
4368 return;
4369 }
4370 let key_path = key.unwrap_or_else(|| {
4372 fail_return(
4373 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4374 )
4375 });
4376 let count =
4377 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4378 let payload = json!({
4379 "ok": true,
4380 "command": "attest",
4381 "frontier": frontier.display().to_string(),
4382 "private_key": key_path.display().to_string(),
4383 "signed": count,
4384 });
4385 if json {
4386 println!(
4387 "{}",
4388 serde_json::to_string_pretty(&payload)
4389 .expect("failed to serialize attest response")
4390 );
4391 } else {
4392 println!(
4393 "{} {count} findings in {}",
4394 style::ok("attested"),
4395 frontier.display()
4396 );
4397 }
4398 }
4399
4400 Commands::Lineage {
4401 frontier,
4402 finding_id,
4403 as_of,
4404 json,
4405 } => {
4406 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4408 .unwrap_or_else(|e| fail_return(&e));
4409 if json {
4410 println!(
4411 "{}",
4412 serde_json::to_string_pretty(&payload)
4413 .expect("failed to serialize lineage response")
4414 );
4415 } else {
4416 print_history(&payload);
4417 }
4418 }
4419
4420 Commands::Carina { action } => cmd_carina(action),
4421
4422 Commands::Atlas { action } => cmd_atlas(action).await,
4423
4424 Commands::Constellation { action } => cmd_constellation(action).await,
4425 }
4426}
4427
4428async fn cmd_atlas(action: AtlasAction) {
4433 match action {
4434 AtlasAction::Init {
4435 name,
4436 frontiers,
4437 domain,
4438 scope_note,
4439 atlases_root,
4440 json,
4441 } => match ATLAS_INIT_HANDLER.get() {
4442 Some(handler) => {
4443 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4444 }
4445 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4446 },
4447 AtlasAction::Materialize {
4448 name,
4449 atlases_root,
4450 json,
4451 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4452 Some(handler) => handler(atlases_root, name, json).await,
4453 None => fail("vela atlas materialize: handler not registered"),
4454 },
4455 AtlasAction::Serve {
4456 name,
4457 atlases_root,
4458 port,
4459 no_open,
4460 } => {
4461 match ATLAS_SERVE_HANDLER.get() {
4465 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4466 None => fail("vela atlas serve: handler not registered"),
4467 }
4468 }
4469 AtlasAction::Update {
4470 name,
4471 add_frontier,
4472 remove_vfr_id,
4473 atlases_root,
4474 json,
4475 } => match ATLAS_UPDATE_HANDLER.get() {
4476 Some(handler) => {
4477 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4478 }
4479 None => fail("vela atlas update: handler not registered"),
4480 },
4481 }
4482}
4483
4484async fn cmd_constellation(action: ConstellationAction) {
4488 match action {
4489 ConstellationAction::Init {
4490 name,
4491 atlases,
4492 scope_note,
4493 constellations_root,
4494 json,
4495 } => match CONSTELLATION_INIT_HANDLER.get() {
4496 Some(handler) => {
4497 handler(constellations_root, name, scope_note, atlases, json).await;
4498 }
4499 None => fail(
4500 "vela constellation init: handler not registered (built without vela-constellation)",
4501 ),
4502 },
4503 ConstellationAction::Materialize {
4504 name,
4505 constellations_root,
4506 json,
4507 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4508 Some(handler) => handler(constellations_root, name, json).await,
4509 None => fail("vela constellation materialize: handler not registered"),
4510 },
4511 ConstellationAction::Serve {
4512 name,
4513 constellations_root,
4514 port,
4515 no_open,
4516 } => match CONSTELLATION_SERVE_HANDLER.get() {
4517 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4518 None => fail("vela constellation serve: handler not registered"),
4519 },
4520 }
4521}
4522
4523fn cmd_carina(action: CarinaAction) {
4526 match action {
4527 CarinaAction::List { json } => {
4528 if json {
4529 println!(
4530 "{}",
4531 serde_json::to_string_pretty(&json!({
4532 "ok": true,
4533 "command": "carina.list",
4534 "primitives": carina_validate::PRIMITIVE_NAMES,
4535 }))
4536 .expect("failed to serialize carina.list")
4537 );
4538 } else {
4539 println!("Carina primitives bundled with this build:");
4540 for name in carina_validate::PRIMITIVE_NAMES {
4541 println!(" · {name}");
4542 }
4543 }
4544 }
4545 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4546 Some(text) => print!("{text}"),
4547 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4548 },
4549 CarinaAction::Validate {
4550 path,
4551 primitive,
4552 json,
4553 } => {
4554 let text = std::fs::read_to_string(&path)
4555 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4556 let value: Value = serde_json::from_str(&text)
4557 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4558 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4564 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4565 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4566 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4567 for (key, child) in primitives {
4568 let outcome = carina_validate::validate(key, child)
4569 .map(|()| carina_validate::detect_primitive(child));
4570 report.push((key.clone(), outcome));
4571 }
4572 } else {
4573 let outcome = match primitive.as_deref() {
4574 Some(name) => carina_validate::validate(name, &value).map(|()| {
4575 carina_validate::PRIMITIVE_NAMES
4576 .iter()
4577 .copied()
4578 .find(|p| *p == name)
4579 }),
4580 None => carina_validate::validate_auto(&value).map(Some),
4581 };
4582 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4583 report.push((label, outcome));
4584 }
4585
4586 let total = report.len();
4587 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4588 let fail = total - pass;
4589
4590 if json {
4591 let entries: Vec<Value> = report
4592 .iter()
4593 .map(|(label, r)| match r {
4594 Ok(name) => json!({
4595 "key": label,
4596 "primitive": name,
4597 "ok": true,
4598 }),
4599 Err(errs) => json!({
4600 "key": label,
4601 "ok": false,
4602 "errors": errs,
4603 }),
4604 })
4605 .collect();
4606 println!(
4607 "{}",
4608 serde_json::to_string_pretty(&json!({
4609 "ok": fail == 0,
4610 "command": "carina.validate",
4611 "file": path.display().to_string(),
4612 "total": total,
4613 "passed": pass,
4614 "failed": fail,
4615 "entries": entries,
4616 }))
4617 .expect("failed to serialize carina.validate")
4618 );
4619 } else {
4620 for (label, r) in &report {
4621 match r {
4622 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4623 Ok(None) => println!(" {} {label}", style::ok("ok")),
4624 Err(errs) => {
4625 println!(" {} {label}", style::lost("fail"));
4626 for e in errs {
4627 println!(" {e}");
4628 }
4629 }
4630 }
4631 }
4632 println!();
4633 if fail == 0 {
4634 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4635 } else {
4636 println!(
4637 "{} {pass}/{total} valid · {fail} failed",
4638 style::lost("carina.validate")
4639 );
4640 }
4641 }
4642
4643 if fail > 0 {
4644 std::process::exit(1);
4645 }
4646 }
4647 }
4648}
4649
4650fn cmd_consensus(
4653 frontier: &Path,
4654 target: &str,
4655 weighting_str: &str,
4656 causal_claim: Option<&str>,
4657 causal_grade_min: Option<&str>,
4658 json: bool,
4659) {
4660 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4661
4662 if !target.starts_with("vf_") {
4663 fail(&format!("target `{target}` is not a vf_ finding id"));
4664 }
4665 let scheme =
4666 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4667
4668 let parsed_claim = match causal_claim {
4669 None => None,
4670 Some("correlation") => Some(CausalClaim::Correlation),
4671 Some("mediation") => Some(CausalClaim::Mediation),
4672 Some("intervention") => Some(CausalClaim::Intervention),
4673 Some(other) => fail_return(&format!(
4674 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4675 )),
4676 };
4677 let parsed_grade = match causal_grade_min {
4678 None => None,
4679 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4680 Some("observational") => Some(CausalEvidenceGrade::Observational),
4681 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4682 Some("rct") => Some(CausalEvidenceGrade::Rct),
4683 Some(other) => fail_return(&format!(
4684 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4685 )),
4686 };
4687 let filter = crate::aggregate::AggregateFilter {
4688 causal_claim: parsed_claim,
4689 causal_grade_min: parsed_grade,
4690 };
4691 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4692
4693 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4694 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4695
4696 if json {
4697 println!(
4698 "{}",
4699 serde_json::to_string_pretty(&result).expect("serialize consensus")
4700 );
4701 return;
4702 }
4703
4704 println!();
4705 println!(
4706 " {}",
4707 format!(
4708 "VELA · CONSENSUS · {} ({})",
4709 result.target, result.weighting
4710 )
4711 .to_uppercase()
4712 .dimmed()
4713 );
4714 println!(" {}", style::tick_row(60));
4715 println!(
4716 " target: {}",
4717 truncate(&result.target_assertion, 80)
4718 );
4719 println!(" similar findings: {}", result.n_findings);
4720 println!(
4721 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
4722 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
4723 );
4724 println!();
4725 println!(" constituents (sorted by weight):");
4726 let mut sorted = result.constituents.clone();
4727 sorted.sort_by(|a, b| {
4728 b.weight
4729 .partial_cmp(&a.weight)
4730 .unwrap_or(std::cmp::Ordering::Equal)
4731 });
4732 for c in sorted.iter().take(10) {
4733 let repls = if c.n_replications > 0 {
4734 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
4735 } else {
4736 String::new()
4737 };
4738 println!(
4739 " · w={:.2} raw={:.2} adj={:.2}{}",
4740 c.weight, c.raw_score, c.adjusted_score, repls
4741 );
4742 println!(" {}", truncate(&c.assertion_text, 88));
4743 }
4744 if result.constituents.len() > 10 {
4745 println!(" ... ({} more)", result.constituents.len() - 10);
4746 }
4747}
4748
4749fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4755 let trimmed = s.trim();
4756 if trimmed.eq_ignore_ascii_case("affirmed") {
4757 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4758 }
4759 if trimmed.eq_ignore_ascii_case("falsified") {
4760 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4761 }
4762 if let Some(rest) = trimmed.strip_prefix("cat:") {
4763 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4764 value: rest.to_string(),
4765 });
4766 }
4767 if let Some(rest) = trimmed.strip_prefix("quant:") {
4768 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4769 let (val_s, tol_s) = vt
4770 .split_once('±')
4771 .or_else(|| vt.split_once("+/-"))
4772 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4773 let value: f64 = val_s
4774 .parse()
4775 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4776 let tolerance: f64 = tol_s
4777 .parse()
4778 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4779 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4780 value,
4781 tolerance,
4782 units: units.to_string(),
4783 });
4784 }
4785 Err(format!(
4786 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4787 ))
4788}
4789
/// `vela predict`: record a falsifiable prediction against existing findings.
///
/// Validates that `confidence` lies in [0, 1], parses `outcome` with
/// `parse_expected_outcome`, and requires every comma-separated entry of
/// `target_csv` to be a `vf_` finding id already present in the frontier.
/// A prediction whose derived id already exists is reported and skipped.
/// On success the prediction is appended and the frontier saved in place.
#[allow(clippy::too_many_arguments)]
fn cmd_predict(
    frontier: &Path,
    by: &str,
    claim: &str,
    criterion: &str,
    resolves_by: Option<&str>,
    confidence: f64,
    target_csv: &str,
    outcome: &str,
    conditions_text: &str,
    json: bool,
) {
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Split the comma-separated target list; blank entries (e.g. from a
    // trailing comma) are dropped.
    let targets: Vec<String> = target_csv
        .split(',')
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
        .collect();
    // Every target must be a finding id that exists in the loaded frontier.
    for t in &targets {
        if !t.starts_with("vf_") {
            fail(&format!("target `{t}` is not a vf_ id"));
        }
        if !project.findings.iter().any(|f| f.id == *t) {
            fail(&format!("target `{t}` not present in frontier"));
        }
    }

    // Derive coarse condition flags from case-insensitive keyword matches in
    // the free-text conditions. Heuristic substring checks, not a parser.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro"),
        in_vivo: lower.contains("in vivo"),
        human_data: lower.contains("human") || lower.contains("clinical"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let prediction = crate::bundle::Prediction::new(
        claim.to_string(),
        targets,
        None,
        resolves_by.map(|s| s.to_string()),
        criterion.to_string(),
        expected,
        by.to_string(),
        confidence,
        conditions,
    );

    // Idempotency guard: a prediction with the same derived id is reported
    // and skipped rather than recorded twice.
    if project.predictions.iter().any(|p| p.id == prediction.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "predict",
                    "reason": "prediction_already_exists",
                    "id": prediction.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} prediction {} already exists in {}; skipping.",
                style::warn("predict"),
                prediction.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = prediction.id.clone();
    project.predictions.push(prediction);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "predict",
                "id": new_id,
                "made_by": by,
                "confidence": confidence,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize predict result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · PREDICT · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" by: {by}");
        println!(" confidence: {confidence:.3}");
        if let Some(d) = resolves_by {
            println!(" resolves by: {d}");
        }
        println!(" outcome: {outcome}");
        println!(" claim: {}", truncate(claim, 88));
        println!();
        println!(
            " {} prediction recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
4916
/// `vela resolve`: record the resolution of an existing `vpred_` prediction.
///
/// Validates the prediction id prefix and that `confidence` is in [0, 1],
/// checks the prediction exists in the frontier, then appends a resolution
/// (skipping duplicates by derived id) and saves the frontier in place.
/// The source title, when non-empty, is stored as a single evidence span.
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    // Minimal evidence record for the resolution; only the source title is
    // captured (as an evidence span) when provided.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // NOTE(review): `doi` is accepted by the CLI but explicitly discarded
    // here — presumably intended for provenance on the evidence record;
    // confirm before callers rely on it being stored.
    let _ = doi; let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Idempotency guard: an identical resolution (same derived id) is
    // reported and skipped rather than recorded twice.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" prediction: {prediction_id}");
        println!(
            " matched: {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!(" by: {by}");
        println!(" outcome: {}", truncate(actual_outcome, 80));
        println!();
        println!(
            " {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5043
5044fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5046 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5047
5048 let resolved_ids: std::collections::HashSet<&str> = project
5049 .resolutions
5050 .iter()
5051 .map(|r| r.prediction_id.as_str())
5052 .collect();
5053
5054 let mut filtered: Vec<&crate::bundle::Prediction> = project
5055 .predictions
5056 .iter()
5057 .filter(|p| by.is_none_or(|b| p.made_by == b))
5058 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5059 .collect();
5060 filtered.sort_by(|a, b| {
5061 a.resolves_by
5062 .as_deref()
5063 .unwrap_or("9999")
5064 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5065 });
5066
5067 if json {
5068 let payload: Vec<serde_json::Value> = filtered
5069 .iter()
5070 .map(|p| {
5071 json!({
5072 "id": p.id,
5073 "claim_text": p.claim_text,
5074 "made_by": p.made_by,
5075 "confidence": p.confidence,
5076 "predicted_at": p.predicted_at,
5077 "resolves_by": p.resolves_by,
5078 "expected_outcome": p.expected_outcome,
5079 "resolved": resolved_ids.contains(p.id.as_str()),
5080 })
5081 })
5082 .collect();
5083 println!(
5084 "{}",
5085 serde_json::to_string_pretty(&json!({
5086 "ok": true,
5087 "command": "predictions",
5088 "frontier": frontier.display().to_string(),
5089 "count": payload.len(),
5090 "predictions": payload,
5091 }))
5092 .expect("serialize predictions")
5093 );
5094 return;
5095 }
5096
5097 println!();
5098 println!(
5099 " {}",
5100 format!("VELA · PREDICTIONS · {}", frontier.display())
5101 .to_uppercase()
5102 .dimmed()
5103 );
5104 println!(" {}", style::tick_row(60));
5105 if filtered.is_empty() {
5106 println!(" (no predictions matching filters)");
5107 return;
5108 }
5109 for p in &filtered {
5110 let resolved = resolved_ids.contains(p.id.as_str());
5111 let chip = if resolved {
5112 style::ok("resolved")
5113 } else {
5114 style::warn("open")
5115 };
5116 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5117 println!(
5118 " · {} {} by {} → {}",
5119 p.id.dimmed(),
5120 chip,
5121 p.made_by,
5122 deadline,
5123 );
5124 println!(" claim: {}", truncate(&p.claim_text, 90));
5125 println!(" confidence: {:.2}", p.confidence);
5126 }
5127}
5128
5129fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5134 use chrono::DateTime;
5135
5136 let now_dt = match now_override {
5137 Some(s) => DateTime::parse_from_rfc3339(s)
5138 .map(|dt| dt.with_timezone(&chrono::Utc))
5139 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5140 None => chrono::Utc::now(),
5141 };
5142
5143 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5144 if dry_run {
5145 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5147 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5148 if json {
5149 println!(
5150 "{}",
5151 serde_json::to_string_pretty(&json!({
5152 "ok": true,
5153 "command": "predictions.expire",
5154 "dry_run": true,
5155 "report": report,
5156 }))
5157 .expect("serialize predictions.expire (dry-run)")
5158 );
5159 } else {
5160 println!(
5161 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5162 style::ok("ok"),
5163 report.now,
5164 report.newly_expired.len(),
5165 report.already_expired.len(),
5166 report.already_resolved.len(),
5167 report.still_open.len(),
5168 );
5169 for id in &report.newly_expired {
5170 println!(" · {id}");
5171 }
5172 }
5173 return;
5174 }
5175
5176 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5177 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5178
5179 if json {
5180 println!(
5181 "{}",
5182 serde_json::to_string_pretty(&json!({
5183 "ok": true,
5184 "command": "predictions.expire",
5185 "report": report,
5186 }))
5187 .expect("serialize predictions.expire")
5188 );
5189 } else {
5190 println!(
5191 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5192 style::ok("expired"),
5193 report.now,
5194 report.newly_expired.len(),
5195 report.already_expired.len(),
5196 report.already_resolved.len(),
5197 report.still_open.len(),
5198 );
5199 for id in &report.newly_expired {
5200 println!(" · {id}");
5201 }
5202 }
5203}
5204
5205fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5206 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5207 let records = match actor {
5208 Some(a) => {
5209 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5210 .map(|r| vec![r])
5211 .unwrap_or_default()
5212 }
5213 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5214 };
5215
5216 if json {
5217 println!(
5218 "{}",
5219 serde_json::to_string_pretty(&json!({
5220 "ok": true,
5221 "command": "calibration",
5222 "frontier": frontier.display().to_string(),
5223 "filter_actor": actor,
5224 "records": records,
5225 }))
5226 .expect("serialize calibration")
5227 );
5228 return;
5229 }
5230
5231 println!();
5232 println!(
5233 " {}",
5234 format!("VELA · CALIBRATION · {}", frontier.display())
5235 .to_uppercase()
5236 .dimmed()
5237 );
5238 println!(" {}", style::tick_row(60));
5239 if records.is_empty() {
5240 println!(" (no calibration records)");
5241 return;
5242 }
5243 for r in &records {
5244 println!(" · {}", r.actor);
5245 println!(
5246 " predictions: {} resolved: {} hits: {}",
5247 r.n_predictions, r.n_resolved, r.n_hit
5248 );
5249 match r.hit_rate {
5250 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5251 None => println!(" hit rate: n/a"),
5252 }
5253 match r.brier_score {
5254 Some(b) => println!(
5255 " brier: {:.4} (lower is better; 0.25 = chance)",
5256 b
5257 ),
5258 None => println!(" brier: n/a"),
5259 }
5260 match r.log_score {
5261 Some(l) => println!(
5262 " log score: {:.4} (higher is better; 0 = perfect)",
5263 l
5264 ),
5265 None => println!(" log score: n/a"),
5266 }
5267 }
5268}
5269
/// `vela dataset add`: register a dataset (name, optional version, content
/// hash, and source metadata) in a frontier.
///
/// Builds a manual-curation provenance record from the source title/DOI/URL/
/// license, constructs the dataset, skips it if one with the same derived id
/// already exists, and otherwise appends it and saves the frontier in place.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance is always recorded as a manually curated data release,
    // stamped with the current time and this binary's version.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // Row count is not part of the constructor; set it after the fact.
    dataset.row_count = row_count;

    // Idempotency guard: a dataset with the same derived id is reported and
    // skipped rather than recorded twice.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" name: {name}");
        if let Some(v) = version {
            println!(" version: {v}");
        }
        println!(" content_hash: {content_hash}");
        if let Some(u) = url {
            println!(" url: {u}");
        }
        println!(" source: {source_title}");
        println!();
        println!(
            " {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5387
/// `vela negative-result add`: build a `NegativeResult` from CLI flags and
/// append it to the frontier at `frontier` via `state::add_negative_result`.
///
/// `kind` selects the payload shape and which flags are mandatory:
/// - `"registered_trial"`: endpoint, intervention, comparator, population,
///   n-enrolled, power, ci-lower and ci-upper (effect-size threshold and
///   registry id remain optional);
/// - `"exploratory"`: reagent, observation, attempts.
///
/// A missing required flag or an unknown kind aborts the process via
/// `fail_return` (which diverges — it never returns here).
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Resolve the kind-specific payload; each arm enforces its own required flags.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Conditions carry only the free-text description; `clinical_trial` is
    // inferred from the kind and every other structured field stays unset.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance for a manually curated deposit; source_type mirrors the kind.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        // Machine-readable: the raw report as pretty JSON.
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        // Human-readable summary banner.
        println!();
        println!(
            " {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5550
5551fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
5554 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5555 let filtered: Vec<&crate::bundle::NegativeResult> = project
5556 .negative_results
5557 .iter()
5558 .filter(|nr| {
5559 target
5560 .map(|t| nr.target_findings.iter().any(|f| f == t))
5561 .unwrap_or(true)
5562 })
5563 .collect();
5564
5565 if json {
5566 println!(
5567 "{}",
5568 serde_json::to_string_pretty(&json!({
5569 "ok": true,
5570 "command": "negative_results",
5571 "frontier": frontier.display().to_string(),
5572 "count": filtered.len(),
5573 "negative_results": filtered,
5574 }))
5575 .expect("serialize negative_results")
5576 );
5577 return;
5578 }
5579
5580 if filtered.is_empty() {
5581 println!(" no negative_results in {}", frontier.display());
5582 return;
5583 }
5584
5585 println!();
5586 println!(
5587 " {} ({})",
5588 "VELA · NEGATIVE RESULTS".dimmed(),
5589 filtered.len()
5590 );
5591 println!(" {}", style::tick_row(60));
5592 for nr in &filtered {
5593 let kind_label = match &nr.kind {
5594 crate::bundle::NegativeResultKind::RegisteredTrial {
5595 endpoint, power, ..
5596 } => format!("trial · {endpoint} · power {power:.2}"),
5597 crate::bundle::NegativeResultKind::Exploratory {
5598 reagent, attempts, ..
5599 } => format!("exploratory · {reagent} · {attempts} attempts"),
5600 };
5601 let retracted = if nr.retracted { " [retracted]" } else { "" };
5602 let review = nr
5603 .review_state
5604 .as_ref()
5605 .map(|s| format!(" [{s:?}]"))
5606 .unwrap_or_default();
5607 println!(" {}{}{}", nr.id, retracted, review);
5608 println!(" {kind_label}");
5609 if !nr.target_findings.is_empty() {
5610 println!(" targets: {}", nr.target_findings.join(", "));
5611 }
5612 }
5613 println!();
5614}
5615
5616#[allow(clippy::too_many_arguments)]
5618fn cmd_tier_set(
5619 frontier: &Path,
5620 object_type: &str,
5621 object_id: &str,
5622 tier: &str,
5623 actor: &str,
5624 reason: &str,
5625 json: bool,
5626) {
5627 let parsed_tier =
5628 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5629 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5630 .unwrap_or_else(|e| fail_return(&e));
5631
5632 if json {
5633 println!(
5634 "{}",
5635 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5636 );
5637 } else {
5638 println!();
5639 println!(
5640 " {}",
5641 format!("VELA · TIER · {}", object_id)
5642 .to_uppercase()
5643 .dimmed()
5644 );
5645 println!(" {}", style::tick_row(60));
5646 println!(" object_type: {object_type}");
5647 println!(" new_tier: {}", parsed_tier.canonical());
5648 println!(" actor: {actor}");
5649 if let Some(ev) = &report.applied_event_id {
5650 println!(" event: {ev}");
5651 }
5652 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5653 }
5654}
5655
5656#[allow(clippy::too_many_arguments)]
5658fn cmd_trajectory_create(
5659 frontier: &Path,
5660 deposited_by: &str,
5661 reason: &str,
5662 targets: Vec<String>,
5663 notes: &str,
5664 json: bool,
5665) {
5666 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5667 .unwrap_or_else(|e| fail_return(&e));
5668
5669 if json {
5670 println!(
5671 "{}",
5672 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5673 );
5674 } else {
5675 println!();
5676 println!(
5677 " {}",
5678 format!("VELA · TRAJECTORY · {}", report.finding_id)
5679 .to_uppercase()
5680 .dimmed()
5681 );
5682 println!(" {}", style::tick_row(60));
5683 println!(" deposited_by: {deposited_by}");
5684 if let Some(ev) = &report.applied_event_id {
5685 println!(" event: {ev}");
5686 }
5687 println!(
5688 " {} trajectory opened in {}",
5689 style::ok("ok"),
5690 frontier.display()
5691 );
5692 }
5693}
5694
5695#[allow(clippy::too_many_arguments)]
5697fn cmd_trajectory_step(
5698 frontier: &Path,
5699 trajectory_id: &str,
5700 kind: &str,
5701 description: &str,
5702 actor: &str,
5703 reason: &str,
5704 references: Vec<String>,
5705 json: bool,
5706) {
5707 let parsed_kind = match kind {
5708 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5709 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5710 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5711 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5712 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5713 other => fail_return(&format!(
5714 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5715 )),
5716 };
5717 let report = state::append_trajectory_step(
5718 frontier,
5719 trajectory_id,
5720 parsed_kind,
5721 description,
5722 actor,
5723 references,
5724 reason,
5725 )
5726 .unwrap_or_else(|e| fail_return(&e));
5727
5728 if json {
5729 println!(
5730 "{}",
5731 serde_json::to_string_pretty(&report).expect("serialize step report")
5732 );
5733 } else {
5734 println!();
5735 println!(
5736 " {}",
5737 format!("VELA · STEP · {}", report.finding_id)
5738 .to_uppercase()
5739 .dimmed()
5740 );
5741 println!(" {}", style::tick_row(60));
5742 println!(" trajectory: {trajectory_id}");
5743 println!(" kind: {kind}");
5744 println!(" actor: {actor}");
5745 println!(
5746 " {} step appended in {}",
5747 style::ok("ok"),
5748 frontier.display()
5749 );
5750 }
5751}
5752
/// `vela trajectories`: list trajectories, optionally filtered to those that
/// reference a target finding id, as JSON or as a human-readable listing
/// that includes an 80-char preview of every step.
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No `--target` means no filter: `unwrap_or(true)` keeps everything.
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!(" {}", style::tick_row(60));
    for t in &filtered {
        // Status suffixes: retraction flag plus optional review state.
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", t.id, retracted, review);
        println!(
            " {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            // Map each typed step kind back to its CLI keyword for display.
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            // char-based truncation keeps multi-byte text safe.
            let preview: String = step.description.chars().take(80).collect();
            println!(" [{label}] {preview}");
        }
    }
    println!();
}
5820
5821fn cmd_datasets(frontier: &Path, json: bool) {
5823 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5824 if json {
5825 println!(
5826 "{}",
5827 serde_json::to_string_pretty(&json!({
5828 "ok": true,
5829 "command": "datasets",
5830 "frontier": frontier.display().to_string(),
5831 "count": project.datasets.len(),
5832 "datasets": project.datasets,
5833 }))
5834 .expect("serialize datasets")
5835 );
5836 return;
5837 }
5838 println!();
5839 println!(
5840 " {}",
5841 format!("VELA · DATASETS · {}", frontier.display())
5842 .to_uppercase()
5843 .dimmed()
5844 );
5845 println!(" {}", style::tick_row(60));
5846 if project.datasets.is_empty() {
5847 println!(" (no datasets registered)");
5848 return;
5849 }
5850 for ds in &project.datasets {
5851 let v = ds
5852 .version
5853 .as_deref()
5854 .map(|s| format!("@{s}"))
5855 .unwrap_or_default();
5856 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
5857 if let Some(u) = &ds.url {
5858 println!(" url: {}", truncate(u, 80));
5859 }
5860 println!(" hash: {}", truncate(&ds.content_hash, 80));
5861 }
5862}
5863
/// `vela code add`: register a code artifact (language, location, content
/// hash) in the frontier, skipping the write when an artifact with the same
/// derived id already exists.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Normalize the CLI line flags: both set → (start, end); start only →
    // a single-line range (start, start).
    // NOTE(review): --line-end without --line-start falls into `_` and is
    // silently dropped — confirm that is intended rather than an error.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Idempotence: if the derived id already exists, report and do not write.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    // Persist the new artifact before reporting success.
    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" language: {language}");
        if let Some(r) = repo_url {
            println!(" repo: {r}");
        }
        if let Some(c) = commit {
            println!(" commit: {c}");
        }
        println!(" path: {path}");
        if let Some((a, b)) = line_range {
            println!(" lines: {a}-{b}");
        }
        println!(" content_hash: {content_hash}");
        println!();
        println!(
            " {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5963
5964fn cmd_code_artifacts(frontier: &Path, json: bool) {
5966 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5967 if json {
5968 println!(
5969 "{}",
5970 serde_json::to_string_pretty(&json!({
5971 "ok": true,
5972 "command": "code-artifacts",
5973 "frontier": frontier.display().to_string(),
5974 "count": project.code_artifacts.len(),
5975 "code_artifacts": project.code_artifacts,
5976 }))
5977 .expect("serialize code-artifacts")
5978 );
5979 return;
5980 }
5981 println!();
5982 println!(
5983 " {}",
5984 format!("VELA · CODE · {}", frontier.display())
5985 .to_uppercase()
5986 .dimmed()
5987 );
5988 println!(" {}", style::tick_row(60));
5989 if project.code_artifacts.is_empty() {
5990 println!(" (no code artifacts registered)");
5991 return;
5992 }
5993 for c in &project.code_artifacts {
5994 let lr = c
5995 .line_range
5996 .map(|(a, b)| format!(":{a}-{b}"))
5997 .unwrap_or_default();
5998 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
5999 if let Some(r) = &c.repo_url {
6000 println!(" repo: {}", truncate(r, 80));
6001 }
6002 if let Some(g) = &c.git_commit {
6003 println!(" commit: {g}");
6004 }
6005 }
6006}
6007
6008fn sha256_for_bytes(bytes: &[u8]) -> String {
6009 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6010}
6011
/// Returns the hex portion of a `sha256:`-prefixed content hash, or the
/// input unchanged when the prefix is absent.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6015
6016fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
6017 let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
6018 return None;
6019 };
6020 let hex = sha256_hex_part(content_hash);
6021 let rel = format!(".vela/artifact-blobs/sha256/{hex}");
6022 let path = root.join(&rel);
6023 if let Some(parent) = path.parent() {
6024 std::fs::create_dir_all(parent).unwrap_or_else(|e| {
6025 fail(&format!(
6026 "Failed to create artifact blob directory {}: {e}",
6027 parent.display()
6028 ))
6029 });
6030 }
6031 if !path.is_file() {
6032 std::fs::write(&path, bytes)
6033 .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
6034 }
6035 Some(rel)
6036}
6037
6038fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6039 let mut out = BTreeMap::new();
6040 for pair in pairs {
6041 let Some((key, value)) = pair.split_once('=') else {
6042 fail(&format!("--metadata must be key=value, got {pair:?}"));
6043 };
6044 let key = key.trim();
6045 if key.is_empty() {
6046 fail("--metadata key must be non-empty");
6047 }
6048 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6049 }
6050 out
6051}
6052
/// Maps an artifact `kind` to its provenance `source_type` label; unknown
/// kinds fall back to `database_record`.
fn artifact_source_type(kind: &str) -> &'static str {
    match kind {
        "dataset" => "data_release",
        "model_output" => "model_output",
        "lab_file" => "lab_notebook",
        "clinical_trial_record" | "protocol" => "clinical_trial",
        _ => "database_record",
    }
}
6063
6064fn artifact_provenance(
6065 kind: &str,
6066 title: &str,
6067 url: Option<&str>,
6068 doi: Option<&str>,
6069 license: Option<&str>,
6070) -> crate::bundle::Provenance {
6071 crate::bundle::Provenance {
6072 source_type: artifact_source_type(kind).to_string(),
6073 doi: doi.map(str::to_string),
6074 pmid: None,
6075 pmc: None,
6076 openalex_id: None,
6077 url: url.map(str::to_string),
6078 title: title.to_string(),
6079 authors: Vec::new(),
6080 year: None,
6081 journal: None,
6082 license: license.map(str::to_string),
6083 publisher: None,
6084 funders: Vec::new(),
6085 extraction: crate::bundle::Extraction {
6086 method: "artifact_deposit".to_string(),
6087 model: None,
6088 model_version: None,
6089 extracted_at: chrono::Utc::now().to_rfc3339(),
6090 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6091 },
6092 review: None,
6093 citation_count: None,
6094 }
6095}
6096
/// `vela artifact add`: register an artifact in the frontier. Content may be
/// given as a local `--file` (hashed and copied into repo-local blob storage
/// when possible) and/or a `--url`; otherwise an explicit `--content-hash`
/// is required. Records provenance, metadata key=value pairs, target
/// findings, and the access tier, then appends via `state::add_artifact`.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    // Storage defaults assume a URL-only "pointer"; the --file branch below
    // may upgrade these to local_blob/local_file.
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // If the caller also supplied --content-hash, it must match the file
        // bytes (hex comparison is case-insensitive, prefix-agnostic).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer repo-local content-addressed blob storage; fall back to
        // recording the original file path when not inside a vela repo.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without a file we never computed a hash, so one must be supplied.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Provenance falls back to --url / the artifact name when the dedicated
    // source flags are absent.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" name: {name}");
        println!(" hash: {content_hash_for_print}");
        println!(
            " {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6212
6213fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
6214 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6215 let filtered: Vec<&crate::bundle::Artifact> = project
6216 .artifacts
6217 .iter()
6218 .filter(|artifact| {
6219 target
6220 .map(|t| artifact.target_findings.iter().any(|f| f == t))
6221 .unwrap_or(true)
6222 })
6223 .collect();
6224
6225 if json_out {
6226 println!(
6227 "{}",
6228 serde_json::to_string_pretty(&json!({
6229 "ok": true,
6230 "command": "artifacts",
6231 "frontier": frontier.display().to_string(),
6232 "count": filtered.len(),
6233 "artifacts": filtered,
6234 }))
6235 .expect("serialize artifacts")
6236 );
6237 return;
6238 }
6239
6240 println!();
6241 println!(
6242 " {}",
6243 format!("VELA · ARTIFACTS · {}", frontier.display())
6244 .to_uppercase()
6245 .dimmed()
6246 );
6247 println!(" {}", style::tick_row(60));
6248 if filtered.is_empty() {
6249 println!(" (no artifacts registered)");
6250 return;
6251 }
6252 for artifact in filtered {
6253 println!(
6254 " · {} {} · {}",
6255 artifact.id.dimmed(),
6256 artifact.kind,
6257 artifact.name
6258 );
6259 if let Some(locator) = &artifact.locator {
6260 println!(" locator: {}", truncate(locator, 88));
6261 }
6262 if !artifact.target_findings.is_empty() {
6263 println!(" targets: {}", artifact.target_findings.join(", "));
6264 }
6265 }
6266}
6267
/// `vela artifact audit`: verify the frontier's artifacts (local blob
/// presence, hashes, per-kind counts). Exits with status 1 when the audit
/// reports any issue, in both JSON and human-readable modes.
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        // Non-zero exit so scripts can detect a failed audit from JSON mode too.
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" artifacts: {}", audit.artifact_count);
    println!(" checked local blobs: {}", audit.checked_local_blobs);
    println!(" local blob bytes: {}", audit.local_blob_bytes);
    if !audit.by_kind.is_empty() {
        // Render per-kind counts as "kind:count" pairs on one line.
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!(" kinds: {kinds}");
    }
    if audit.ok {
        println!(" {} artifact audit passed.", style::ok("ok"));
        return;
    }
    // Audit failed: list every issue, then exit non-zero.
    for issue in &audit.issues {
        println!(
            " {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
6317
/// `vela decision-brief`: render the decision-brief projection (questions
/// with answers, caveats, supporting/tension/gap findings and artifacts).
/// Exits with status 1 when the projection fails to load or validate.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        // Non-zero exit so scripts can detect failure from JSON mode too.
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: `ok == true` implies the projection is present.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!(" · {} · {}", question.id.dimmed(), question.title);
        println!(" answer: {}", wrap_line(&question.short_answer, 82));
        println!(" caveat: {}", wrap_line(&question.caveat, 82));
        println!(" support: {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!(" tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!(" gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!(" artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            " would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6367
6368fn cmd_trial_summary(frontier: &Path, json_out: bool) {
6369 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6370 let report = decision::load_trial_outcomes(frontier, &project);
6371 if json_out {
6372 println!(
6373 "{}",
6374 serde_json::to_string_pretty(&report).expect("serialize trial summary report")
6375 );
6376 if !report.ok {
6377 std::process::exit(1);
6378 }
6379 return;
6380 }
6381 println!();
6382 println!(
6383 " {}",
6384 format!("VELA · TRIAL SUMMARY · {}", project.project.name)
6385 .to_uppercase()
6386 .dimmed()
6387 );
6388 println!(" {}", style::tick_row(60));
6389 if !report.ok {
6390 print_projection_issues(&report.issues, report.error.as_deref());
6391 std::process::exit(1);
6392 }
6393 let outcomes = report
6394 .projection
6395 .as_ref()
6396 .expect("ok trial report carries projection");
6397 for row in &outcomes.rows {
6398 println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
6399 println!(" population: {}", wrap_line(&row.population, 82));
6400 println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
6401 println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
6402 println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
6403 println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
6404 println!(" status: {}", wrap_line(&row.regulatory_status, 82));
6405 if !row.finding_ids.is_empty() {
6406 println!(" findings: {}", row.finding_ids.join(", "));
6407 }
6408 if !row.artifact_ids.is_empty() {
6409 println!(" artifacts: {}", row.artifact_ids.join(", "));
6410 }
6411 }
6412}
6413
6414fn cmd_source_verification(frontier: &Path, json_out: bool) {
6415 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6416 let report = decision::load_source_verification(frontier, &project);
6417 if json_out {
6418 println!(
6419 "{}",
6420 serde_json::to_string_pretty(&report).expect("serialize source verification report")
6421 );
6422 if !report.ok {
6423 std::process::exit(1);
6424 }
6425 return;
6426 }
6427 println!();
6428 println!(
6429 " {}",
6430 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
6431 .to_uppercase()
6432 .dimmed()
6433 );
6434 println!(" {}", style::tick_row(60));
6435 if !report.ok {
6436 print_projection_issues(&report.issues, report.error.as_deref());
6437 std::process::exit(1);
6438 }
6439 let verification = report
6440 .projection
6441 .as_ref()
6442 .expect("ok source verification report carries projection");
6443 println!(" verified_at: {}", verification.verified_at);
6444 for source in &verification.sources {
6445 println!(" · {} · {}", source.id.dimmed(), source.title);
6446 println!(" agency: {}", source.agency);
6447 println!(" url: {}", truncate(&source.url, 88));
6448 println!(" status: {}", wrap_line(&source.current_status, 82));
6449 }
6450}
6451
/// `vela source-ingest-plan`: render the source ingest plan projection (one
/// entry per source to ingest, with category, priority, status and
/// locator). Exits with status 1 when the projection fails to load or
/// validate.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        // Non-zero exit so scripts can detect failure from JSON mode too.
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: `ok == true` implies the projection is present.
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!(" verified_at: {}", plan.verified_at);
    println!(" entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            " · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!(" name: {}", wrap_line(&entry.name, 82));
        println!(" locator: {}", truncate(&entry.locator, 88));
        println!(" use: {}", wrap_line(&entry.target_use, 82));
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!(" artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!(" findings: {}", entry.target_findings.join(", "));
        }
    }
}
6502
6503fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6504 if let Some(error) = error {
6505 println!(" {} {error}", style::lost("unavailable"));
6506 }
6507 for issue in issues {
6508 println!(
6509 " {} {}: {}",
6510 style::lost("invalid"),
6511 issue.path,
6512 issue.message
6513 );
6514 }
6515}
6516
/// Greedy word-wrap of `text` to at most `max_chars` characters per line.
/// Text that already fits is returned verbatim (original whitespace kept);
/// otherwise whitespace runs collapse to single spaces and continuation
/// lines get a fixed indent. A single word longer than `max_chars` is kept
/// whole on its own line.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut out = String::new();
    let mut line_len = 0usize;
    for word in text.split_whitespace() {
        let word_len = word.chars().count();
        // The word fits when the line is empty or when it plus a joining
        // space stays within the budget.
        let fits = line_len == 0 || line_len + 1 + word_len <= max_chars;
        if fits {
            if line_len > 0 {
                out.push(' ');
                line_len += 1;
            }
            out.push_str(word);
            line_len += word_len;
        } else {
            out.push('\n');
            out.push_str(" ");
            out.push_str(word);
            line_len = word_len;
        }
    }
    out
}
6541
6542fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6543 study.pointer(pointer).and_then(Value::as_str)
6544}
6545
6546fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6547 study
6548 .pointer(pointer)
6549 .and_then(Value::as_array)
6550 .map(|items| {
6551 items
6552 .iter()
6553 .filter_map(Value::as_str)
6554 .map(str::to_string)
6555 .collect()
6556 })
6557 .unwrap_or_default()
6558}
6559
6560fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6561 study
6562 .pointer(pointer)
6563 .and_then(Value::as_array)
6564 .map(|items| {
6565 items
6566 .iter()
6567 .filter_map(|item| item.get(field).and_then(Value::as_str))
6568 .map(str::to_string)
6569 .collect()
6570 })
6571 .unwrap_or_default()
6572}
6573
6574fn insert_string_vec_metadata(
6575 metadata: &mut BTreeMap<String, Value>,
6576 key: &str,
6577 values: Vec<String>,
6578) {
6579 if values.is_empty() {
6580 return;
6581 }
6582 metadata.insert(
6583 key.to_string(),
6584 Value::Array(values.into_iter().map(Value::String).collect()),
6585 );
6586}
6587
/// Import a ClinicalTrials.gov study into the frontier as a
/// `clinical_trial_record` artifact.
///
/// The raw study JSON comes either from `input_json` (a pre-downloaded v2 API
/// payload) or from a live fetch of the ClinicalTrials.gov v2 API for
/// `nct_id`. The payload is canonicalized and content-hashed, summary
/// metadata (status, dates, phases, conditions, interventions, primary
/// outcomes, has_results) is extracted via JSON pointers, and the artifact is
/// recorded with `state::add_artifact`. Failures are reported through
/// `fail`/`fail_return` (defined elsewhere in this file) and the command does
/// not continue past them. Output is a JSON payload when `json_out` is set,
/// otherwise a styled human-readable summary.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a local file when supplied; otherwise fetch from the live API.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Treat HTTP error statuses (404, 5xx, ...) as fetch failures too.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Canonical bytes make the content hash stable across key order/whitespace.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Try to store the blob locally; fall back to referencing the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    // A locator inside .vela/ means the blob was written locally.
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id recorded inside the payload over the user-supplied one.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Title: brief title, falling back to official title, then the NCT id.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Optional scalar fields: copied into metadata only when present.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // List-valued fields: inserted only when non-empty (see helper).
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    // Persist the artifact into frontier state, capturing the applied event.
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6764
/// Record a replication attempt against an existing `vf_` finding.
///
/// Validates `outcome` against `VALID_REPLICATION_OUTCOMES` and requires
/// `target` to be a `vf_`-prefixed finding present in the frontier. Builds a
/// `Replication` record (the condition flags are inferred with keyword
/// heuristics over the free-text `conditions_text`), appends it, and — unless
/// `no_cascade` is set — propagates the outcome to dependents via
/// `propagate::propagate_correction` before saving. A replication whose
/// derived id already exists is reported and skipped without writing.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    // Input validation: outcome must be one of the known values, and the
    // target must look like (and actually be) a finding in this frontier.
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Keyword heuristics over the free-text conditions to fill the boolean
    // condition flags (in vitro / in vivo / human / clinical trial).
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        // "replicated" is the only outcome counted as a successful replication.
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            // This command is driven by a human operator, not an extractor model.
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotency guard: a replication with this derived id already exists,
    // so report and bail without touching the frontier.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Unless suppressed, cascade the outcome to dependent findings and record
    // the generated review events, then refresh derived stats.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    // Persist everything (new replication + any cascade effects) in one save.
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" target: {target}");
        println!(" outcome: {outcome}");
        println!(" attempted by: {attempted_by}");
        println!(" conditions: {conditions_text}");
        println!(" source: {source_title}");
        if let Some(d) = doi {
            println!(" doi: {d}");
        }
        println!();
        println!(
            " {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
6986
6987fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
6989 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6990 let filtered: Vec<&crate::bundle::Replication> = project
6991 .replications
6992 .iter()
6993 .filter(|r| target.is_none_or(|t| r.target_finding == t))
6994 .collect();
6995
6996 if json {
6997 let payload = json!({
6998 "ok": true,
6999 "command": "replications",
7000 "frontier": frontier.display().to_string(),
7001 "filter_target": target,
7002 "count": filtered.len(),
7003 "replications": filtered,
7004 });
7005 println!(
7006 "{}",
7007 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
7008 );
7009 return;
7010 }
7011
7012 println!();
7013 let header = match target {
7014 Some(t) => format!("VELA · REPLICATIONS · {t}"),
7015 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
7016 };
7017 println!(" {}", header.to_uppercase().dimmed());
7018 println!(" {}", style::tick_row(60));
7019 if filtered.is_empty() {
7020 println!(" (no replications recorded)");
7021 return;
7022 }
7023 for rep in &filtered {
7024 let outcome_chip = match rep.outcome.as_str() {
7025 "replicated" => style::ok(&rep.outcome),
7026 "failed" => style::lost(&rep.outcome),
7027 "partial" => style::warn(&rep.outcome),
7028 _ => rep.outcome.clone().normal().to_string(),
7029 };
7030 println!(
7031 " · {} {} by {}",
7032 rep.id.dimmed(),
7033 outcome_chip,
7034 rep.attempted_by
7035 );
7036 println!(" target: {}", rep.target_finding);
7037 if !rep.conditions.text.is_empty() {
7038 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7039 }
7040 if !rep.provenance.title.is_empty() {
7041 println!(" source: {}", truncate(&rep.provenance.title, 80));
7042 }
7043 }
7044}
7045
/// Universal ingest entry point: route `path` to the right subcommand.
///
/// Routing rules:
/// - `doi:` / `pmid:` / `nct:` URIs → metadata fetch only (no frontier write;
///   a hint about next steps is printed in non-JSON mode).
/// - Files: `.pdf` → scout, `.md`/`.markdown` → compile-notes,
///   `.csv`/`.tsv` → compile-data, `.json` → artifact-to-state; anything
///   else fails with an "unsupported file type" error.
/// - Directories: counts handlable files by extension and dispatches each
///   matching handler in sequence; a directory with none of those types is
///   treated as a code tree and sent to compile-code.
/// Nonexistent paths (and anything that is neither file nor directory) abort
/// via `fail`.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    // Scheme-style URIs are handled before any filesystem checks.
    let lowered = path.trim().to_lowercase();
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                " vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                " next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    // Single file: dispatch purely on the (lowercased) extension.
    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                // JSON artifacts get a default bot actor unless one was given.
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count files per handlable extension (one directory
        // level only — read_dir does not recurse).
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are ignored rather than reported.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        // How many distinct handler categories have at least one file.
        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing handlable: treat the whole directory as a code tree.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                " vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                " pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each applicable handler once over the directory.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            // JSON files are processed individually, unlike the bulk handlers.
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                " vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7245
7246#[allow(clippy::too_many_arguments)]
7247async fn cmd_compile_data(
7249 root: &Path,
7250 frontier: &Path,
7251 backend: Option<&str>,
7252 sample_rows: Option<usize>,
7253 dry_run: bool,
7254 json_out: bool,
7255) {
7256 match DATASETS_HANDLER.get() {
7257 Some(handler) => {
7258 handler(
7259 root.to_path_buf(),
7260 frontier.to_path_buf(),
7261 backend.map(String::from),
7262 sample_rows,
7263 dry_run,
7264 json_out,
7265 )
7266 .await;
7267 }
7268 None => {
7269 eprintln!(
7270 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7271 style::err_prefix()
7272 );
7273 std::process::exit(1);
7274 }
7275 }
7276}
7277
7278async fn cmd_review_pending(
7281 frontier: &Path,
7282 backend: Option<&str>,
7283 max_proposals: Option<usize>,
7284 batch_size: usize,
7285 dry_run: bool,
7286 json_out: bool,
7287) {
7288 match REVIEWER_HANDLER.get() {
7289 Some(handler) => {
7290 handler(
7291 frontier.to_path_buf(),
7292 backend.map(String::from),
7293 max_proposals,
7294 batch_size,
7295 dry_run,
7296 json_out,
7297 )
7298 .await;
7299 }
7300 None => {
7301 eprintln!(
7302 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7303 style::err_prefix()
7304 );
7305 std::process::exit(1);
7306 }
7307 }
7308}
7309
7310async fn cmd_find_tensions(
7313 frontier: &Path,
7314 backend: Option<&str>,
7315 max_findings: Option<usize>,
7316 dry_run: bool,
7317 json_out: bool,
7318) {
7319 match TENSIONS_HANDLER.get() {
7320 Some(handler) => {
7321 handler(
7322 frontier.to_path_buf(),
7323 backend.map(String::from),
7324 max_findings,
7325 dry_run,
7326 json_out,
7327 )
7328 .await;
7329 }
7330 None => {
7331 eprintln!(
7332 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7333 style::err_prefix()
7334 );
7335 std::process::exit(1);
7336 }
7337 }
7338}
7339
7340async fn cmd_plan_experiments(
7343 frontier: &Path,
7344 backend: Option<&str>,
7345 max_findings: Option<usize>,
7346 dry_run: bool,
7347 json_out: bool,
7348) {
7349 match EXPERIMENTS_HANDLER.get() {
7350 Some(handler) => {
7351 handler(
7352 frontier.to_path_buf(),
7353 backend.map(String::from),
7354 max_findings,
7355 dry_run,
7356 json_out,
7357 )
7358 .await;
7359 }
7360 None => {
7361 eprintln!(
7362 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7363 style::err_prefix()
7364 );
7365 std::process::exit(1);
7366 }
7367 }
7368}
7369
7370async fn cmd_compile_code(
7373 root: &Path,
7374 frontier: &Path,
7375 backend: Option<&str>,
7376 max_files: Option<usize>,
7377 dry_run: bool,
7378 json_out: bool,
7379) {
7380 match CODE_HANDLER.get() {
7381 Some(handler) => {
7382 handler(
7383 root.to_path_buf(),
7384 frontier.to_path_buf(),
7385 backend.map(String::from),
7386 max_files,
7387 dry_run,
7388 json_out,
7389 )
7390 .await;
7391 }
7392 None => {
7393 eprintln!(
7394 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7395 style::err_prefix()
7396 );
7397 std::process::exit(1);
7398 }
7399 }
7400}
7401
7402async fn cmd_compile_notes(
7407 vault: &Path,
7408 frontier: &Path,
7409 backend: Option<&str>,
7410 max_files: Option<usize>,
7411 max_items_per_category: Option<usize>,
7412 dry_run: bool,
7413 json_out: bool,
7414) {
7415 match NOTES_HANDLER.get() {
7416 Some(handler) => {
7417 handler(
7418 vault.to_path_buf(),
7419 frontier.to_path_buf(),
7420 backend.map(String::from),
7421 max_files,
7422 max_items_per_category,
7423 dry_run,
7424 json_out,
7425 )
7426 .await;
7427 }
7428 None => {
7429 eprintln!(
7430 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7431 style::err_prefix()
7432 );
7433 std::process::exit(1);
7434 }
7435 }
7436}
7437
7438async fn cmd_scout(
7445 folder: &Path,
7446 frontier: &Path,
7447 backend: Option<&str>,
7448 dry_run: bool,
7449 json_out: bool,
7450) {
7451 match SCOUT_HANDLER.get() {
7452 Some(handler) => {
7453 handler(
7454 folder.to_path_buf(),
7455 frontier.to_path_buf(),
7456 backend.map(String::from),
7457 dry_run,
7458 json_out,
7459 )
7460 .await;
7461 }
7462 None => {
7463 eprintln!(
7464 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7465 style::err_prefix()
7466 );
7467 std::process::exit(1);
7468 }
7469 }
7470}
7471
/// Run `vela check`: schema validation, stats/lint/signals, and conformance,
/// as selected by the flags.
///
/// With `json_output`, a single machine-readable report (built by
/// `check_json_payload`) is printed and the process exits 1 when it is not
/// ok. Otherwise the selected checks run with human-readable output; when no
/// specific check flag is passed, all of them run. The process exits 1 on
/// event-replay failure or — in strict mode — on a non-empty review queue or
/// a proof-readiness status other than "ready". `fix` is accepted but
/// currently unused here.
#[allow(clippy::too_many_arguments)]
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits: build the full payload, print, set exit code.
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // No specific check requested → run everything (same as --all).
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    // Stats pass: lint, event replay, signatures, and signal analysis.
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!(" - {conflict}");
            }
        }
        // Only print the signature summary when something is actually signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Exit non-zero on replay failure, or (strict only) on pending review
        // items / a proof state that is not ready.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        // Run conformance when explicitly requested or the directory exists;
        // otherwise explain how to point at the conformance suite.
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                " conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    // `fix` is plumbed through the CLI but has no effect in this command yet.
    let _ = fix;
}
7560
7561fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
7562 let report = validate::validate(src);
7563 let loaded = repo::load_from_path(src).ok();
7564 let (method_report, graph_report) = if schema_only {
7565 (None, None)
7566 } else if let Some(frontier) = loaded.as_ref() {
7567 (
7568 Some(lint::lint(frontier, None, None)),
7569 Some(lint::lint_frontier(frontier)),
7570 )
7571 } else {
7572 (None, None)
7573 };
7574 let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
7575 let mut diagnostics = Vec::new();
7576 diagnostics.extend(report.errors.iter().map(|e| {
7577 json!({
7578 "severity": "error",
7579 "rule_id": "schema",
7580 "finding_id": null,
7581 "file": &e.file,
7582 "field_path": null,
7583 "message": &e.error,
7584 "suggestion": schema_error_suggestion(&e.error),
7585 "fixable": schema_error_fix(&e.error),
7586 "normalize_action": schema_error_action(&e.error),
7587 })
7588 }));
7589 for (check_id, lint_report) in [
7590 ("methodology", method_report.as_ref()),
7591 ("frontier_graph", graph_report.as_ref()),
7592 ] {
7593 if let Some(lint_report) = lint_report {
7594 diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
7595 json!({
7596 "severity": d.severity.to_string(),
7597 "rule_id": &d.rule_id,
7598 "check": check_id,
7599 "finding_id": &d.finding_id,
7600 "field_path": null,
7601 "message": &d.message,
7602 "suggestion": &d.suggestion,
7603 "fixable": false,
7604 "normalize_action": null,
7605 })
7606 }));
7607 }
7608 }
7609 let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
7610 let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
7611 let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
7612 let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
7613 let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
7614 let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
7615 let replay_report = loaded.as_ref().map(events::replay_report);
7616 let state_integrity_report = if schema_only {
7617 loaded.as_ref().map(state_integrity::analyze)
7618 } else {
7619 state_integrity::analyze_path(src).ok()
7620 };
7621 if let Some(replay) = replay_report.as_ref()
7622 && !replay.ok
7623 {
7624 diagnostics.extend(replay.conflicts.iter().map(|conflict| {
7625 json!({
7626 "severity": "error",
7627 "rule_id": "event_replay",
7628 "check": "events",
7629 "finding_id": null,
7630 "field_path": null,
7631 "message": conflict,
7632 "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
7633 "fixable": false,
7634 "normalize_action": null,
7635 })
7636 }));
7637 }
7638 let event_errors = replay_report
7639 .as_ref()
7640 .map_or(0, |replay| usize::from(!replay.ok));
7641 let state_integrity_errors = state_integrity_report
7642 .as_ref()
7643 .map_or(0, |report| report.structural_errors.len());
7644 let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
7645 .as_ref()
7646 .map(|frontier| {
7647 (
7648 sources::source_summary(frontier),
7649 sources::evidence_summary(frontier),
7650 sources::condition_summary(frontier),
7651 proposals::summary(frontier),
7652 proposals::proof_state_json(&frontier.proof_state),
7653 )
7654 })
7655 .unwrap_or_else(|| {
7656 (
7657 sources::SourceRegistrySummary::default(),
7658 sources::EvidenceAtomSummary::default(),
7659 sources::ConditionSummary::default(),
7660 proposals::ProposalSummary::default(),
7661 Value::Null,
7662 )
7663 });
7664 let signature_report = loaded
7665 .as_ref()
7666 .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
7667 if let Some(frontier) = loaded.as_ref()
7668 && !schema_only
7669 {
7670 let projection = sources::derive_projection(frontier);
7671 let existing_sources = frontier
7672 .sources
7673 .iter()
7674 .map(|source| source.id.as_str())
7675 .collect::<std::collections::BTreeSet<_>>();
7676 let existing_atoms = frontier
7677 .evidence_atoms
7678 .iter()
7679 .map(|atom| atom.id.as_str())
7680 .collect::<std::collections::BTreeSet<_>>();
7681 let existing_conditions = frontier
7682 .condition_records
7683 .iter()
7684 .map(|record| record.id.as_str())
7685 .collect::<std::collections::BTreeSet<_>>();
7686 for source in projection
7687 .sources
7688 .iter()
7689 .filter(|source| !existing_sources.contains(source.id.as_str()))
7690 {
7691 diagnostics.push(json!({
7692 "severity": "warning",
7693 "rule_id": "missing_source_record",
7694 "check": "source_registry",
7695 "finding_id": source.finding_ids.first(),
7696 "field_path": "sources",
7697 "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
7698 "suggestion": "Run `vela normalize` to materialize source records before proof export.",
7699 "fixable": true,
7700 "normalize_action": "materialize_source_record",
7701 }));
7702 }
7703 for atom in projection
7704 .evidence_atoms
7705 .iter()
7706 .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
7707 {
7708 diagnostics.push(json!({
7709 "severity": "warning",
7710 "rule_id": "missing_evidence_atom",
7711 "check": "evidence_atoms",
7712 "finding_id": atom.finding_id,
7713 "field_path": "evidence_atoms",
7714 "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
7715 "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
7716 "fixable": true,
7717 "normalize_action": "materialize_evidence_atom",
7718 }));
7719 }
7720 for atom in projection
7721 .evidence_atoms
7722 .iter()
7723 .filter(|atom| atom.locator.is_none())
7724 {
7725 diagnostics.push(json!({
7726 "severity": "warning",
7727 "rule_id": "missing_evidence_locator",
7728 "check": "evidence_atoms",
7729 "finding_id": atom.finding_id,
7730 "field_path": "evidence_atoms[].locator",
7731 "message": format!("Evidence atom {} has no source locator.", atom.id),
7732 "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
7733 "fixable": false,
7734 "normalize_action": null,
7735 }));
7736 }
7737 for condition in projection
7738 .condition_records
7739 .iter()
7740 .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
7741 {
7742 diagnostics.push(json!({
7743 "severity": "warning",
7744 "rule_id": "condition_record_missing",
7745 "check": "conditions",
7746 "finding_id": condition.finding_id,
7747 "field_path": "condition_records",
7748 "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
7749 "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
7750 "fixable": true,
7751 "normalize_action": "materialize_condition_record",
7752 }));
7753 }
7754 for proposal in frontier.proposals.iter().filter(|proposal| {
7755 matches!(proposal.status.as_str(), "accepted" | "applied")
7756 && proposal
7757 .reviewed_by
7758 .as_deref()
7759 .is_none_or(proposals::is_placeholder_reviewer)
7760 }) {
7761 diagnostics.push(json!({
7762 "severity": "error",
7763 "rule_id": "reviewer_identity_missing",
7764 "check": "proposals",
7765 "finding_id": proposal.target.id,
7766 "field_path": "proposals[].reviewed_by",
7767 "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
7768 "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
7769 "fixable": false,
7770 "normalize_action": null,
7771 }));
7772 }
7773 }
7774 let signal_report = loaded
7775 .as_ref()
7776 .map(|frontier| signals::analyze(frontier, &diagnostics))
7777 .unwrap_or_else(empty_signal_report);
7778 let errors =
7779 report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
7780 let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
7781 let infos = method_infos + graph_infos;
7782 let strict_blockers = signal_report
7783 .signals
7784 .iter()
7785 .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
7786 .count();
7787 let fixable = diagnostics
7788 .iter()
7789 .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
7790 .count();
7791 let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
7792
7793 json!({
7794 "ok": ok,
7795 "command": "check",
7796 "schema_version": project::VELA_SCHEMA_VERSION,
7797 "source": {
7798 "path": src.display().to_string(),
7799 "hash": format!("sha256:{source_hash}"),
7800 },
7801 "summary": {
7802 "status": if ok { "pass" } else { "fail" },
7803 "checked_findings": report.total_files,
7804 "valid_findings": report.valid,
7805 "invalid_findings": report.invalid,
7806 "errors": errors,
7807 "warnings": warnings,
7808 "info": infos,
7809 "fixable": fixable,
7810 "strict": strict,
7811 "schema_only": schema_only,
7812 },
7813 "checks": [
7814 {
7815 "id": "schema",
7816 "status": if report.invalid == 0 { "pass" } else { "fail" },
7817 "checked": report.total_files,
7818 "failed": report.invalid,
7819 "errors": report.errors.iter().map(|e| json!({
7820 "file": e.file,
7821 "message": e.error,
7822 })).collect::<Vec<_>>(),
7823 },
7824 {
7825 "id": "methodology",
7826 "status": if method_errors == 0 { "pass" } else { "fail" },
7827 "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
7828 "failed": method_errors,
7829 "warnings": method_warnings,
7830 "info": method_infos,
7831 "skipped": schema_only,
7832 },
7833 {
7834 "id": "frontier_graph",
7835 "status": if graph_errors == 0 { "pass" } else { "fail" },
7836 "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
7837 "failed": graph_errors,
7838 "warnings": graph_warnings,
7839 "info": graph_infos,
7840 "skipped": schema_only,
7841 },
7842 {
7843 "id": "signals",
7844 "status": if strict_blockers == 0 { "pass" } else { "fail" },
7845 "checked": signal_report.signals.len(),
7846 "failed": strict_blockers,
7847 "warnings": signal_report.proof_readiness.warnings,
7848 "skipped": loaded.is_none(),
7849 "blockers": signal_report.signals.iter()
7850 .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
7851 .map(|s| json!({
7852 "id": s.id,
7853 "kind": s.kind,
7854 "severity": s.severity,
7855 "reason": s.reason,
7856 }))
7857 .collect::<Vec<_>>(),
7858 },
7859 {
7860 "id": "events",
7861 "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
7862 "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
7863 "failed": event_errors,
7864 "skipped": schema_only || loaded.is_none(),
7865 },
7866 {
7867 "id": "state_integrity",
7868 "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
7869 "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
7870 "failed": state_integrity_errors,
7871 "skipped": schema_only || loaded.is_none(),
7872 }
7873 ],
7874 "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
7875 "replay": replay_report,
7876 "state_integrity": state_integrity_report,
7877 "source_registry": source_registry,
7878 "evidence_atoms": evidence_atoms,
7879 "conditions": conditions,
7880 "proposals": proposal_summary,
7881 "proof_state": proof_state,
7882 "signatures": signature_report,
7883 "diagnostics": diagnostics,
7884 "signals": signal_report.signals,
7885 "review_queue": signal_report.review_queue,
7886 "proof_readiness": signal_report.proof_readiness,
7887 "repair_plan": build_repair_plan(&diagnostics),
7888 })
7889}
7890
/// `vela normalize`: apply safe, mechanical cleanups to a frontier file and
/// either report them as a dry run or write the result.
///
/// Cleanups performed below: entity type/name normalization, confidence
/// recomputation from replications, optional provenance resync, optional
/// content-address ID rewriting (with an ID-map side file), and
/// materialization of derived source/evidence/condition records.
///
/// Flag constraints: `--write` and `--out` are mutually exclusive,
/// `--dry-run` excludes both, and `--id-map` requires `--rewrite-ids`.
/// `fail` / `fail_return` terminate the process, so code after each check
/// may assume it passed.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Reject contradictory flag combinations before touching the file.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Packet directories are export artifacts, never normalization targets.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // A frontier whose event log contains anything beyond the creation event
    // must not be rewritten in place: that would bypass the reviewed-event
    // history. Dry runs are still allowed.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutation so `stats_changed` can be reported.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Record collection sizes before materialization so the deltas below
    // count only newly materialized records.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Map each finding whose id no longer matches its content address
        // (derived from assertion + provenance) to the expected id.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Refuse a rewrite that would collapse two findings onto one id.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Apply the rewrites, keeping the old id as `previous_version`.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Retarget links that pointed at a rewritten id.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old-id -> new-id map for external tooling.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Write in place (--write), to a new path (--out), or nowhere (dry run).
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        // `wrote_to` was moved into the payload above, so read it back here.
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            "  entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            "  would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8078
/// `vela proof`: export a proof packet for a frontier and validate it.
///
/// Flow, in order: check the template (only "bbb-alzheimer" is supported),
/// load the frontier, export the packet to `out`, optionally run the gold
/// benchmark suite (aborting if the suite fails), validate the packet,
/// record the export in the in-memory proof state, and — when
/// `record_proof_state` is set — write the updated frontier back to disk.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    // Only one proof template exists today; anything else is a hard error.
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    // Export first: its hashes feed the proof-state record below.
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // Optional gold benchmark: write its summary into the packet and abort
    // unless the suite reports ok=true.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // Validate the freshly written packet directory.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record this export in the frontier's proof state and refresh stats.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Persist the proof-state update only when explicitly requested.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!("  source: {}", frontier.display());
        println!("  template: {template}");
        println!("  output: {}", out.display());
        println!("  trace: {}", out.join("proof-trace.json").display());
        println!(
            "  proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8182
8183fn cmd_status(path: &Path, json: bool) {
8187 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8188
8189 let mut pending_total = 0usize;
8191 let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
8192 std::collections::BTreeMap::new();
8193 for p in &project.proposals {
8194 if p.status == "pending_review" {
8195 pending_total += 1;
8196 *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
8197 }
8198 }
8199
8200 let audit = crate::causal_reasoning::audit_frontier(&project);
8202 let audit_summary = crate::causal_reasoning::summarize_audit(&audit);
8203
8204 let mut last_sync: Option<&crate::events::StateEvent> = None;
8206 let mut last_conflict: Option<&crate::events::StateEvent> = None;
8207 let mut total_conflicts = 0usize;
8208 for e in &project.events {
8209 match e.kind.as_str() {
8210 "frontier.synced_with_peer" => {
8211 if last_sync
8212 .map(|prev| e.timestamp > prev.timestamp)
8213 .unwrap_or(true)
8214 {
8215 last_sync = Some(e);
8216 }
8217 }
8218 "frontier.conflict_detected" => {
8219 total_conflicts += 1;
8220 if last_conflict
8221 .map(|prev| e.timestamp > prev.timestamp)
8222 .unwrap_or(true)
8223 {
8224 last_conflict = Some(e);
8225 }
8226 }
8227 _ => {}
8228 }
8229 }
8230
8231 let mut targets_with_success = std::collections::HashSet::new();
8233 let mut failed_replications = 0usize;
8234 for r in &project.replications {
8235 if r.outcome == "replicated" {
8236 targets_with_success.insert(r.target_finding.clone());
8237 } else if r.outcome == "failed" {
8238 failed_replications += 1;
8239 }
8240 }
8241
8242 if json {
8243 println!(
8244 "{}",
8245 serde_json::to_string_pretty(&json!({
8246 "ok": true,
8247 "command": "status",
8248 "frontier": frontier_label(&project),
8249 "vfr_id": project.frontier_id(),
8250 "findings": project.findings.len(),
8251 "events": project.events.len(),
8252 "actors": project.actors.len(),
8253 "peers": project.peers.len(),
8254 "inbox": {
8255 "pending_total": pending_total,
8256 "pending_by_kind": pending_by_kind,
8257 },
8258 "causal_audit": {
8259 "identified": audit_summary.identified,
8260 "conditional": audit_summary.conditional,
8261 "underidentified": audit_summary.underidentified,
8262 "underdetermined": audit_summary.underdetermined,
8263 },
8264 "replications": {
8265 "total": project.replications.len(),
8266 "findings_with_success": targets_with_success.len(),
8267 "failed": failed_replications,
8268 },
8269 "federation": {
8270 "peers": project.peers.len(),
8271 "last_sync": last_sync.map(|e| e.timestamp.clone()),
8272 "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
8273 "total_conflicts": total_conflicts,
8274 },
8275 }))
8276 .expect("serialize status")
8277 );
8278 return;
8279 }
8280
8281 println!();
8282 println!(
8283 " {}",
8284 format!("VELA · STATUS · {}", path.display())
8285 .to_uppercase()
8286 .dimmed()
8287 );
8288 println!(" {}", style::tick_row(60));
8289 println!();
8290 println!(" frontier: {}", frontier_label(&project));
8291 println!(" vfr_id: {}", project.frontier_id());
8292 println!(
8293 " findings: {} events: {} peers: {} actors: {}",
8294 project.findings.len(),
8295 project.events.len(),
8296 project.peers.len(),
8297 project.actors.len(),
8298 );
8299 println!();
8300 if pending_total > 0 {
8301 println!(
8302 " {} {pending_total} pending proposals",
8303 style::warn("inbox")
8304 );
8305 for (k, n) in &pending_by_kind {
8306 println!(" · {n:>3} {k}");
8307 }
8308 } else {
8309 println!(" {} inbox clean", style::ok("ok"));
8310 }
8311 println!();
8312 if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
8313 let chip = if audit_summary.underidentified > 0 {
8314 style::lost("audit")
8315 } else {
8316 style::warn("audit")
8317 };
8318 println!(
8319 " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
8320 chip,
8321 audit_summary.identified,
8322 audit_summary.conditional,
8323 audit_summary.underidentified,
8324 audit_summary.underdetermined,
8325 );
8326 if audit_summary.underidentified > 0 {
8327 println!(
8328 " next: vela causal audit {} --problems-only",
8329 path.display()
8330 );
8331 }
8332 } else if audit_summary.underdetermined == 0 {
8333 println!(
8334 " {} causal audit: all {} identified",
8335 style::ok("ok"),
8336 audit_summary.identified
8337 );
8338 } else {
8339 println!(
8340 " {} causal audit: {} identified, {} ungraded",
8341 style::warn("audit"),
8342 audit_summary.identified,
8343 audit_summary.underdetermined,
8344 );
8345 }
8346 println!();
8347 if !project.replications.is_empty() {
8348 println!(
8349 " {} {} records · {} findings replicated · {} failed",
8350 style::ok("replications"),
8351 project.replications.len(),
8352 targets_with_success.len(),
8353 failed_replications,
8354 );
8355 }
8356 if project.peers.is_empty() {
8357 println!(
8358 " {} no federation peers registered",
8359 style::warn("federation")
8360 );
8361 } else {
8362 let last = last_sync
8363 .map(|e| fmt_timestamp(&e.timestamp))
8364 .unwrap_or_else(|| "never".to_string());
8365 let chip = if total_conflicts > 0 {
8366 style::warn("federation")
8367 } else {
8368 style::ok("federation")
8369 };
8370 println!(
8371 " {} {} peer(s) · last sync {} · {} conflict events",
8372 chip,
8373 project.peers.len(),
8374 last,
8375 total_conflicts,
8376 );
8377 }
8378 println!();
8379}
8380
8381fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8383 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8384 let mut events: Vec<&crate::events::StateEvent> = project
8385 .events
8386 .iter()
8387 .filter(|e| match kind_filter {
8388 Some(k) => e.kind.contains(k),
8389 None => true,
8390 })
8391 .collect();
8392 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8393 events.truncate(limit);
8394
8395 if json {
8396 let payload: Vec<_> = events
8397 .iter()
8398 .map(|e| {
8399 json!({
8400 "id": e.id,
8401 "kind": e.kind,
8402 "actor": e.actor.id,
8403 "target": &e.target.id,
8404 "target_type": &e.target.r#type,
8405 "timestamp": e.timestamp,
8406 "reason": e.reason,
8407 })
8408 })
8409 .collect();
8410 println!(
8411 "{}",
8412 serde_json::to_string_pretty(&json!({
8413 "ok": true,
8414 "command": "log",
8415 "events": payload,
8416 }))
8417 .expect("serialize log")
8418 );
8419 return;
8420 }
8421
8422 println!();
8423 println!(
8424 " {}",
8425 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8426 .to_uppercase()
8427 .dimmed()
8428 );
8429 println!(" {}", style::tick_row(60));
8430 if events.is_empty() {
8431 println!(" (no events)");
8432 return;
8433 }
8434 for e in &events {
8435 let when = fmt_timestamp(&e.timestamp);
8436 let target_short = if e.target.id.len() > 22 {
8437 format!("{}…", &e.target.id[..21])
8438 } else {
8439 e.target.id.clone()
8440 };
8441 let reason: String = e.reason.chars().take(70).collect();
8442 println!(
8443 " {:<19} {:<32} {:<24} {}",
8444 when, e.kind, target_short, reason
8445 );
8446 }
8447 println!();
8448}
8449
8450fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8452 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8453
8454 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8457 std::collections::HashMap::new();
8458 for p in &project.proposals {
8459 if p.kind != "finding.note" {
8460 continue;
8461 }
8462 if p.actor.id != "agent:reviewer-agent" {
8463 continue;
8464 }
8465 let reason = &p.reason;
8466 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8467 continue;
8468 };
8469 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8470 let extract = |k: &str| -> f64 {
8471 let pat = format!("{k} ");
8472 text.find(&pat)
8473 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8474 .and_then(|t| t.parse::<f64>().ok())
8475 .unwrap_or(0.0)
8476 };
8477 score_map.insert(
8478 target.to_string(),
8479 (
8480 extract("plausibility"),
8481 extract("evidence"),
8482 extract("scope"),
8483 extract("duplicate-risk"),
8484 ),
8485 );
8486 }
8487
8488 let mut pending: Vec<&crate::proposals::StateProposal> = project
8489 .proposals
8490 .iter()
8491 .filter(|p| {
8492 p.status == "pending_review"
8493 && match kind_filter {
8494 Some(k) => p.kind.contains(k),
8495 None => true,
8496 }
8497 })
8498 .collect();
8499 pending.sort_by(|a, b| {
8501 let sa = score_map
8502 .get(&a.id)
8503 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8504 let sb = score_map
8505 .get(&b.id)
8506 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8507 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8508 });
8509 pending.truncate(limit);
8510
8511 if json {
8512 let payload: Vec<_> = pending
8513 .iter()
8514 .map(|p| {
8515 let assertion_text = p
8516 .payload
8517 .get("finding")
8518 .and_then(|f| f.get("assertion"))
8519 .and_then(|a| a.get("text"))
8520 .and_then(|t| t.as_str());
8521 let assertion_type = p
8522 .payload
8523 .get("finding")
8524 .and_then(|f| f.get("assertion"))
8525 .and_then(|a| a.get("type"))
8526 .and_then(|t| t.as_str());
8527 let composite = score_map
8528 .get(&p.id)
8529 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8530 json!({
8531 "proposal_id": p.id,
8532 "kind": p.kind,
8533 "actor": p.actor,
8534 "reason": p.reason,
8535 "assertion_text": assertion_text,
8536 "assertion_type": assertion_type,
8537 "reviewer_composite": composite,
8538 })
8539 })
8540 .collect();
8541 println!(
8542 "{}",
8543 serde_json::to_string_pretty(&json!({
8544 "ok": true,
8545 "command": "inbox",
8546 "shown": pending.len(),
8547 "proposals": payload,
8548 }))
8549 .expect("serialize inbox")
8550 );
8551 return;
8552 }
8553
8554 println!();
8555 println!(
8556 " {}",
8557 format!(
8558 "VELA · INBOX · {} ({} pending shown)",
8559 path.display(),
8560 pending.len()
8561 )
8562 .to_uppercase()
8563 .dimmed()
8564 );
8565 println!(" {}", style::tick_row(60));
8566 if pending.is_empty() {
8567 println!(" (inbox clean)");
8568 return;
8569 }
8570 for p in &pending {
8571 let assertion_text = p
8572 .payload
8573 .get("finding")
8574 .and_then(|f| f.get("assertion"))
8575 .and_then(|a| a.get("text"))
8576 .and_then(|t| t.as_str())
8577 .unwrap_or("");
8578 let assertion_type = p
8579 .payload
8580 .get("finding")
8581 .and_then(|f| f.get("assertion"))
8582 .and_then(|a| a.get("type"))
8583 .and_then(|t| t.as_str())
8584 .unwrap_or("");
8585 let composite = score_map
8586 .get(&p.id)
8587 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8588 let score_str = composite
8589 .map(|c| format!("[{:.2}]", c))
8590 .unwrap_or_else(|| "[—] ".to_string());
8591 let kind_short = if p.kind.len() > 12 {
8592 format!("{}…", &p.kind[..11])
8593 } else {
8594 p.kind.clone()
8595 };
8596 let summary: String = if !assertion_text.is_empty() {
8597 assertion_text.chars().take(80).collect()
8598 } else {
8599 p.reason.chars().take(80).collect()
8600 };
8601 println!(
8602 " {} {} {:<13} {:<18} {}",
8603 score_str, p.id, kind_short, assertion_type, summary
8604 );
8605 }
8606 println!();
8607}
8608
8609fn cmd_ask(path: &Path, question: &str, json: bool) {
8614 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8615
8616 if question.trim().is_empty() {
8617 use std::io::{BufRead, Write};
8619 println!();
8620 println!(
8621 " {}",
8622 format!("VELA · ASK · {}", path.display())
8623 .to_uppercase()
8624 .dimmed()
8625 );
8626 println!(" {}", style::tick_row(60));
8627 println!(" Ask a question. Type `exit` to quit.");
8628 println!(" Examples:");
8629 println!(" · what's pending?");
8630 println!(" · what's underidentified?");
8631 println!(" · how many findings?");
8632 println!(" · what changed recently?");
8633 println!(" · who has what calibration?");
8634 println!();
8635 let stdin = std::io::stdin();
8636 let mut stdout = std::io::stdout();
8637 loop {
8638 print!(" ask> ");
8639 stdout.flush().ok();
8640 let mut line = String::new();
8641 if stdin.lock().read_line(&mut line).is_err() {
8642 break;
8643 }
8644 let q = line.trim();
8645 if q.is_empty() {
8646 continue;
8647 }
8648 if matches!(q, "exit" | "quit" | "q") {
8649 break;
8650 }
8651 answer(&project, q, false);
8652 }
8653 return;
8654 }
8655
8656 answer(&project, question, json);
8657}
8658
/// Keyword-routes a question about `project` to one of six canned answers
/// (pending inbox, causal audit, recent events, counts, calibration,
/// federation), printing either JSON or styled text. Falls through to a
/// "don't know" hint when no keyword matches. Routing is first-match-wins
/// in the order the branches appear below.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Branch 1: pending proposals / inbox state.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!("  {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!("   · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!("  Inbox is clean.");
            } else {
                println!("  Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Branch 2: causal identifiability audit.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                "  Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    "  The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                // Show at most 8 underidentified findings as concrete leads.
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!("   · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Branch 3: most recent events (newest-first, capped at 8).
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!("  Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!("   · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Branch 4: entity counts.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!("  {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                "  {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Branch 5: per-actor prediction calibration (Brier scores).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!("  No predictions yet. The calibration ledger is empty.");
        } else {
            println!("  Calibration over {} actor(s):", records.len());
            for r in &records {
                // Brier score may be absent (nothing resolved yet).
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    "   · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Branch 6: federation peers and conflict history.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!("  {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!("   · {:<24} {}", p.id, p.url);
            }
            println!("  {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: no keyword matched.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!("  Don't know how to route that question yet.");
        println!("  Try: pending · audit · recent · how many · calibration · peers");
    }
}
8890
8891fn frontier_label(p: &crate::project::Project) -> String {
8892 if p.project.name.trim().is_empty() {
8893 "(unnamed)".to_string()
8894 } else {
8895 p.project.name.clone()
8896 }
8897}
8898
8899fn fmt_timestamp(ts: &str) -> String {
8900 chrono::DateTime::parse_from_rfc3339(ts)
8903 .map(|dt| dt.format("%m-%d %H:%M").to_string())
8904 .unwrap_or_else(|_| ts.chars().take(16).collect())
8905}
8906
/// Print the summary dashboard for the frontier at `path` (`vela stats`).
///
/// Loads the frontier via `load_frontier_or_fail` (which exits on error),
/// then renders headline counts, proof-packet status, and a per-category
/// breakdown sorted by descending count.
fn cmd_stats(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let s = &frontier.stats;
    println!();
    // NOTE(review): this banner version is hard-coded while the CLI version
    // comes from `#[command(version)]` — confirm it is bumped deliberately.
    println!(" {}", "FRONTIER · V0.36.0".dimmed());
    println!(" {}", frontier.project.name.bold());
    println!(" {}", style::tick_row(60));
    println!(" id: {}", frontier.frontier_id());
    println!(" compiled: {}", frontier.project.compiled_at);
    println!(" papers: {}", frontier.project.papers_processed);
    println!(" findings: {}", s.findings);
    println!(" links: {}", s.links);
    println!(" replicated: {}", s.replicated);
    println!(" avg confidence: {}", s.avg_confidence);
    println!(" gaps: {}", s.gaps);
    println!(" contested: {}", s.contested);
    println!(" reviewed: {}", s.human_reviewed);
    println!(" proposals: {}", s.proposal_count);
    println!(
        " recorded proof: {}",
        frontier.proof_state.latest_packet.status
    );
    // Only show the provenance note once a packet has actually been exported.
    if frontier.proof_state.latest_packet.status != "never_exported" {
        println!(
            " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
        );
    }
    // Category breakdown, most populous category first.
    if !s.categories.is_empty() {
        println!();
        println!(" {}", "categories".dimmed());
        let mut categories = s.categories.iter().collect::<Vec<_>>();
        categories.sort_by(|a, b| b.1.cmp(a.1));
        for (category, count) in categories {
            println!(" {category}: {}", count);
        }
    }
    println!();
    println!(" {}", style::tick_row(60));
    println!();
}
8947
/// Dispatch for the `vela proposals` subcommands.
///
/// Every arm follows the same shape: load or mutate the frontier through the
/// `proposals` module (errors surface through `fail_return`, which prints and
/// exits), build a JSON payload, then print either pretty JSON (`--json`) or
/// a short human-readable summary. Only `Validate` sets a non-zero exit code
/// itself (when any proposal fails validation).
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals in the frontier, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report the before/after deltas without applying.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Only print delta detail lines when a delta actually exists.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal file without touching any frontier; exits 1
        // when any proposal is invalid.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to an output file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the resulting event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal; nothing is applied, only the status changes.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9222
/// Handler for `vela artifact-to-state`: import an artifact packet into the
/// frontier at `frontier`, attributing changes to `actor`.
///
/// When `apply_artifacts` is set, artifact events are applied directly;
/// finding/gap proposals always go through review (reflected in the report's
/// `pending_truth_proposals`). Errors exit via `fail_return`.
fn cmd_artifact_to_state(
    frontier: &Path,
    packet: &Path,
    actor: &str,
    apply_artifacts: bool,
    json: bool,
) {
    let report =
        crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
            .unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report)
                .expect("failed to serialize artifact-to-state report")
        );
    } else {
        println!("vela artifact-to-state");
        println!(" packet: {}", report.packet_id);
        println!(" frontier: {}", report.frontier);
        println!(" artifact proposals: {}", report.artifact_proposals);
        println!(" finding proposals: {}", report.finding_proposals);
        println!(" gap proposals: {}", report.gap_proposals);
        println!(
            " applied artifact events: {}",
            report.applied_artifact_events
        );
        println!(
            " pending truth proposals: {}",
            report.pending_truth_proposals
        );
    }
}
9256
/// Dispatch for the `vela bridge-kit` subcommands.
///
/// `Validate` checks packet files offline and exits 1 if any packet (or the
/// kit itself) is invalid. `VerifyProvenance` resolves DOI/PMID identifiers
/// against external registries and exits 1 if any identifier is confirmed
/// unresolved (unreachable registries count as "skipped", not failures).
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                // Kit-level errors (not tied to any single packet).
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Non-zero exit signals validation failure to scripts/CI.
            if !report.ok {
                std::process::exit(1);
            }
        }
        BridgeKitAction::VerifyProvenance { packet, json } => {
            let report = verify_packet_provenance(&packet).await;
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Fixed-width status chip for aligned terminal output.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            // Only confirmed-unresolved identifiers fail the command; skipped
            // (unreachable registry / unknown scheme) does not.
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
9333
/// Aggregate result of `vela bridge-kit verify-provenance` for one packet.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    // Command slug embedded in the JSON output ("bridge-kit.verify-provenance").
    command: String,
    // Display path of the packet file that was checked.
    packet: String,
    // One entry per unique identifier found in the packet.
    identifiers: Vec<ProvenanceVerificationEntry>,
    // Tallies over `identifiers`, by entry status.
    resolved_count: usize,
    unresolved_count: usize,
    skipped_count: usize,
}
9343
/// Verification outcome for a single identifier (e.g. `doi:…` or `pmid:…`).
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    // Canonical identifier string, including its scheme prefix.
    identifier: String,
    // Identifier scheme: "doi", "pmid", or "unknown".
    kind: String,
    // One of "resolved", "unresolved", or "skipped".
    status: String,
    // Optional human-readable detail (HTTP status, transport error, …).
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
9352
/// Read and validate the artifact packet at `packet_path`, then check each
/// recognizable DOI/PMID identifier against its registry (Crossref for DOIs,
/// NCBI eutils for PMIDs).
///
/// Identifiers are collected from artifact locators and claim source refs,
/// deduplicated via a `BTreeSet` (which also gives deterministic output
/// order). Checks run sequentially with a 15s per-request timeout. Any file,
/// parse, validation, or client-build error exits via `fail_return`.
async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
    use crate::artifact_to_state::ArtifactPacket;
    let raw = std::fs::read_to_string(packet_path)
        .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
    let parsed: ArtifactPacket =
        serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
    let packet = parsed
        .validate()
        .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));

    // Gather candidate identifiers; BTreeSet dedupes and sorts them.
    let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    for artifact in &packet.artifacts {
        if let Some(ident) = extract_identifier(&artifact.locator) {
            candidates.insert(ident);
        }
    }
    for claim in &packet.candidate_claims {
        for source_ref in &claim.source_refs {
            if let Some(ident) = extract_identifier(source_ref) {
                candidates.insert(ident);
            }
        }
    }

    let client = reqwest::Client::builder()
        .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));

    let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
    let mut resolved = 0usize;
    let mut unresolved = 0usize;
    let mut skipped = 0usize;
    for candidate in &candidates {
        // Route each identifier to its registry check by scheme prefix;
        // unrecognized schemes are recorded as skipped, not failed.
        let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
            verify_doi(&client, doi).await
        } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
            verify_pmid(&client, pmid).await
        } else {
            ProvenanceVerificationEntry {
                identifier: candidate.clone(),
                kind: "unknown".to_string(),
                status: "skipped".to_string(),
                note: Some("no recognized identifier prefix".to_string()),
            }
        };
        match entry.status.as_str() {
            "resolved" => resolved += 1,
            "unresolved" => unresolved += 1,
            _ => skipped += 1,
        }
        entries.push(entry);
    }

    ProvenanceVerificationReport {
        command: "bridge-kit.verify-provenance".to_string(),
        packet: packet_path.display().to_string(),
        identifiers: entries,
        resolved_count: resolved,
        unresolved_count: unresolved,
        skipped_count: skipped,
    }
}
9422
/// Normalize a free-form source reference into a canonical identifier
/// (`doi:…` or `pmid:…`) when one can be recognized.
///
/// Recognized forms:
/// - already-prefixed `doi:` / `pmid:` strings (returned trimmed, as-is);
/// - doi.org and dx.doi.org URLs, both http and https;
/// - pubmed.ncbi.nlm.nih.gov URLs (trailing slash stripped);
/// - bare DOIs: `10.…/…` with no spaces.
///
/// Returns `None` for empty or unrecognized input.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    if trimmed.starts_with("doi:") || trimmed.starts_with("pmid:") {
        return Some(trimmed.to_string());
    }
    // Both schemes for doi.org AND the legacy dx.doi.org mirror (the original
    // list omitted http://dx.doi.org/, breaking the http/https symmetry used
    // for the other hosts).
    for prefix in [
        "https://doi.org/",
        "http://doi.org/",
        "https://dx.doi.org/",
        "http://dx.doi.org/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            // PubMed URLs conventionally end with a slash after the numeric id.
            let pmid = rest.trim_end_matches('/');
            return Some(format!("pmid:{pmid}"));
        }
    }
    // Bare DOI heuristic: every DOI registrant prefix starts with "10." and
    // the suffix follows a slash; spaces disqualify the string.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
9458
9459async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
9460 let url = format!("https://api.crossref.org/works/{doi}");
9461 match client.get(&url).send().await {
9462 Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
9463 identifier: format!("doi:{doi}"),
9464 kind: "doi".to_string(),
9465 status: "resolved".to_string(),
9466 note: None,
9467 },
9468 Ok(resp) => ProvenanceVerificationEntry {
9469 identifier: format!("doi:{doi}"),
9470 kind: "doi".to_string(),
9471 status: "unresolved".to_string(),
9472 note: Some(format!("crossref returned {}", resp.status())),
9473 },
9474 Err(e) => ProvenanceVerificationEntry {
9475 identifier: format!("doi:{doi}"),
9476 kind: "doi".to_string(),
9477 status: "skipped".to_string(),
9478 note: Some(format!("crossref unreachable: {e}")),
9479 },
9480 }
9481}
9482
9483async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
9484 let url = format!(
9485 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
9486 );
9487 match client.get(&url).send().await {
9488 Ok(resp) if resp.status().is_success() => {
9489 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9492 let result = body.get("result");
9493 let uids = result
9494 .and_then(|r| r.get("uids"))
9495 .and_then(|u| u.as_array());
9496 let resolved = uids.is_some_and(|a| !a.is_empty());
9497 if resolved {
9498 ProvenanceVerificationEntry {
9499 identifier: format!("pmid:{pmid}"),
9500 kind: "pmid".to_string(),
9501 status: "resolved".to_string(),
9502 note: None,
9503 }
9504 } else {
9505 ProvenanceVerificationEntry {
9506 identifier: format!("pmid:{pmid}"),
9507 kind: "pmid".to_string(),
9508 status: "unresolved".to_string(),
9509 note: Some("eutils returned empty uids".to_string()),
9510 }
9511 }
9512 }
9513 Ok(resp) => ProvenanceVerificationEntry {
9514 identifier: format!("pmid:{pmid}"),
9515 kind: "pmid".to_string(),
9516 status: "unresolved".to_string(),
9517 note: Some(format!("eutils returned {}", resp.status())),
9518 },
9519 Err(e) => ProvenanceVerificationEntry {
9520 identifier: format!("pmid:{pmid}"),
9521 kind: "pmid".to_string(),
9522 status: "skipped".to_string(),
9523 note: Some(format!("eutils unreachable: {e}")),
9524 },
9525 }
9526}
9527
/// Handler for `vela source-adapter run`: fetch external records through the
/// named adapter and feed the results into the frontier.
///
/// All CLI flags are forwarded verbatim into `SourceAdapterRunOptions`; the
/// adapter run is async (network fetches) and errors exit via `fail_return`.
/// Output is either the full JSON report or a line-per-field summary.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // A packet id is only present when the run produced a packet.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
9585
/// Handler for `vela runtime-adapter run`: run the named adapter over a local
/// input and feed its output into the frontier.
///
/// Flags are forwarded into `RuntimeAdapterRunOptions`; errors exit via
/// `fail_return`. Output is either the full JSON report or a line-per-field
/// summary.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                // A packet id is only present when the run produced a packet.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
9638
/// Dispatch for the `vela sign` subcommands: keypair generation, frontier
/// signing, signature verification, and per-finding signature thresholds.
///
/// Errors exit via `fail`/`fail_return`; output is pretty JSON (`--json`) or
/// a human-readable summary.
fn cmd_sign(action: SignAction) {
    match action {
        // Create an Ed25519 keypair on disk and echo the public key.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        // Sign findings in the frontier with the given private key.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures; public key is optional (verify against recorded actors).
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold lines only make sense when thresholds are in use.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set the required signature count on a single finding.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A threshold of zero would make every finding trivially accepted.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute joint-acceptance BEFORE saving so the persisted state
            // reflects the new threshold; `met` is read after the refresh.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
9767
/// Dispatch for the `vela actor` subcommands: register a signing actor in a
/// frontier, or list the registered actors.
///
/// Errors exit via `fail`/`fail_return`; output is pretty JSON (`--json`) or
/// a human-readable summary.
fn cmd_actor(action: ActorAction) {
    match action {
        ActorAction::Add {
            frontier,
            id,
            pubkey,
            tier,
            orcid,
            clearance,
            json,
        } => {
            // A 32-byte Ed25519 public key is exactly 64 hex characters;
            // hex::decode also rejects non-hex input.
            let trimmed = pubkey.trim();
            if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
                fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
            }
            // Normalize/validate optional ORCID and access-clearance inputs
            // before touching the frontier, so a bad flag fails fast.
            let orcid_normalized = orcid
                .as_deref()
                .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
            let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
                crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
            });

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Actor ids are unique per frontier.
            if project.actors.iter().any(|actor| actor.id == id) {
                fail(&format!(
                    "Actor '{id}' already registered in this frontier."
                ));
            }
            project.actors.push(sign::ActorRecord {
                id: id.clone(),
                public_key: trimmed.to_string(),
                algorithm: "ed25519".to_string(),
                created_at: chrono::Utc::now().to_rfc3339(),
                tier: tier.clone(),
                orcid: orcid_normalized.clone(),
                access_clearance: clearance,
            });
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "actor.add",
                "frontier": frontier.display().to_string(),
                "actor_id": id,
                "public_key": trimmed,
                "tier": tier,
                "orcid": orcid_normalized,
                "registered_count": project.actors.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
                );
            } else {
                let tier_suffix = tier
                    .as_deref()
                    .map_or_else(String::new, |t| format!(" tier={t}"));
                // Slicing [..16] is safe here: length 64 was validated above.
                println!(
                    "{} actor {} (pubkey {}{tier_suffix})",
                    style::ok("registered"),
                    id,
                    &trimmed[..16]
                );
            }
        }
        ActorAction::List { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "actor.list",
                    "frontier": frontier.display().to_string(),
                    "actors": project.actors,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · ACTOR · LIST · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.actors.is_empty() {
                    println!(" (no actors registered)");
                } else {
                    for actor in &project.actors {
                        // NOTE(review): `[..16]` assumes stored keys are at
                        // least 16 chars (they are when added through
                        // actor.add) — a hand-edited frontier with a shorter
                        // key would panic here; confirm whether that is an
                        // accepted invariant.
                        println!(
                            " {:<28} {}… registered {}",
                            actor.id,
                            &actor.public_key[..16],
                            actor.created_at
                        );
                    }
                }
            }
        }
    }
}
9875
9876fn cmd_causal(action: CausalAction) {
9878 use crate::causal_reasoning;
9879
9880 match action {
9881 CausalAction::Audit {
9882 frontier,
9883 problems_only,
9884 json,
9885 } => {
9886 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9887 let mut entries = causal_reasoning::audit_frontier(&project);
9888 if problems_only {
9889 entries.retain(|e| e.verdict.needs_reviewer_attention());
9890 }
9891 let summary = causal_reasoning::summarize_audit(&entries);
9892
9893 if json {
9894 println!(
9895 "{}",
9896 serde_json::to_string_pretty(&json!({
9897 "ok": true,
9898 "command": "causal.audit",
9899 "frontier": frontier.display().to_string(),
9900 "summary": summary,
9901 "entries": entries,
9902 }))
9903 .expect("serialize causal.audit")
9904 );
9905 return;
9906 }
9907
9908 println!();
9909 println!(
9910 " {}",
9911 format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
9912 .to_uppercase()
9913 .dimmed()
9914 );
9915 println!(" {}", style::tick_row(60));
9916 println!(
9917 " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
9918 summary.total,
9919 summary.identified,
9920 summary.conditional,
9921 summary.underidentified,
9922 summary.underdetermined,
9923 );
9924 if entries.is_empty() {
9925 println!(" (no entries to report)");
9926 return;
9927 }
9928 for e in &entries {
9929 let chip = match e.verdict {
9930 crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
9931 crate::causal_reasoning::Identifiability::Conditional => {
9932 style::warn("conditional")
9933 }
9934 crate::causal_reasoning::Identifiability::Underidentified => {
9935 style::lost("underidentified")
9936 }
9937 crate::causal_reasoning::Identifiability::Underdetermined => {
9938 style::warn("underdetermined")
9939 }
9940 };
9941 let claim = e
9942 .causal_claim
9943 .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
9944 let grade = e
9945 .causal_evidence_grade
9946 .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
9947 println!();
9948 println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
9949 let assertion_short: String = e.assertion_text.chars().take(78).collect();
9950 println!(" {assertion_short}");
9951 println!(" {} {}", style::ok("why:"), e.rationale);
9952 if e.verdict.needs_reviewer_attention()
9953 || matches!(
9954 e.verdict,
9955 crate::causal_reasoning::Identifiability::Underdetermined
9956 )
9957 {
9958 println!(" {} {}", style::ok("fix:"), e.remediation);
9959 }
9960 }
9961 }
9962 CausalAction::Effect {
9963 frontier,
9964 source,
9965 on: target,
9966 json,
9967 } => {
9968 use crate::causal_graph::{CausalEffectVerdict, identify_effect};
9969
9970 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9971 let verdict = identify_effect(&project, &source, &target);
9972
9973 if json {
9974 println!(
9975 "{}",
9976 serde_json::to_string_pretty(&json!({
9977 "ok": true,
9978 "command": "causal.effect",
9979 "frontier": frontier.display().to_string(),
9980 "source": source,
9981 "target": target,
9982 "verdict": verdict,
9983 }))
9984 .expect("serialize causal.effect")
9985 );
9986 return;
9987 }
9988
9989 println!();
9990 println!(
9991 " {}",
9992 format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
9993 .to_uppercase()
9994 .dimmed()
9995 );
9996 println!(" {}", style::tick_row(60));
9997 match verdict {
9998 CausalEffectVerdict::Identified {
9999 adjustment_set,
10000 back_door_paths_considered,
10001 } => {
10002 if adjustment_set.is_empty() {
10003 println!(
10004 " {} no back-door adjustment needed",
10005 style::ok("identified")
10006 );
10007 } else {
10008 println!(" {} identified by adjusting on:", style::ok("identified"));
10009 for z in &adjustment_set {
10010 println!(" · {z}");
10011 }
10012 }
10013 println!(
10014 " back-door paths considered: {}",
10015 back_door_paths_considered
10016 );
10017 }
10018 CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
10019 println!(
10020 " {} identified via front-door criterion (Pearl 1995 §3.3)",
10021 style::ok("identified")
10022 );
10023 println!(" mediators that intercept all directed paths:");
10024 for m in &mediator_set {
10025 println!(" · {m}");
10026 }
10027 println!(
10028 " applies when source-target confounders are unobserved but the mediator chain is."
10029 );
10030 }
10031 CausalEffectVerdict::NoCausalPath { reason } => {
10032 println!(" {} no causal path: {reason}", style::warn("no_path"));
10033 }
10034 CausalEffectVerdict::Underidentified {
10035 unblocked_back_door_paths,
10036 candidates_tried,
10037 } => {
10038 println!(
10039 " {} no observational adjustment set found ({} candidates tried)",
10040 style::lost("underidentified"),
10041 candidates_tried
10042 );
10043 println!(" open back-door paths:");
10044 for path in unblocked_back_door_paths.iter().take(5) {
10045 println!(" · {}", path.join(" — "));
10046 }
10047 println!(
10048 " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
10049 );
10050 }
10051 CausalEffectVerdict::UnknownNode { which } => {
10052 fail(&which);
10053 }
10054 }
10055 println!();
10056 }
10057 CausalAction::Graph {
10058 frontier,
10059 node,
10060 json,
10061 } => {
10062 use crate::causal_graph::CausalGraph;
10063 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10064 let graph = CausalGraph::from_project(&project);
10065
10066 let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
10069 if !graph.contains(n) {
10070 fail(&format!("node not in frontier: {n}"));
10071 }
10072 vec![n]
10073 } else {
10074 project.findings.iter().map(|f| f.id.as_str()).collect()
10075 };
10076
10077 if json {
10078 let payload: Vec<_> = nodes
10079 .iter()
10080 .map(|n| {
10081 let parents: Vec<&str> = graph.parents_of(n).collect();
10082 let children: Vec<&str> = graph.children_of(n).collect();
10083 json!({
10084 "node": n,
10085 "parents": parents,
10086 "children": children,
10087 })
10088 })
10089 .collect();
10090 println!(
10091 "{}",
10092 serde_json::to_string_pretty(&json!({
10093 "ok": true,
10094 "command": "causal.graph",
10095 "node_count": graph.node_count(),
10096 "edge_count": graph.edge_count(),
10097 "nodes": payload,
10098 }))
10099 .expect("serialize causal.graph")
10100 );
10101 return;
10102 }
10103
10104 println!();
10105 println!(
10106 " {}",
10107 format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
10108 .to_uppercase()
10109 .dimmed()
10110 );
10111 println!(" {}", style::tick_row(60));
10112 println!(
10113 " {} nodes · {} edges",
10114 graph.node_count(),
10115 graph.edge_count()
10116 );
10117 println!();
10118 for n in &nodes {
10119 let parents: Vec<&str> = graph.parents_of(n).collect();
10120 let children: Vec<&str> = graph.children_of(n).collect();
10121 if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
10122 continue; }
10124 println!(" {n}");
10125 if !parents.is_empty() {
10126 println!(" parents: {}", parents.join(", "));
10127 }
10128 if !children.is_empty() {
10129 println!(" children: {}", children.join(", "));
10130 }
10131 }
10132 }
10133 CausalAction::Counterfactual {
10134 frontier,
10135 intervene_on,
10136 set_to,
10137 target,
10138 json,
10139 } => {
10140 use crate::counterfactual::{
10141 CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
10142 };
10143
10144 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10145 let query = CounterfactualQuery {
10146 intervene_on: intervene_on.clone(),
10147 set_to,
10148 target: target.clone(),
10149 };
10150 let verdict = answer_counterfactual(&project, &query);
10151
10152 if json {
10153 println!(
10154 "{}",
10155 serde_json::to_string_pretty(&json!({
10156 "ok": true,
10157 "command": "causal.counterfactual",
10158 "frontier": frontier.display().to_string(),
10159 "query": query,
10160 "verdict": verdict,
10161 }))
10162 .expect("serialize causal.counterfactual")
10163 );
10164 return;
10165 }
10166
10167 println!();
10168 println!(
10169 " {}",
10170 format!(
10171 "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
10172 )
10173 .to_uppercase()
10174 .dimmed()
10175 );
10176 println!(" {}", style::tick_row(72));
10177 match verdict {
10178 CounterfactualVerdict::Resolved {
10179 factual,
10180 counterfactual,
10181 delta,
10182 paths_used,
10183 } => {
10184 println!(
10185 " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
10186 style::ok("resolved")
10187 );
10188 println!(
10189 " twin-network propagation through {} causal path(s):",
10190 paths_used.len()
10191 );
10192 for p in paths_used.iter().take(5) {
10193 println!(" · {}", p.join(" → "));
10194 }
10195 println!(
10196 " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
10197 instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
10198 );
10199 }
10200 CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
10201 println!(
10202 " {} causal path exists but {} edge(s) lack a mechanism annotation",
10203 style::warn("mechanism_unspecified"),
10204 unspecified_edges.len()
10205 );
10206 for (parent, child) in unspecified_edges.iter().take(8) {
10207 println!(" · {parent} → {child}");
10208 }
10209 println!(
10210 " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
10211 );
10212 }
10213 CounterfactualVerdict::NoCausalPath { factual } => {
10214 println!(
10215 " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
10216 style::warn("no_path")
10217 );
10218 }
10219 CounterfactualVerdict::UnknownNode { which } => {
10220 fail(&format!("node not in frontier: {which}"));
10221 }
10222 CounterfactualVerdict::InvalidIntervention { reason } => {
10223 fail(&reason);
10224 }
10225 }
10226 println!();
10227 }
10228 }
10229}
10230
10231fn cmd_bridges(action: BridgesAction) {
10234 use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
10235 use std::collections::HashMap;
10236
10237 fn bridges_dir(frontier: &Path) -> PathBuf {
10238 frontier.join(".vela/bridges")
10239 }
10240
10241 fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
10242 let path = bridges_dir(frontier).join(format!("{id}.json"));
10243 if !path.is_file() {
10244 return Err(format!("bridge not found: {id}"));
10245 }
10246 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
10247 serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
10248 }
10249
10250 fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
10251 let dir = bridges_dir(frontier);
10252 std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
10253 let path = dir.join(format!("{}.json", b.id));
10254 let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
10255 std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
10256 }
10257
10258 fn default_reviewer_id() -> String {
10261 std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
10262 }
10263
10264 fn emit_bridge_reviewed_event(
10275 frontier: &Path,
10276 bridge_id: &str,
10277 status: &str,
10278 reviewer_id: &str,
10279 note: Option<&str>,
10280 ) -> Result<(), String> {
10281 let mut payload = serde_json::json!({
10282 "bridge_id": bridge_id,
10283 "status": status,
10284 });
10285 if let Some(n) = note
10286 && !n.trim().is_empty()
10287 {
10288 payload["note"] = serde_json::Value::String(n.to_string());
10289 }
10290 let known_ids: Vec<String> = list_bridges(frontier)
10292 .unwrap_or_default()
10293 .into_iter()
10294 .map(|b| b.id)
10295 .collect();
10296 crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
10297 let event = crate::events::new_bridge_reviewed_event(
10298 bridge_id,
10299 reviewer_id,
10300 "human",
10301 &format!("Bridge {status} by {reviewer_id}"),
10302 payload,
10303 Vec::new(),
10304 );
10305 let events_dir = frontier.join(".vela/events");
10306 std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
10307 let event_path = events_dir.join(format!("{}.json", event.id));
10308 let data =
10309 serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
10310 std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
10311 }
10312
10313 fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
10314 let dir = bridges_dir(frontier);
10315 if !dir.is_dir() {
10316 return Ok(Vec::new());
10317 }
10318 let mut out = Vec::new();
10319 for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
10320 let entry = entry.map_err(|e| format!("read entry: {e}"))?;
10321 let path = entry.path();
10322 if path.extension().and_then(|s| s.to_str()) != Some("json") {
10323 continue;
10324 }
10325 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
10326 let b: Bridge =
10327 serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
10328 out.push(b);
10329 }
10330 out.sort_by(|a, b| {
10331 b.finding_refs
10332 .len()
10333 .cmp(&a.finding_refs.len())
10334 .then(a.entity_name.cmp(&b.entity_name))
10335 });
10336 Ok(out)
10337 }
10338
10339 match action {
10340 BridgesAction::Derive {
10341 frontier_a,
10342 label_a,
10343 frontier_b,
10344 label_b,
10345 json,
10346 } => {
10347 let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
10348 let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
10349 let now = chrono::Utc::now().to_rfc3339();
10350 let new_bridges =
10351 derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
10352
10353 let existing = list_bridges(&frontier_a).unwrap_or_default();
10357 let existing_by_id: HashMap<String, Bridge> =
10358 existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
10359 let mut written = 0;
10360 let mut preserved = 0;
10361 let mut new_ids = Vec::new();
10362 for mut bridge in new_bridges {
10363 if let Some(prev) = existing_by_id.get(&bridge.id)
10364 && prev.status != BridgeStatus::Derived
10365 {
10366 bridge.status = prev.status;
10368 bridge.derived_at = prev.derived_at.clone();
10369 preserved += 1;
10370 }
10371 save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
10372 new_ids.push(bridge.id.clone());
10373 written += 1;
10374 }
10375
10376 if json {
10377 println!(
10378 "{}",
10379 serde_json::to_string_pretty(&json!({
10380 "ok": true,
10381 "command": "bridges.derive",
10382 "frontier_a": frontier_a.display().to_string(),
10383 "frontier_b": frontier_b.display().to_string(),
10384 "bridges_written": written,
10385 "reviewer_judgments_preserved": preserved,
10386 "ids": new_ids,
10387 }))
10388 .expect("serialize bridges.derive")
10389 );
10390 return;
10391 }
10392
10393 println!();
10394 println!(
10395 " {}",
10396 format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
10397 .to_uppercase()
10398 .dimmed()
10399 );
10400 println!(" {}", style::tick_row(60));
10401 println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
10402 if preserved > 0 {
10403 println!(
10404 " {} {} reviewer judgment(s) preserved",
10405 style::ok("kept"),
10406 preserved
10407 );
10408 }
10409 for id in new_ids.iter().take(10) {
10410 println!(" · {id}");
10411 }
10412 if new_ids.len() > 10 {
10413 println!(" … and {} more", new_ids.len() - 10);
10414 }
10415 println!();
10416 }
10417 BridgesAction::List {
10418 frontier,
10419 status,
10420 json,
10421 } => {
10422 let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
10423 if let Some(s) = status.as_deref() {
10424 let want = match s.to_lowercase().as_str() {
10425 "derived" => BridgeStatus::Derived,
10426 "confirmed" => BridgeStatus::Confirmed,
10427 "refuted" => BridgeStatus::Refuted,
10428 other => fail_return(&format!(
10429 "unknown bridge status '{other}' (try derived|confirmed|refuted)"
10430 )),
10431 };
10432 bridges.retain(|b| b.status == want);
10433 }
10434 if json {
10435 println!(
10436 "{}",
10437 serde_json::to_string_pretty(&json!({
10438 "ok": true,
10439 "command": "bridges.list",
10440 "frontier": frontier.display().to_string(),
10441 "count": bridges.len(),
10442 "bridges": bridges,
10443 }))
10444 .expect("serialize bridges.list")
10445 );
10446 return;
10447 }
10448 println!();
10449 println!(
10450 " {}",
10451 format!("VELA · BRIDGES · LIST · {}", frontier.display())
10452 .to_uppercase()
10453 .dimmed()
10454 );
10455 println!(" {}", style::tick_row(60));
10456 println!(" {} bridge(s)", bridges.len());
10457 for b in &bridges {
10458 let chip = match b.status {
10459 BridgeStatus::Derived => style::warn("derived"),
10460 BridgeStatus::Confirmed => style::ok("confirmed"),
10461 BridgeStatus::Refuted => style::lost("refuted"),
10462 };
10463 println!();
10464 println!(
10465 " {chip} {} {} ↔ findings:{}",
10466 b.id,
10467 b.entity_name,
10468 b.finding_refs.len()
10469 );
10470 println!(" frontiers: {}", b.frontiers.join(", "));
10471 if let Some(t) = &b.tension {
10472 println!(" tension: {t}");
10473 }
10474 }
10475 println!();
10476 }
10477 BridgesAction::Show {
10478 frontier,
10479 bridge_id,
10480 json,
10481 } => {
10482 let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10483 if json {
10484 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10485 return;
10486 }
10487 println!();
10488 println!(
10489 " {}",
10490 format!("VELA · BRIDGES · SHOW · {}", b.id)
10491 .to_uppercase()
10492 .dimmed()
10493 );
10494 println!(" {}", style::tick_row(60));
10495 println!(" entity: {}", b.entity_name);
10496 println!(" status: {:?}", b.status);
10497 println!(" frontiers: {}", b.frontiers.join(", "));
10498 if !b.frontier_ids.is_empty() {
10499 println!(" frontier_ids: {}", b.frontier_ids.join(", "));
10500 }
10501 if let Some(t) = &b.tension {
10502 println!(" tension: {t}");
10503 }
10504 println!(" derived_at: {}", b.derived_at);
10505 println!(" finding refs ({}):", b.finding_refs.len());
10506 for r in &b.finding_refs {
10507 let dir = r.direction.as_deref().unwrap_or("—");
10508 let truncated: String = r.assertion_text.chars().take(72).collect();
10509 println!(
10510 " · [{}] {} (conf={:.2}, dir={})",
10511 r.frontier, r.finding_id, r.confidence, dir
10512 );
10513 println!(" {truncated}");
10514 }
10515 println!();
10516 }
10517 BridgesAction::Confirm {
10518 frontier,
10519 bridge_id,
10520 reviewer,
10521 note,
10522 json,
10523 } => {
10524 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10525 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10526 b.status = BridgeStatus::Confirmed;
10527 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10528 let _ = emit_bridge_reviewed_event(
10532 &frontier,
10533 &bridge_id,
10534 "confirmed",
10535 &reviewer_id,
10536 note.as_deref(),
10537 );
10538 if json {
10539 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10540 return;
10541 }
10542 println!();
10543 println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
10544 println!();
10545 }
10546 BridgesAction::Refute {
10547 frontier,
10548 bridge_id,
10549 reviewer,
10550 note,
10551 json,
10552 } => {
10553 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10554 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10555 b.status = BridgeStatus::Refuted;
10556 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10557 let _ = emit_bridge_reviewed_event(
10558 &frontier,
10559 &bridge_id,
10560 "refuted",
10561 &reviewer_id,
10562 note.as_deref(),
10563 );
10564 if json {
10565 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10566 return;
10567 }
10568 println!();
10569 println!(" {} {} now refuted", style::lost("refuted"), b.id);
10570 println!();
10571 }
10572 }
10573}
10574
10575fn cmd_federation(action: FederationAction) {
10577 use crate::federation::PeerHub;
10578
10579 match action {
10580 FederationAction::PeerAdd {
10581 frontier,
10582 id,
10583 url,
10584 pubkey,
10585 note,
10586 json,
10587 } => {
10588 let peer = PeerHub {
10589 id: id.clone(),
10590 url: url.clone(),
10591 public_key: pubkey.trim().to_string(),
10592 added_at: chrono::Utc::now().to_rfc3339(),
10593 note: note.clone(),
10594 };
10595 peer.validate().unwrap_or_else(|e| fail_return(&e));
10596
10597 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10598 if project.peers.iter().any(|p| p.id == id) {
10599 fail(&format!("peer '{id}' already in registry"));
10600 }
10601 project.peers.push(peer.clone());
10602 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10603
10604 if json {
10605 println!(
10606 "{}",
10607 serde_json::to_string_pretty(&json!({
10608 "ok": true,
10609 "command": "federation.peer-add",
10610 "frontier": frontier.display().to_string(),
10611 "peer": peer,
10612 "registered_count": project.peers.len(),
10613 }))
10614 .expect("serialize federation.peer-add")
10615 );
10616 } else {
10617 println!(
10618 "{} peer {} (pubkey {}…) at {}",
10619 style::ok("registered"),
10620 id,
10621 &peer.public_key[..16],
10622 peer.url
10623 );
10624 }
10625 }
10626 FederationAction::PeerList { frontier, json } => {
10627 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10628 if json {
10629 println!(
10630 "{}",
10631 serde_json::to_string_pretty(&json!({
10632 "ok": true,
10633 "command": "federation.peer-list",
10634 "frontier": frontier.display().to_string(),
10635 "peers": project.peers,
10636 }))
10637 .expect("serialize federation.peer-list")
10638 );
10639 } else {
10640 println!();
10641 println!(
10642 " {}",
10643 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
10644 .to_uppercase()
10645 .dimmed()
10646 );
10647 println!(" {}", style::tick_row(60));
10648 if project.peers.is_empty() {
10649 println!(" (no peers registered)");
10650 } else {
10651 for p in &project.peers {
10652 let note_suffix = if p.note.is_empty() {
10653 String::new()
10654 } else {
10655 format!(" · {}", p.note)
10656 };
10657 println!(
10658 " {:<24} {} {}…{note_suffix}",
10659 p.id,
10660 p.url,
10661 &p.public_key[..16]
10662 );
10663 }
10664 }
10665 }
10666 }
10667 FederationAction::Sync {
10668 frontier,
10669 peer_id,
10670 url,
10671 via_hub,
10672 vfr_id,
10673 allow_cross_vfr,
10674 dry_run,
10675 json,
10676 } => {
10677 use crate::federation::{self, DiscoveryResult};
10678
10679 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10680 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
10681 fail(&format!(
10682 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
10683 ));
10684 };
10685 let local_frontier_id = project.frontier_id();
10686
10687 if via_hub
10694 && let Some(target) = vfr_id.as_deref()
10695 && target != local_frontier_id
10696 && !allow_cross_vfr
10697 {
10698 fail(&format!(
10699 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
10700 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
10701 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
10702 ));
10703 }
10704
10705 #[derive(Debug)]
10707 enum SyncOutcome {
10708 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
10712 }
10713
10714 let outcome = if via_hub {
10715 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
10716 match federation::discover_peer_frontier(
10717 &peer.url,
10718 &target_vfr,
10719 Some(&peer.public_key),
10720 ) {
10721 DiscoveryResult::Resolved(p) => {
10722 let src =
10723 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
10724 SyncOutcome::Resolved(p, src)
10725 }
10726 DiscoveryResult::BrokenLocator {
10727 vfr_id,
10728 locator,
10729 status,
10730 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
10731 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
10732 SyncOutcome::UnverifiedEntry(vfr_id, reason)
10733 }
10734 DiscoveryResult::EntryNotFound { vfr_id, status } => {
10735 SyncOutcome::EntryNotFound(vfr_id, status)
10736 }
10737 DiscoveryResult::Unreachable { url, error } => {
10738 fail(&format!("peer hub unreachable ({url}): {error}"));
10739 }
10740 }
10741 } else {
10742 let resolved_url = url.unwrap_or_else(|| {
10743 let base = peer.url.trim_end_matches('/');
10744 format!("{base}/manifest/{local_frontier_id}.json")
10745 });
10746 match federation::fetch_peer_frontier(&resolved_url) {
10747 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
10748 Err(e) => fail(&format!("direct fetch failed: {e}")),
10749 }
10750 };
10751
10752 let peer_source: String;
10755 let peer_state = match outcome {
10756 SyncOutcome::Resolved(p, src) => {
10757 if !json {
10758 println!(" · resolved via {src}");
10759 }
10760 peer_source = src;
10761 p
10762 }
10763 SyncOutcome::BrokenLocator(vfr, locator, status) => {
10764 if dry_run {
10765 if json {
10766 println!(
10767 "{}",
10768 serde_json::to_string_pretty(&json!({
10769 "ok": true,
10770 "command": "federation.sync",
10771 "dry_run": true,
10772 "outcome": "broken_locator",
10773 "vfr_id": vfr,
10774 "locator": locator,
10775 "http_status": status,
10776 }))
10777 .expect("serialize")
10778 );
10779 } else {
10780 println!(
10781 "{} dry-run: peer entry resolved but locator dead",
10782 style::warn("broken_locator")
10783 );
10784 println!(" vfr_id: {vfr}");
10785 println!(" locator: {locator} (HTTP {status})");
10786 }
10787 return;
10788 }
10789 let report = federation::record_locator_failure(
10790 &mut project,
10791 &peer_id,
10792 &vfr,
10793 &locator,
10794 status,
10795 );
10796 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10797 if json {
10798 println!(
10799 "{}",
10800 serde_json::to_string_pretty(&json!({
10801 "ok": true,
10802 "command": "federation.sync",
10803 "outcome": "broken_locator",
10804 "report": report,
10805 }))
10806 .expect("serialize")
10807 );
10808 } else {
10809 println!(
10810 "{} sync recorded broken-locator conflict against {peer_id}",
10811 style::warn("broken_locator")
10812 );
10813 println!(" vfr_id: {vfr}");
10814 println!(" locator: {locator} (HTTP {status})");
10815 println!(" events appended: {}", report.events_appended);
10816 }
10817 return;
10818 }
10819 SyncOutcome::UnverifiedEntry(vfr, reason) => {
10820 if dry_run {
10821 if json {
10822 println!(
10823 "{}",
10824 serde_json::to_string_pretty(&json!({
10825 "ok": true,
10826 "command": "federation.sync",
10827 "dry_run": true,
10828 "outcome": "unverified_peer_entry",
10829 "vfr_id": vfr,
10830 "reason": reason,
10831 }))
10832 .expect("serialize")
10833 );
10834 } else {
10835 println!(
10836 "{} dry-run: peer entry signature did not verify",
10837 style::lost("unverified_peer_entry")
10838 );
10839 println!(" vfr_id: {vfr}");
10840 println!(" reason: {reason}");
10841 }
10842 return;
10843 }
10844 let report =
10845 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
10846 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10847 if json {
10848 println!(
10849 "{}",
10850 serde_json::to_string_pretty(&json!({
10851 "ok": true,
10852 "command": "federation.sync",
10853 "outcome": "unverified_peer_entry",
10854 "report": report,
10855 }))
10856 .expect("serialize")
10857 );
10858 } else {
10859 println!(
10860 "{} sync halted; peer's registry entry signature did not verify",
10861 style::lost("unverified_peer_entry")
10862 );
10863 println!(" vfr_id: {vfr}");
10864 println!(" reason: {reason}");
10865 }
10866 return;
10867 }
10868 SyncOutcome::EntryNotFound(vfr, status) => {
10869 if json {
10870 println!(
10871 "{}",
10872 serde_json::to_string_pretty(&json!({
10873 "ok": false,
10874 "command": "federation.sync",
10875 "outcome": "entry_not_found",
10876 "vfr_id": vfr,
10877 "http_status": status,
10878 }))
10879 .expect("serialize")
10880 );
10881 } else {
10882 println!(
10883 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
10884 style::warn("entry_not_found")
10885 );
10886 }
10887 return;
10888 }
10889 };
10890
10891 if dry_run {
10892 let conflicts = federation::diff_frontiers(&project, &peer_state);
10893 if json {
10894 println!(
10895 "{}",
10896 serde_json::to_string_pretty(&json!({
10897 "ok": true,
10898 "command": "federation.sync",
10899 "dry_run": true,
10900 "peer_id": peer_id,
10901 "peer_source": peer_source,
10902 "conflicts": conflicts,
10903 }))
10904 .expect("serialize federation.sync (dry-run)")
10905 );
10906 } else {
10907 println!(
10908 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
10909 style::ok("ok"),
10910 peer_source,
10911 conflicts.len()
10912 );
10913 for c in &conflicts {
10914 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
10915 }
10916 }
10917 return;
10918 }
10919
10920 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
10921 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10922
10923 if json {
10924 println!(
10925 "{}",
10926 serde_json::to_string_pretty(&json!({
10927 "ok": true,
10928 "command": "federation.sync",
10929 "peer_id": peer_id,
10930 "peer_source": peer_source,
10931 "report": report,
10932 }))
10933 .expect("serialize federation.sync")
10934 );
10935 } else {
10936 println!(
10937 "{} synced with {} ({})",
10938 style::ok("ok"),
10939 peer_id,
10940 peer_source
10941 );
10942 println!(
10943 " our: {}",
10944 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
10945 );
10946 println!(
10947 " peer: {}",
10948 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
10949 );
10950 println!(
10951 " conflicts: {} events appended: {}",
10952 report.conflicts.len(),
10953 report.events_appended
10954 );
10955 for c in &report.conflicts {
10956 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
10957 }
10958 }
10959 }
10960 FederationAction::PushResolution {
10961 frontier,
10962 conflict_event_id,
10963 to,
10964 key,
10965 vfr_id,
10966 json,
10967 } => {
10968 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
10969 }
10970 FederationAction::PeerRemove { frontier, id, json } => {
10971 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10972 let before = project.peers.len();
10973 project.peers.retain(|p| p.id != id);
10974 if project.peers.len() == before {
10975 fail(&format!("peer '{id}' not found in registry"));
10976 }
10977 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10978
10979 if json {
10980 println!(
10981 "{}",
10982 serde_json::to_string_pretty(&json!({
10983 "ok": true,
10984 "command": "federation.peer-remove",
10985 "frontier": frontier.display().to_string(),
10986 "removed": id,
10987 "remaining": project.peers.len(),
10988 }))
10989 .expect("serialize federation.peer-remove")
10990 );
10991 } else {
10992 println!(
10993 "{} peer {} ({} remaining)",
10994 style::ok("removed"),
10995 id,
10996 project.peers.len()
10997 );
10998 }
10999 }
11000 }
11001}
11002
11003fn cmd_federation_push_resolution(
11015 frontier: PathBuf,
11016 conflict_event_id: String,
11017 to: String,
11018 key: Option<PathBuf>,
11019 vfr_id: Option<String>,
11020 json: bool,
11021) {
11022 use crate::canonical;
11023 use crate::sign;
11024
11025 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11026
11027 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
11028 fail(&format!(
11029 "peer '{to}' not in registry; run `vela federation peer-add` first"
11030 ));
11031 };
11032
11033 let Some(resolution) = project
11035 .events
11036 .iter()
11037 .find(|e| {
11038 e.kind == "frontier.conflict_resolved"
11039 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
11040 == Some(conflict_event_id.as_str())
11041 })
11042 .cloned()
11043 else {
11044 fail(&format!(
11045 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
11046 frontier.display()
11047 ));
11048 };
11049
11050 let actor_id = resolution.actor.id.clone();
11053 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
11054 fail(&format!(
11055 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
11056 register the reviewer with `vela actor add` before pushing"
11057 ));
11058 };
11059
11060 let key_path = key.unwrap_or_else(|| {
11063 let home = std::env::var("HOME").unwrap_or_default();
11064 let base = PathBuf::from(home)
11065 .join(".config")
11066 .join("vela")
11067 .join("keys");
11068 let safe_id = actor.id.replace([':', '/'], "_");
11069 let by_actor = base.join(format!("{safe_id}.key"));
11070 if by_actor.exists() {
11071 by_actor
11072 } else {
11073 base.join("private.key")
11074 }
11075 });
11076
11077 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
11078 fail_return(&format!(
11079 "load private key from {}: {e}",
11080 key_path.display()
11081 ))
11082 });
11083 let pubkey_hex = sign::pubkey_hex(&signing_key);
11084 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
11085 fail(&format!(
11086 "private key at {} does not match actor {}'s registered public key. \
11087 Loaded pubkey {}, expected {}.",
11088 key_path.display(),
11089 actor.id,
11090 &pubkey_hex[..16],
11091 &actor.public_key[..16]
11092 ));
11093 }
11094
11095 let signature_hex = sign::sign_event(&resolution, &signing_key)
11098 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
11099
11100 let mut body = resolution.clone();
11105 body.signature = None;
11106 let body_value =
11107 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
11108 let _canonical_check = canonical::to_canonical_bytes(&body_value)
11109 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
11110
11111 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
11112 let url = format!(
11113 "{}/entries/{}/events",
11114 peer.url.trim_end_matches('/'),
11115 target_vfr
11116 );
11117
11118 let url_owned = url.clone();
11120 let pubkey_owned = pubkey_hex.clone();
11121 let signature_owned = signature_hex.clone();
11122 let body_owned = body_value.clone();
11123 let response: Result<(u16, String), String> = std::thread::spawn(move || {
11124 let client = reqwest::blocking::Client::new();
11125 let resp = client
11126 .post(&url_owned)
11127 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
11128 .header("X-Vela-Signature", &signature_owned)
11129 .json(&body_owned)
11130 .send()
11131 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
11132 let status = resp.status().as_u16();
11133 let text = resp.text().unwrap_or_default();
11134 Ok((status, text))
11135 })
11136 .join()
11137 .map_err(|_| "push thread panicked".to_string())
11138 .unwrap_or_else(|e| fail_return(&e));
11139
11140 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
11141 let parsed: serde_json::Value =
11142 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
11143
11144 let accepted = matches!(status, 200..=202);
11145 if json {
11146 println!(
11147 "{}",
11148 serde_json::to_string_pretty(&json!({
11149 "ok": accepted,
11150 "command": "federation.push-resolution",
11151 "frontier": frontier.display().to_string(),
11152 "peer_id": to,
11153 "url": url,
11154 "conflict_event_id": conflict_event_id,
11155 "event_id": resolution.id,
11156 "actor_id": actor.id,
11157 "http_status": status,
11158 "response": parsed,
11159 }))
11160 .expect("serialize federation.push-resolution")
11161 );
11162 } else if accepted {
11163 println!(
11164 "{} resolution {} pushed to {} (HTTP {})",
11165 style::ok("ok"),
11166 &resolution.id[..16.min(resolution.id.len())],
11167 to,
11168 status
11169 );
11170 println!(" url: {url}");
11171 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
11172 } else {
11173 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
11174 println!(" url: {url}");
11175 println!(" response: {text}");
11176 std::process::exit(1);
11177 }
11178}
11179
/// Handle `vela queue …` subcommands.
///
/// The queue file holds actions prepared offline (proposals and
/// accept/reject decisions) so they can be reviewed and signed later:
/// - `list`  — print queued actions (or schema + actions as JSON);
/// - `clear` — drop every queued action, reporting how many were removed;
/// - `sign`  — sign-and-apply each queued action against its target
///   frontier, optionally confirming one-by-one; declined and failed
///   actions are written back so they can be retried.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        QueueAction::List { queue_file, json } => {
            // Default to the conventional queue location when none is given.
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            // `queue::clear` returns the number of actions dropped.
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                // Nothing to sign; report success and leave the file untouched.
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file contains the hex-encoded private-key seed.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            // Actions that are declined interactively or fail to apply are
            // collected here and persisted back to the queue file.
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Keep the failed action queued for a later retry.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Overwrite the queue with only the not-yet-applied actions.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11321
11322fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11323 let bytes = hex::decode(hex_str)
11324 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11325 let key_bytes: [u8; 32] = bytes
11326 .try_into()
11327 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11328 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11329}
11330
11331fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11332 use std::io::{self, BufRead, Write};
11333 let mut stdout = io::stdout().lock();
11334 let _ = writeln!(
11335 stdout,
11336 " sign {} on {}? [y/N] ",
11337 action.kind,
11338 action.frontier.display()
11339 );
11340 let _ = stdout.flush();
11341 drop(stdout);
11342 let stdin = io::stdin();
11343 let mut line = String::new();
11344 if stdin.lock().read_line(&mut line).is_err() {
11345 return false;
11346 }
11347 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11348}
11349
/// Sign one queued action and apply it to its target frontier.
///
/// Supported `action.kind` values:
/// - `propose_review` / `propose_note` / `propose_revise_confidence` /
///   `propose_retract` — build the corresponding `finding.*` proposal from
///   `action.args` and create-or-apply it on the frontier;
/// - `accept_proposal` / `reject_proposal` — record a reviewer decision
///   for an existing proposal.
///
/// Returns a short human-readable report ("proposal <id>", "event <id>",
/// "rejected <id>") on success, or an error string naming the missing
/// argument or the failing frontier operation.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queue action kind onto the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Each proposal kind carries a different payload shape.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued timestamp when present so the proposal reflects
            // when it was prepared, not when it was signed.
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Recompute the content-derived id after overriding created_at.
            proposal.id = proposals::proposal_id(&proposal);
            // Signing failures abort this action via `?`.
            // NOTE(review): the signature value is discarded — confirm
            // whether create_or_apply records it or attachment is still TODO.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the reviewer decision that gets signed.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): signature computed but unused here as well —
            // verify whether accept/reject_at_path persist their own record.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11471
11472fn cmd_entity(action: EntityAction) {
11484 use crate::entity_resolve;
11485 match action {
11486 EntityAction::Resolve {
11487 frontier,
11488 force,
11489 json,
11490 } => {
11491 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11492 let report = entity_resolve::resolve_frontier(&mut p, force);
11493 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11494 if json {
11495 println!(
11496 "{}",
11497 serde_json::to_string_pretty(&serde_json::json!({
11498 "ok": true,
11499 "command": "entity.resolve",
11500 "frontier_path": frontier.display().to_string(),
11501 "report": report,
11502 }))
11503 .expect("serialize")
11504 );
11505 } else {
11506 println!(
11507 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11508 style::ok("entity"),
11509 report.resolved,
11510 report.total_entities,
11511 report.already_resolved,
11512 report.unresolved_count,
11513 report.findings_touched,
11514 );
11515 let unresolved_summary: std::collections::BTreeSet<&str> = report
11516 .per_finding
11517 .iter()
11518 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11519 .collect();
11520 if !unresolved_summary.is_empty() {
11521 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11522 println!(
11523 " unresolved (first {}): {}",
11524 take.len(),
11525 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11526 );
11527 }
11528 }
11529 }
11530 EntityAction::List { json } => {
11531 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11532 .map(|(name, etype, source, id)| {
11533 serde_json::json!({
11534 "canonical_name": name,
11535 "entity_type": etype,
11536 "source": source,
11537 "id": id,
11538 })
11539 })
11540 .collect();
11541 if json {
11542 println!(
11543 "{}",
11544 serde_json::to_string_pretty(&serde_json::json!({
11545 "ok": true,
11546 "command": "entity.list",
11547 "count": entries.len(),
11548 "entries": entries,
11549 }))
11550 .expect("serialize")
11551 );
11552 } else {
11553 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11554 for e in &entries {
11555 println!(
11556 " {:32} {:18} {} {}",
11557 e["canonical_name"].as_str().unwrap_or("?"),
11558 e["entity_type"].as_str().unwrap_or("?"),
11559 e["source"].as_str().unwrap_or("?"),
11560 e["id"].as_str().unwrap_or("?"),
11561 );
11562 }
11563 }
11564 }
11565 }
11566}
11567
/// Handle `vela link …` subcommands (currently only `add`).
///
/// `add` appends a typed link from one finding to another — either a local
/// finding in the same frontier (`vf_<hex>`) or a finding in a declared
/// cross-frontier dependency (`vf_<hex>@vfr_<hex>`) — after validating the
/// link type, the `inferred_by` role, and both endpoints.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            // Parse --to into a local or cross-frontier reference.
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // The source finding must exist in this frontier.
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target requires a declared dep for its vfr_id.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check of a cross-frontier target: fetch the
            // dep's frontier over HTTP(S) and warn (never fail) when the
            // target is superseded or absent. Skipped with --no-check-target
            // or when the dep has no http(s) locator; any network or parse
            // failure silently skips the check.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            // Stats are derived from findings/links; keep them in sync
            // before persisting.
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Attach the remote-check warning (if any) to the payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
11713
/// Handle `vela frontier …` subcommands.
///
/// Covers scaffolding a new frontier file (`new`), materializing its repo
/// layout (`materialize`), managing cross-frontier dependencies (`add-dep`,
/// `list-deps`, `remove-dep`, `refresh-deps`), and delegating `diff` to
/// `cmd_frontier_diff`.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force was given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // A brand-new frontier: current schema/compiler versions and
            // empty collections everywhere.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                // Mirror the JSON "next_steps" as a numbered human checklist.
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Reject duplicates: each vfr_id may be declared at most once.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        // A dep with a vfr_id is a cross-frontier link;
                        // others are plain compile-time dependencies.
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Refuse to remove a dep that any finding still links into:
            // the cross-frontier link would no longer resolve.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            if cross_deps.is_empty() {
                // Nothing to refresh; emit an empty summary and return early.
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal projection of the hub's entry response: only the
            // latest snapshot hash is needed here.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                // Classify each dep as missing / unreachable / unchanged /
                // refreshed and record a JSON outcome row for it.
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by vfr_id rather than trusting
                            // a possibly-stale index into `dependencies`.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Only mutate the pin when not a dry
                                        // run; the count still reflects what
                                        // would change.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Persist only when a pin actually changed.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Snapshot hashes are truncated to 16 chars for display.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
12149
12150fn cmd_repo(action: RepoAction) {
12151 match action {
12152 RepoAction::Status { frontier, json } => {
12153 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
12154 if json {
12155 println!(
12156 "{}",
12157 serde_json::to_string_pretty(&payload)
12158 .expect("failed to serialize repo status")
12159 );
12160 } else {
12161 let summary = payload.get("summary").unwrap_or(&Value::Null);
12162 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
12163 println!("vela repo status");
12164 println!(" frontier: {}", frontier.display());
12165 println!(
12166 " events: {}",
12167 summary
12168 .get("accepted_events")
12169 .and_then(Value::as_u64)
12170 .unwrap_or_default()
12171 );
12172 println!(
12173 " open proposals: {}",
12174 summary
12175 .get("open_proposals")
12176 .and_then(Value::as_u64)
12177 .unwrap_or_default()
12178 );
12179 println!(
12180 " state: {}",
12181 freshness
12182 .get("materialized_state")
12183 .and_then(Value::as_str)
12184 .unwrap_or("unknown")
12185 );
12186 println!(
12187 " proof: {}",
12188 freshness
12189 .get("proof")
12190 .and_then(Value::as_str)
12191 .unwrap_or("unknown")
12192 );
12193 }
12194 }
12195 RepoAction::Doctor { frontier, json } => {
12196 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
12197 if json {
12198 println!(
12199 "{}",
12200 serde_json::to_string_pretty(&payload)
12201 .expect("failed to serialize repo doctor")
12202 );
12203 } else {
12204 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12205 let issues = payload
12206 .get("issues")
12207 .and_then(Value::as_array)
12208 .map_or(0, Vec::len);
12209 println!("vela repo doctor");
12210 println!(" frontier: {}", frontier.display());
12211 println!(" status: {}", if ok { "ok" } else { "needs attention" });
12212 println!(" issues: {issues}");
12213 }
12214 }
12215 }
12216}
12217
12218fn cmd_proof_verify(frontier: &Path, json_output: bool) {
12219 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
12220 if json_output {
12221 println!(
12222 "{}",
12223 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
12224 );
12225 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12226 std::process::exit(1);
12227 }
12228 } else {
12229 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12230 println!("vela proof verify");
12231 println!(" frontier: {}", frontier.display());
12232 println!(" status: {}", if ok { "ok" } else { "failed" });
12233 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
12234 for issue in issues {
12235 if let Some(message) = issue.get("message").and_then(Value::as_str) {
12236 println!(" issue: {message}");
12237 }
12238 }
12239 }
12240 if !ok {
12241 std::process::exit(1);
12242 }
12243 }
12244}
12245
12246fn cmd_proof_explain(frontier: &Path) {
12247 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
12248 print!("{text}");
12249}
12250
/// Report what changed in a frontier over a time window.
///
/// The window is `[--since, now)` when `since` is given; otherwise it is the
/// ISO week named by `week` (defaulting to the current ISO week). Emits a
/// JSON payload when `json` is set, otherwise a human-readable summary.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the reporting window. An explicit --since wins over --week;
    // only the week path produces a label for display/JSON.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Bucket findings into: created in-window (`added`, with tensions also
    // recorded in `new_contradictions`) vs. updated in-window (`updated`).
    // `cumulative` counts everything created before the window's end.
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are treated as absent rather than fatal.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            // A contested flag or an explicit "tension" assertion type counts
            // as a new contradiction for this window.
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            // A finding added in-window is never also reported as updated.
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact per-finding JSON summaries shared by all three buckets.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable rendering: header, totals, then per-bucket listings.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12414
/// Truncate `s` to at most `n` characters (not bytes), appending a single
/// `…` when the input was shortened.
///
/// Counts `char`s rather than bytes so multi-byte UTF-8 text is never split
/// mid-character; a shortened result is exactly `n` chars (`n - 1` from the
/// input plus the ellipsis). Returns an empty string for `n == 0` — the
/// previous version produced a lone `"…"`, which exceeded the limit.
fn truncate(s: &str, n: usize) -> String {
    if n == 0 {
        return String::new();
    }
    if s.chars().count() <= n {
        return s.to_string();
    }
    // Reserve one slot for the ellipsis.
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
12424
12425fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12427 use chrono::Datelike;
12428 let iso = d.iso_week();
12429 format!("{:04}-W{:02}", iso.year(), iso.week())
12430}
12431
12432fn iso_week_bounds(
12435 key: &str,
12436) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12437 let (year_str, week_str) = key
12438 .split_once("-W")
12439 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12440 let year: i32 = year_str
12441 .parse()
12442 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12443 let week: u32 = week_str
12444 .parse()
12445 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12446 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12447 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12448 let next_monday = monday + chrono::Duration::days(7);
12449 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12450 let end = next_monday
12451 .and_hms_opt(0, 0, 0)
12452 .expect("00:00 valid")
12453 .and_utc();
12454 Ok((start, end))
12455}
12456
12457fn cmd_registry(action: RegistryAction) {
12462 use crate::registry;
12463 let default_registry = || -> PathBuf {
12464 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
12465 PathBuf::from(home)
12466 .join(".vela")
12467 .join("registry")
12468 .join("entries.json")
12469 };
12470 match action {
12471 RegistryAction::DependsOn { vfr_id, from, json } => {
12472 let base = from.trim_end_matches('/');
12473 let url = format!("{base}/entries/{vfr_id}/depends-on");
12474 let client = reqwest::blocking::Client::builder()
12475 .timeout(std::time::Duration::from_secs(30))
12476 .build()
12477 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12478 let resp = client
12479 .get(&url)
12480 .send()
12481 .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
12482 if !resp.status().is_success() {
12483 fail(&format!("GET {url}: HTTP {}", resp.status()));
12484 }
12485 let body: serde_json::Value = resp
12486 .json()
12487 .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
12488 if json {
12489 println!(
12490 "{}",
12491 serde_json::to_string_pretty(&body).expect("serialize")
12492 );
12493 } else {
12494 let dependents = body
12495 .get("dependents")
12496 .and_then(|v| v.as_array())
12497 .cloned()
12498 .unwrap_or_default();
12499 let count = dependents.len();
12500 println!(
12501 "{} {count} {} on {vfr_id}",
12502 style::ok("registry"),
12503 if count == 1 {
12504 "frontier depends"
12505 } else {
12506 "frontiers depend"
12507 },
12508 );
12509 for e in &dependents {
12510 let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
12511 let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
12512 let o = e
12513 .get("owner_actor_id")
12514 .and_then(|v| v.as_str())
12515 .unwrap_or("?");
12516 println!(" {v} {n} ({o})");
12517 }
12518 }
12519 }
12520 RegistryAction::Mirror {
12521 vfr_id,
12522 from,
12523 to,
12524 json,
12525 } => {
12526 let src_base = from.trim_end_matches('/');
12527 let dst_base = to.trim_end_matches('/');
12528 let src_url = format!("{src_base}/entries/{vfr_id}");
12529 let dst_url = format!("{dst_base}/entries");
12530 let client = reqwest::blocking::Client::builder()
12531 .timeout(std::time::Duration::from_secs(30))
12532 .build()
12533 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12534
12535 let entry: serde_json::Value = client
12536 .get(&src_url)
12537 .send()
12538 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12539 .error_for_status()
12540 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12541 .json()
12542 .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
12543
12544 let resp = client
12545 .post(&dst_url)
12546 .header("content-type", "application/json")
12547 .body(
12548 serde_json::to_vec(&entry)
12549 .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
12550 )
12551 .send()
12552 .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
12553 let status = resp.status();
12554 if !status.is_success() {
12555 let body = resp.text().unwrap_or_default();
12556 fail(&format!(
12557 "POST {dst_url}: HTTP {status}: {}",
12558 body.chars().take(300).collect::<String>()
12559 ));
12560 }
12561 let body: serde_json::Value = resp
12562 .json()
12563 .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
12564 let duplicate = body
12565 .get("duplicate")
12566 .and_then(serde_json::Value::as_bool)
12567 .unwrap_or(false);
12568 let payload = json!({
12569 "ok": true,
12570 "command": "registry.mirror",
12571 "vfr_id": vfr_id,
12572 "from": src_base,
12573 "to": dst_base,
12574 "duplicate_on_destination": duplicate,
12575 "destination_response": body,
12576 });
12577 if json {
12578 println!(
12579 "{}",
12580 serde_json::to_string_pretty(&payload).expect("serialize")
12581 );
12582 } else {
12583 println!(
12584 "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
12585 style::ok("registry"),
12586 if duplicate {
12587 " (duplicate; signature already known)"
12588 } else {
12589 " (fresh insert)"
12590 }
12591 );
12592 }
12593 }
12594 RegistryAction::List { from, json } => {
12595 let (label, registry_data) = match &from {
12598 Some(loc) if loc.starts_with("http") => (
12599 loc.clone(),
12600 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12601 ),
12602 Some(loc) => {
12603 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12604 (
12605 p.display().to_string(),
12606 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12607 )
12608 }
12609 None => {
12610 let p = default_registry();
12611 (
12612 p.display().to_string(),
12613 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12614 )
12615 }
12616 };
12617 let r = registry_data;
12618 let path_label = label;
12619 if json {
12620 let payload = json!({
12621 "ok": true,
12622 "command": "registry.list",
12623 "registry": path_label,
12624 "entry_count": r.entries.len(),
12625 "entries": r.entries,
12626 });
12627 println!(
12628 "{}",
12629 serde_json::to_string_pretty(&payload)
12630 .expect("failed to serialize registry.list")
12631 );
12632 } else {
12633 println!();
12634 println!(
12635 " {}",
12636 format!("VELA · REGISTRY · LIST · {}", path_label)
12637 .to_uppercase()
12638 .dimmed()
12639 );
12640 println!(" {}", style::tick_row(60));
12641 if r.entries.is_empty() {
12642 println!(" (registry is empty)");
12643 } else {
12644 for entry in &r.entries {
12645 println!(
12646 " {} {} ({}) by {} published {}",
12647 entry.vfr_id,
12648 entry.name,
12649 entry.network_locator,
12650 entry.owner_actor_id,
12651 entry.signed_publish_at
12652 );
12653 }
12654 }
12655 }
12656 }
12657 RegistryAction::Publish {
12658 frontier,
12659 owner,
12660 key,
12661 locator,
12662 to,
12663 json,
12664 } => {
12665 let key_hex = std::fs::read_to_string(&key)
12668 .map(|s| s.trim().to_string())
12669 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
12670 let signing_key = parse_signing_key(&key_hex);
12671 let derived = hex::encode(signing_key.verifying_key().to_bytes());
12672
12673 let mut frontier_data =
12675 repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12676
12677 let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
12678 Some(actor) => actor.public_key.clone(),
12679 None => {
12680 eprintln!(
12688 " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
12689 &derived[..16]
12690 );
12691 frontier_data.actors.push(sign::ActorRecord {
12692 id: owner.clone(),
12693 public_key: derived.clone(),
12694 algorithm: "ed25519".to_string(),
12695 created_at: chrono::Utc::now().to_rfc3339(),
12696 tier: None,
12697 orcid: None,
12698 access_clearance: None,
12699 });
12700 repo::save_to_path(&frontier, &frontier_data)
12701 .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
12702 derived.clone()
12703 }
12704 };
12705
12706 let snapshot_hash = events::snapshot_hash(&frontier_data);
12710 let event_log_hash = events::event_log_hash(&frontier_data.events);
12711 let vfr_id = frontier_data.frontier_id();
12712 let name = frontier_data.project.name.clone();
12713
12714 if derived != pubkey {
12716 fail(&format!(
12717 "private key does not match registered pubkey for owner '{owner}'"
12718 ));
12719 }
12720
12721 let to_is_remote = matches!(
12729 to.as_deref(),
12730 Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
12731 );
12732 let resolved_locator = match locator {
12733 Some(l) => l,
12734 None => {
12735 if to_is_remote {
12736 let hub = to.as_deref().unwrap().trim_end_matches('/');
12737 let hub_root = hub.trim_end_matches("/entries");
12738 format!("{hub_root}/entries/{vfr_id}/snapshot")
12739 } else {
12740 fail_return(
12741 "--locator is required for local publishes; pass e.g. \
12742 --locator file:///path/to/frontier.json or an HTTPS URL.",
12743 )
12744 }
12745 }
12746 };
12747
12748 let mut entry = registry::RegistryEntry {
12749 schema: registry::ENTRY_SCHEMA.to_string(),
12750 vfr_id: vfr_id.clone(),
12751 name: name.clone(),
12752 owner_actor_id: owner.clone(),
12753 owner_pubkey: pubkey,
12754 latest_snapshot_hash: snapshot_hash,
12755 latest_event_log_hash: event_log_hash,
12756 network_locator: resolved_locator,
12757 signed_publish_at: chrono::Utc::now().to_rfc3339(),
12758 signature: String::new(),
12759 };
12760 entry.signature =
12761 registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
12762
12763 let (registry_label, duplicate) = if to_is_remote {
12764 let hub_url = to.clone().unwrap();
12765 let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
12769 .unwrap_or_else(|e| fail_return(&e));
12770 (hub_url, resp.duplicate)
12771 } else {
12772 let registry_path = match &to {
12773 Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
12774 None => default_registry(),
12775 };
12776 registry::publish_entry(®istry_path, entry.clone())
12777 .unwrap_or_else(|e| fail_return(&e));
12778 (registry_path.display().to_string(), false)
12779 };
12780
12781 let payload = json!({
12782 "ok": true,
12783 "command": "registry.publish",
12784 "registry": registry_label,
12785 "vfr_id": vfr_id,
12786 "name": name,
12787 "owner": owner,
12788 "snapshot_hash": entry.latest_snapshot_hash,
12789 "event_log_hash": entry.latest_event_log_hash,
12790 "signed_publish_at": entry.signed_publish_at,
12791 "signature": entry.signature,
12792 "duplicate": duplicate,
12793 });
12794 if json {
12795 println!(
12796 "{}",
12797 serde_json::to_string_pretty(&payload)
12798 .expect("failed to serialize registry.publish")
12799 );
12800 } else {
12801 let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
12802 println!(
12803 "{} published {vfr_id} → {}{}",
12804 style::ok("registry"),
12805 registry_label,
12806 dup_suffix
12807 );
12808 println!(" snapshot: {}", entry.latest_snapshot_hash);
12809 println!(" event_log: {}", entry.latest_event_log_hash);
12810 println!(" signature: {}…", &entry.signature[..16]);
12811 }
12812 }
12813 RegistryAction::Pull {
12814 vfr_id,
12815 from,
12816 out,
12817 transitive,
12818 depth,
12819 json,
12820 } => {
12821 let (registry_label, registry_data) = match &from {
12825 Some(loc) if loc.starts_with("http") => (
12826 loc.clone(),
12827 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12828 ),
12829 Some(loc) => {
12830 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12831 (
12832 p.display().to_string(),
12833 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12834 )
12835 }
12836 None => {
12837 let p = default_registry();
12838 (
12839 p.display().to_string(),
12840 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12841 )
12842 }
12843 };
12844 let entry = registry::find_latest(®istry_data, &vfr_id)
12845 .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
12846
12847 if transitive {
12848 let result = registry::pull_transitive(®istry_data, &vfr_id, &out, depth)
12852 .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
12853
12854 let dep_paths_json: serde_json::Value = serde_json::Value::Object(
12855 result
12856 .deps
12857 .iter()
12858 .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
12859 .collect(),
12860 );
12861 let payload = json!({
12862 "ok": true,
12863 "command": "registry.pull",
12864 "registry": registry_label,
12865 "vfr_id": vfr_id,
12866 "transitive": true,
12867 "depth": depth,
12868 "out_dir": out.display().to_string(),
12869 "primary": result.primary_path.display().to_string(),
12870 "verified": result.verified,
12871 "deps": dep_paths_json,
12872 });
12873 if json {
12874 println!(
12875 "{}",
12876 serde_json::to_string_pretty(&payload)
12877 .expect("failed to serialize registry.pull")
12878 );
12879 } else {
12880 println!(
12881 "{} pulled {vfr_id} (transitive) → {}",
12882 style::ok("registry"),
12883 out.display()
12884 );
12885 println!(" verified {} frontier(s):", result.verified.len());
12886 for v in &result.verified {
12887 println!(" · {v}");
12888 }
12889 println!(" every cross-frontier dependency's pinned snapshot hash matched");
12890 }
12891 return;
12892 }
12893
12894 registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
12897 .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
12898 registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
12899 let _ = std::fs::remove_file(&out);
12900 fail_return(&format!("pull verification failed: {e}"))
12901 });
12902
12903 let payload = json!({
12904 "ok": true,
12905 "command": "registry.pull",
12906 "registry": registry_label,
12907 "vfr_id": vfr_id,
12908 "out": out.display().to_string(),
12909 "snapshot_hash": entry.latest_snapshot_hash,
12910 "event_log_hash": entry.latest_event_log_hash,
12911 "verified": true,
12912 });
12913 if json {
12914 println!(
12915 "{}",
12916 serde_json::to_string_pretty(&payload)
12917 .expect("failed to serialize registry.pull")
12918 );
12919 } else {
12920 println!(
12921 "{} pulled {vfr_id} → {}",
12922 style::ok("registry"),
12923 out.display()
12924 );
12925 println!(" verified snapshot+event_log hashes match registry; signature ok");
12926 }
12927 }
12928 }
12929}
12930
12931fn print_stats_json(path: &Path) {
12932 let frontier = load_frontier_or_fail(path);
12933 let source_hash = hash_path_or_fail(path);
12934 let payload = json!({
12935 "ok": true,
12936 "command": "stats",
12937 "schema_version": project::VELA_SCHEMA_VERSION,
12938 "frontier": {
12939 "name": &frontier.project.name,
12940 "description": &frontier.project.description,
12941 "source": path.display().to_string(),
12942 "hash": format!("sha256:{source_hash}"),
12943 "compiled_at": &frontier.project.compiled_at,
12944 "compiler": &frontier.project.compiler,
12945 "papers_processed": frontier.project.papers_processed,
12946 "errors": frontier.project.errors,
12947 },
12948 "stats": frontier.stats,
12949 "proposals": proposals::summary(&frontier),
12950 "proof_state": frontier.proof_state,
12951 });
12952 println!(
12953 "{}",
12954 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
12955 );
12956}
12957
12958fn cmd_search(
12959 source: Option<&Path>,
12960 query: &str,
12961 entity: Option<&str>,
12962 assertion_type: Option<&str>,
12963 all: Option<&Path>,
12964 limit: usize,
12965 json_output: bool,
12966) {
12967 if let Some(dir) = all {
12968 search::run_all(dir, query, entity, assertion_type, limit);
12969 return;
12970 }
12971 let Some(src) = source else {
12972 fail("Provide --source <frontier> or --all <directory>.");
12973 };
12974 if json_output {
12975 let results = search::search(src, query, entity, assertion_type, limit);
12976 let loaded = load_frontier_or_fail(src);
12977 let source_hash = hash_path_or_fail(src);
12978 let payload = json!({
12979 "ok": true,
12980 "command": "search",
12981 "schema_version": project::VELA_SCHEMA_VERSION,
12982 "query": query,
12983 "frontier": {
12984 "name": &loaded.project.name,
12985 "source": src.display().to_string(),
12986 "hash": format!("sha256:{source_hash}"),
12987 },
12988 "filters": {
12989 "entity": entity,
12990 "assertion_type": assertion_type,
12991 "limit": limit,
12992 },
12993 "count": results.len(),
12994 "results": results.iter().map(|result| json!({
12995 "id": &result.id,
12996 "score": result.score,
12997 "assertion": &result.assertion,
12998 "assertion_type": &result.assertion_type,
12999 "confidence": result.confidence,
13000 "entities": &result.entities,
13001 "doi": &result.doi,
13002 })).collect::<Vec<_>>()
13003 });
13004 println!(
13005 "{}",
13006 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
13007 );
13008 } else {
13009 search::run(src, query, entity, assertion_type, limit);
13010 }
13011}
13012
13013fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
13014 let frontier = load_frontier_or_fail(source);
13015 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
13016 if json_output {
13017 let source_hash = hash_path_or_fail(source);
13018 let payload = json!({
13019 "ok": true,
13020 "command": "tensions",
13021 "schema_version": project::VELA_SCHEMA_VERSION,
13022 "frontier": {
13023 "name": &frontier.project.name,
13024 "source": source.display().to_string(),
13025 "hash": format!("sha256:{source_hash}"),
13026 },
13027 "filters": {
13028 "both_high": both_high,
13029 "cross_domain": cross_domain,
13030 "top": top,
13031 },
13032 "count": result.len(),
13033 "tensions": result.iter().map(|t| json!({
13034 "score": t.score,
13035 "resolved": t.resolved,
13036 "superseding_id": &t.superseding_id,
13037 "finding_a": {
13038 "id": &t.finding_a.id,
13039 "assertion": &t.finding_a.assertion,
13040 "confidence": t.finding_a.confidence,
13041 "assertion_type": &t.finding_a.assertion_type,
13042 "citation_count": t.finding_a.citation_count,
13043 "contradicts_count": t.finding_a.contradicts_count,
13044 },
13045 "finding_b": {
13046 "id": &t.finding_b.id,
13047 "assertion": &t.finding_b.assertion,
13048 "confidence": t.finding_b.confidence,
13049 "assertion_type": &t.finding_b.assertion_type,
13050 "citation_count": t.finding_b.citation_count,
13051 "contradicts_count": t.finding_b.contradicts_count,
13052 }
13053 })).collect::<Vec<_>>()
13054 });
13055 println!(
13056 "{}",
13057 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
13058 );
13059 } else {
13060 tensions::print_tensions(&result);
13061 }
13062}
13063
13064fn cmd_gaps(action: GapsAction) {
13065 match action {
13066 GapsAction::Rank {
13067 frontier,
13068 top,
13069 domain,
13070 json,
13071 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
13072 }
13073}
13074
/// Rank candidate "gap" findings in a frontier as review leads.
///
/// Considers findings flagged `gap` or `negative_space`, optionally filtered
/// by a case-insensitive `domain` substring match over the assertion text or
/// any entity name. Each lead is scored as (inbound link count + confidence);
/// the top `top` leads are printed as JSON or a short numbered list.
fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
    let frontier = load_frontier_or_fail(frontier_path);
    let mut ranked = frontier
        .findings
        .iter()
        .filter(|finding| finding.flags.gap || finding.flags.negative_space)
        .filter(|finding| {
            // No --domain means every flagged finding qualifies.
            domain.is_none_or(|domain| {
                finding
                    .assertion
                    .text
                    .to_lowercase()
                    .contains(&domain.to_lowercase())
                    || finding
                        .assertion
                        .entities
                        .iter()
                        .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
            })
        })
        .map(|finding| {
            // Inbound links: how many other findings point at this one.
            let dependency_count = frontier
                .findings
                .iter()
                .flat_map(|candidate| candidate.links.iter())
                .filter(|link| link.target == finding.id)
                .count();
            let score = dependency_count as f64 + finding.confidence.score;
            json!({
                "id": &finding.id,
                "kind": "candidate_gap_review_lead",
                "assertion": &finding.assertion.text,
                "score": score,
                "dependency_count": dependency_count,
                "confidence": finding.confidence.score,
                "evidence_type": &finding.evidence.evidence_type,
                "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
                "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
                "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
            })
        })
        .collect::<Vec<_>>();
    // Sort descending by score; unparseable/missing scores compare equal.
    ranked.sort_by(|a, b| {
        b.get("score")
            .and_then(Value::as_f64)
            .partial_cmp(&a.get("score").and_then(Value::as_f64))
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    ranked.truncate(top);
    if json_output {
        let source_hash = hash_path_or_fail(frontier_path);
        // "review_leads" and "gaps" intentionally carry the same payload
        // (the latter presumably kept for backward compatibility — TODO confirm).
        let payload = json!({
            "ok": true,
            "command": "gaps rank",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": frontier_path.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "top": top,
                "domain": domain,
            },
            "count": ranked.len(),
            "ranking_label": "candidate gap review leads",
            "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
            "review_leads": ranked.clone(),
            "gaps": ranked,
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
        );
    } else {
        println!();
        println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
        println!(" {}", style::tick_row(60));
        println!(" review source scope; these are not guaranteed experiment targets.");
        println!();
        for (idx, gap) in ranked.iter().enumerate() {
            println!(
                " {}. [{}] score={} {}",
                idx + 1,
                gap["id"].as_str().unwrap_or("?"),
                gap["score"].as_f64().unwrap_or(0.0),
                gap["assertion"].as_str().unwrap_or("")
            );
        }
    }
}
13166
13167async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
13168 if inputs.len() < 2 {
13169 fail("need at least 2 frontier files for bridge detection.");
13170 }
13171 println!();
13172 println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
13173 println!(" {}", style::tick_row(60));
13174 println!(" loading {} frontiers...", inputs.len());
13175 let mut named_projects = Vec::<(String, project::Project)>::new();
13176 let mut total_findings = 0;
13177 for path in inputs {
13178 let frontier = load_frontier_or_fail(path);
13179 let name = path
13180 .file_stem()
13181 .unwrap_or_default()
13182 .to_string_lossy()
13183 .to_string();
13184 println!(" {} · {} findings", name, frontier.stats.findings);
13185 total_findings += frontier.stats.findings;
13186 named_projects.push((name, frontier));
13187 }
13188 let refs = named_projects
13189 .iter()
13190 .map(|(name, frontier)| (name.as_str(), frontier))
13191 .collect::<Vec<_>>();
13192 let mut bridges = bridge::detect_bridges(&refs);
13193 if check_novelty && !bridges.is_empty() {
13194 let client = Client::new();
13195 let check_count = bridges.len().min(top_n);
13196 println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
13197 for bridge_item in bridges.iter_mut().take(check_count) {
13198 let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
13199 match bridge::check_novelty(&client, &query).await {
13200 Ok(count) => bridge_item.pubmed_count = Some(count),
13201 Err(e) => eprintln!(
13202 " {} prior-art check failed for {}: {e}",
13203 style::err_prefix(),
13204 bridge_item.entity_name
13205 ),
13206 }
13207 tokio::time::sleep(std::time::Duration::from_millis(350)).await;
13208 }
13209 }
13210 print!("{}", bridge::format_report(&bridges, total_findings));
13211}
13212
/// Flag bundle for the `bench` subcommand, consumed by `cmd_bench`.
struct BenchArgs {
    /// Frontier file to benchmark; defaults to `frontiers/bbb-alzheimer.json`.
    frontier: Option<PathBuf>,
    /// Gold-standard file for finding-level scoring. (usage is past this
    /// view — confirm against the tail of `cmd_bench`)
    gold: Option<PathBuf>,
    /// Gold file selecting an entity-extraction benchmark mode.
    entity_gold: Option<PathBuf>,
    /// Gold file selecting `BenchmarkMode::Link`; takes precedence over
    /// `entity_gold` in `cmd_bench`.
    link_gold: Option<PathBuf>,
    /// Suite definition path; defaults to `benchmarks/suites/bbb-core.json`.
    suite: Option<PathBuf>,
    /// When set, only print the suite-readiness report and exit.
    suite_ready: bool,
    /// Minimum F1 threshold; `cmd_bench` falls back to 0.05 unless
    /// `no_thresholds` is set.
    min_f1: Option<f64>,
    /// Minimum precision threshold (no default applied).
    min_precision: Option<f64>,
    /// Minimum recall threshold (no default applied).
    min_recall: Option<f64>,
    /// Disable all pass/fail thresholds.
    no_thresholds: bool,
    /// Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
13226
13227fn cmd_agent_bench(
13232 gold: &Path,
13233 candidate: &Path,
13234 sources: Option<&Path>,
13235 threshold: Option<f64>,
13236 report_path: Option<&Path>,
13237 json_out: bool,
13238) {
13239 let input = crate::agent_bench::BenchInput {
13240 gold_path: gold.to_path_buf(),
13241 candidate_path: candidate.to_path_buf(),
13242 sources: sources.map(Path::to_path_buf),
13243 threshold: threshold.unwrap_or(0.0),
13244 };
13245 let report = match crate::agent_bench::run(input) {
13246 Ok(r) => r,
13247 Err(e) => {
13248 eprintln!("{} bench failed: {e}", style::err_prefix());
13249 std::process::exit(1);
13250 }
13251 };
13252
13253 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
13254 if let Some(path) = report_path
13255 && let Err(e) = std::fs::write(path, &json)
13256 {
13257 eprintln!(
13258 "{} failed to write report to {}: {e}",
13259 style::err_prefix(),
13260 path.display()
13261 );
13262 }
13263
13264 if json_out {
13265 println!("{json}");
13266 } else {
13267 println!();
13268 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
13269 println!(" {}", style::tick_row(60));
13270 print!("{}", crate::agent_bench::render_pretty(&report));
13271 println!();
13272 }
13273
13274 if !report.pass {
13275 std::process::exit(1);
13276 }
13277}
13278
/// Dispatches the `bench` command across its mutually exclusive modes:
/// `--suite-ready` (readiness report), `--suite` (full suite run), or a
/// single-task run against `--gold` / `--entity-gold` / `--link-gold`.
///
/// Every mode exits with status 1 when its payload's `ok` flag is not true.
fn cmd_bench(args: BenchArgs) {
    if args.suite_ready {
        // Readiness check: --suite may still name the suite file; otherwise
        // fall back to the bundled default suite.
        let suite_path = args
            .suite
            .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
        let payload =
            benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }
    if let Some(suite_path) = args.suite {
        let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
        if args.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&payload)
                    .expect("failed to serialize benchmark suite")
            );
        } else {
            let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
            let metrics = payload.get("metrics").unwrap_or(&Value::Null);
            println!();
            println!("  {}", "VELA · BENCH · SUITE".dimmed());
            println!("  {}", style::tick_row(60));
            println!("  suite: {}", suite_path.display());
            println!(
                "  status: {}",
                if ok {
                    style::ok("pass")
                } else {
                    style::lost("fail")
                }
            );
            println!(
                "  tasks: {}/{} passed",
                metrics
                    .get("tasks_passed")
                    .and_then(Value::as_u64)
                    .unwrap_or(0),
                metrics
                    .get("tasks_total")
                    .and_then(Value::as_u64)
                    .unwrap_or(0)
            );
        }
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Single-task mode against a default or user-supplied frontier.
    let frontier = args
        .frontier
        .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
    let thresholds = benchmark::BenchmarkThresholds {
        // --no-thresholds disables all gates; otherwise F1 defaults to 0.05
        // while precision/recall gate only when explicitly requested.
        min_f1: if args.no_thresholds {
            None
        } else {
            args.min_f1.or(Some(0.05))
        },
        min_precision: if args.no_thresholds {
            None
        } else {
            args.min_precision
        },
        min_recall: if args.no_thresholds {
            None
        } else {
            args.min_recall
        },
        ..Default::default()
    };
    if let Some(path) = args.link_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Link,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.entity_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Entity,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.gold {
        if args.json {
            print_benchmark_or_exit(benchmark::task_envelope(
                &frontier,
                None,
                benchmark::BenchmarkMode::Finding,
                Some(&path),
                &thresholds,
                None,
            ));
        } else {
            // Legacy pretty path: benchmark::run renders its own output.
            benchmark::run(&frontier, &path, false);
        }
    } else {
        fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
    }
}
13392
13393fn print_benchmark_or_exit(result: Result<Value, String>) {
13394 let payload = result.unwrap_or_else(|e| fail_return(&e));
13395 println!(
13396 "{}",
13397 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13398 );
13399 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13400 std::process::exit(1);
13401 }
13402}
13403
13404fn cmd_packet(action: PacketAction) {
13405 let (result, json_output) = match action {
13406 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13407 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13408 };
13409 match result {
13410 Ok(output) if json_output => {
13411 println!(
13412 "{}",
13413 serde_json::to_string_pretty(&json!({
13414 "ok": true,
13415 "command": "packet",
13416 "result": output,
13417 }))
13418 .expect("failed to serialize packet response")
13419 );
13420 }
13421 Ok(output) => println!("{output}"),
13422 Err(e) => fail(&e),
13423 }
13424}
13425
/// Verifies a packet at `path` (thin alias over `packet::validate`) and
/// reports the result; in plain-text mode it appends a reproducibility blurb.
fn cmd_verify(path: &Path, json_output: bool) {
    let result = packet::validate(path);
    match result {
        Ok(output) if json_output => {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": true,
                    "command": "verify",
                    "result": output,
                }))
                .expect("failed to serialize verify response")
            );
        }
        Ok(output) => {
            println!("{output}");
            // The manifest check itself happened inside packet::validate;
            // this is only the human-readable confirmation.
            println!(
                "\nverify: ok\n  every file in the manifest matched its claimed sha256.\n  pull this packet on another machine, run the same command, see the same line."
            );
        }
        Err(e) => fail(&e),
    }
}
13453
13454fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13455 if path.join(".vela").exists() {
13456 fail(&format!(
13457 "already initialized: {} exists",
13458 path.join(".vela").display()
13459 ));
13460 }
13461 let payload = frontier_repo::initialize(
13462 path,
13463 frontier_repo::InitOptions {
13464 name,
13465 template,
13466 initialize_git,
13467 },
13468 )
13469 .unwrap_or_else(|e| fail_return(&e));
13470 if json_output {
13471 println!(
13472 "{}",
13473 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13474 );
13475 } else {
13476 println!(
13477 "{} initialized frontier repository in {}",
13478 style::ok("ok"),
13479 path.display()
13480 );
13481 }
13482}
13483
/// End-to-end bootstrap: init a repo, generate a signing keypair, register
/// the reviewer actor, and add a first finding — all by re-invoking this
/// same binary as subprocesses so each step goes through the real CLI path.
///
/// Any failing step aborts the whole quickstart with that step's stderr.
fn cmd_quickstart(
    path: &Path,
    name: &str,
    reviewer: &str,
    assertion: Option<&str>,
    keys_out: Option<&Path>,
    json_output: bool,
) {
    use std::process::Command;

    if path.join(".vela").exists() {
        fail(&format!(
            "already initialized: {} exists",
            path.join(".vela").display()
        ));
    }

    // Re-exec ourselves so every step exercises the normal CLI surface.
    let exe = std::env::current_exe()
        .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
    let keys_dir = keys_out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("keys"));
    let assertion_text =
        assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");

    // Runs one subcommand; aborts the quickstart (with the step's stderr)
    // on spawn failure or non-zero exit. Returns captured output on success.
    let run_step = |label: &str, args: &[&str]| -> std::process::Output {
        let out = Command::new(&exe)
            .args(args)
            .output()
            .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
        if !out.status.success() {
            let stderr = String::from_utf8_lossy(&out.stderr);
            fail(&format!("{label} failed:\n{stderr}"));
        }
        out
    };

    // Step 1: initialize the repository (git init is skipped here).
    run_step(
        "init",
        &[
            "init",
            path.to_string_lossy().as_ref(),
            "--name",
            name,
            "--no-git",
            "--json",
        ],
    );

    // Step 2: generate a keypair and pull the public key from its JSON output.
    let keys_out_str = keys_dir.to_string_lossy().into_owned();
    let keypair_out = run_step(
        "sign.generate-keypair",
        &[
            "sign",
            "generate-keypair",
            "--out",
            keys_out_str.as_ref(),
            "--json",
        ],
    );
    let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
    let public_key = keypair_json
        .get("public_key")
        .and_then(|v| v.as_str())
        .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
        .to_string();

    // Step 3: register the reviewer actor under that public key.
    run_step(
        "actor.add",
        &[
            "actor",
            "add",
            path.to_string_lossy().as_ref(),
            reviewer,
            "--pubkey",
            public_key.as_str(),
            "--json",
        ],
    );

    // Step 4: seed the frontier with a first (possibly placeholder) finding.
    let finding_out = run_step(
        "finding.add",
        &[
            "finding",
            "add",
            path.to_string_lossy().as_ref(),
            "--assertion",
            assertion_text,
            "--author",
            reviewer,
            "--apply",
            "--json",
        ],
    );
    let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
    // finding_id is optional in the step output; absence is tolerated.
    let finding_id = finding_json
        .get("finding_id")
        .and_then(|v| v.as_str())
        .map(str::to_string);

    if json_output {
        let payload = json!({
            "ok": true,
            "command": "quickstart",
            "frontier": path.display().to_string(),
            "name": name,
            "reviewer": reviewer,
            "public_key": public_key,
            "keys_dir": keys_dir.display().to_string(),
            "finding_id": finding_id,
            "next_steps": [
                format!("vela serve {}", path.display()),
                format!(
                    "vela ingest <paper.pdf|doi:...> --frontier {}",
                    path.display()
                ),
                format!("vela log {}", path.display()),
            ],
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
        );
        return;
    }

    println!();
    println!(
        "  {}",
        format!("VELA · QUICKSTART · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!("  frontier: {}", path.display());
    println!("  name: {name}");
    println!("  reviewer: {reviewer}");
    println!("  keys: {}", keys_dir.display());
    // NOTE(review): byte-slicing assumes the public key is ASCII and at least
    // 16 bytes long — panics otherwise; confirm against the keypair format.
    println!("  pubkey: {}…", &public_key[..16]);
    if let Some(id) = finding_id.as_deref() {
        println!("  finding: {id}");
    }
    println!();
    println!("  {}", style::ok("done"));
    println!("  next:");
    println!("    vela serve {}", path.display());
    println!(
        "    vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
        path.display()
    );
    println!("    vela log {}", path.display());
    println!();
}
13649
/// Materializes (or, with `--check`, verifies) the `vela.lock` for a frontier
/// repository and reports the resulting content hashes.
fn cmd_lock(path: &Path, check: bool, json_output: bool) {
    if check {
        // --check delegates to the drift detector and never rewrites the lock.
        cmd_lock_check(path, json_output);
        return;
    }
    let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "lock",
                "path": path.display().to_string(),
                "snapshot_hash": payload.get("snapshot_hash"),
                "event_log_hash": payload.get("event_log_hash"),
                "proposal_state_hash": payload.get("proposal_state_hash"),
            }))
            .expect("failed to serialize lock report")
        );
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · LOCK · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    // "?" stands in for hashes the materialize payload did not include.
    println!(
        "  snapshot_hash: {}",
        payload
            .get("snapshot_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        "  event_log_hash: {}",
        payload
            .get("event_log_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        "  proposal_state_hash: {}",
        payload
            .get("proposal_state_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!();
    println!("  {}", style::ok("locked"));
}
13708
/// Compares the hashes recorded in `vela.lock` against the hashes of the
/// current on-disk state and reports any drift. Exits with status 1 when
/// the lock is missing or drift is detected.
fn cmd_lock_check(path: &Path, json_output: bool) {
    use crate::frontier_repo::read_lock;
    let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
    let Some(lock) = lock else {
        fail("lock --check: no vela.lock found at path");
    };
    // Recompute current hashes from the live project state.
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
    let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
    let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
    // Collect human-readable drift lines; empty drift means the lock is fresh.
    let mut drift: Vec<String> = Vec::new();
    if lock.snapshot_hash != current_snapshot {
        drift.push(format!(
            "snapshot_hash: lock={} current={}",
            lock.snapshot_hash, current_snapshot
        ));
    }
    if lock.event_log_hash != current_event_log {
        drift.push(format!(
            "event_log_hash: lock={} current={}",
            lock.event_log_hash, current_event_log
        ));
    }
    let ok = drift.is_empty();
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": ok,
                "command": "lock.check",
                "path": path.display().to_string(),
                "drift": drift,
                "lock_snapshot_hash": lock.snapshot_hash,
                "current_snapshot_hash": current_snapshot,
                "lock_event_log_hash": lock.event_log_hash,
                "current_event_log_hash": current_event_log,
                "dependency_count": lock.dependencies.len(),
            }))
            .expect("failed to serialize lock check report")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · LOCK · CHECK · {}", path.display())
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        if ok {
            println!("  snapshot_hash: {}", lock.snapshot_hash);
            println!("  event_log_hash: {}", lock.event_log_hash);
            println!("  dependencies pinned: {}", lock.dependencies.len());
            println!();
            println!("  {} on-disk state matches vela.lock", style::ok("ok"));
        } else {
            println!("  {} drift detected:", style::err_prefix());
            for d in &drift {
                println!("    - {d}");
            }
        }
    }
    // Exit code reflects drift in both output modes.
    if !ok {
        std::process::exit(1);
    }
}
13774
13775fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
13776 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
13777 let target = into
13778 .map(Path::to_path_buf)
13779 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
13780 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
13781 println!(
13782 "{} {} findings · {}",
13783 style::ok("imported"),
13784 frontier.findings.len(),
13785 target.display()
13786 );
13787}
13788
13789fn cmd_locator_repair(
13790 path: &Path,
13791 atom_id: &str,
13792 locator_override: Option<&str>,
13793 reviewer: &str,
13794 reason: &str,
13795 apply: bool,
13796 json_output: bool,
13797) {
13798 let report = state::repair_evidence_atom_locator(
13799 path,
13800 atom_id,
13801 locator_override,
13802 reviewer,
13803 reason,
13804 apply,
13805 )
13806 .unwrap_or_else(|e| fail_return(&e));
13807 print_state_report(&report, json_output);
13808}
13809
/// Fetches bibliographic metadata for a source identifier, with an optional
/// on-disk cache keyed by the sha256 of the normalized identifier.
///
/// Cache hits are served without any network call unless `refresh` is set.
/// The fetched/cached JSON is written to `out_path` or printed to stdout.
/// `_json_output` is currently unused: the emitted body is always JSON.
async fn cmd_source_fetch(
    identifier: &str,
    cache_root: Option<&Path>,
    out_path: Option<&Path>,
    refresh: bool,
    _json_output: bool,
) {
    // Local import mirrors the top-of-file one; kept for self-containment.
    use sha2::{Digest, Sha256};

    let normalized = normalize_source_identifier(identifier);
    // Cache file: <cache_root>/sources/cache/<sha256(normalized)>.json
    let cache_path = cache_root.map(|root| {
        let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
        root.join("sources")
            .join("cache")
            .join(format!("{hash}.json"))
    });

    if !refresh
        && let Some(p) = cache_path.as_ref()
        && p.is_file()
    {
        let body = std::fs::read_to_string(p)
            .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
        emit_source_fetch_result(&body, out_path);
        return;
    }

    let result = fetch_source_metadata(&normalized).await;
    let json = match result {
        Ok(value) => serde_json::to_string_pretty(&value)
            .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
        Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
    };

    // Populate the cache before emitting, so a later run can skip the network.
    if let Some(p) = cache_path.as_ref() {
        if let Some(parent) = p.parent() {
            std::fs::create_dir_all(parent)
                .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
        }
        std::fs::write(p, &json)
            .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
    }
    emit_source_fetch_result(&json, out_path);
}
13858
13859fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
13860 if let Some(p) = out_path {
13861 if let Some(parent) = p.parent() {
13862 let _ = std::fs::create_dir_all(parent);
13863 }
13864 std::fs::write(p, body)
13865 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
13866 } else {
13867 println!("{body}");
13868 }
13869}
13870
/// Normalizes a raw source identifier into a canonical prefixed form.
///
/// - Already-prefixed identifiers (`doi:`, `pmid:`, `nct:`, `pmc:`) pass through.
/// - Bare DOIs (starting with `10.`) become `doi:<doi>`.
/// - NCT trial ids (`NCT…` / `nct…`) become `nct:<id>` with the prefix stripped.
/// - Non-empty all-digit strings are treated as PubMed ids (`pmid:<digits>`).
/// - Anything else is returned trimmed but otherwise unchanged.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous `.split_at(0).0` truncated the id to an empty
        // string, so every NCT identifier normalized to a bare "nct:".
        let upper = trimmed.to_uppercase();
        return format!("nct:{}", upper.trim_start_matches("NCT"));
    }
    // Guard against the empty string: `all()` on no chars is vacuously true,
    // which previously classified "" as "pmid:".
    if !trimmed.is_empty() && trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
13899
/// Fetches metadata for a normalized identifier from the matching upstream:
/// Crossref for `doi:`, PubMed for `pmid:`, ClinicalTrials.gov for `nct:`.
///
/// For DOIs with an empty Crossref abstract, attempts a PubMed backfill:
/// the DOI is resolved to a PMID and, when PubMed has an abstract, it is
/// merged into the record alongside an `abstract_source` marker.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    // Identifiable UA plus a hard timeout so a stuck upstream can't hang us.
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Backfill path: all three steps must succeed, otherwise the
        // Crossref record is returned untouched.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    // Note: `pmc:` identifiers survive normalization but have no fetcher yet.
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
13949
/// Resolves a DOI to a PubMed id via the NCBI esearch endpoint.
///
/// Returns `Some(pmid)` only when the search yields exactly one hit;
/// zero or multiple matches are treated as "no reliable mapping".
/// All network and parse failures collapse into `None` (best-effort lookup).
async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
        urlencoding::encode(doi)
    );
    let resp = client.get(&url).send().await.ok()?;
    if !resp.status().is_success() {
        return None;
    }
    let body: Value = resp.json().await.ok()?;
    let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
    // Ambiguous (or empty) result sets are rejected outright.
    if id_list.len() != 1 {
        return None;
    }
    id_list.first()?.as_str().map(|s| s.to_string())
}
13971
/// Fetches work metadata for a DOI from the Crossref REST API and flattens
/// it into the `vela.source_fetch.v0.1` record shape.
///
/// Missing fields degrade to empty strings / `null` rather than erroring;
/// only transport, HTTP, and JSON-decode failures return `Err`.
async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
    let url = format!("https://api.crossref.org/works/{doi}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("crossref get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("crossref returned {}", resp.status()));
    }
    let body: Value = resp
        .json()
        .await
        .map_err(|e| format!("crossref json: {e}"))?;
    // Crossref wraps the work under "message".
    let work = body.get("message").cloned().unwrap_or(Value::Null);
    // "title" and "container-title" are arrays; take the first entry.
    let title = work
        .get("title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Crossref abstracts arrive as JATS XML; strip the markup to plain text.
    let abstract_html = work
        .get("abstract")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let abstract_text = strip_jats_tags(&abstract_html);
    // issued.date-parts is [[year, month, day]]; we keep only the year.
    let year = work
        .get("issued")
        .and_then(|v| v.get("date-parts"))
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_i64());
    let journal = work
        .get("container-title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Authors render as "Given Family"; entries with neither name are dropped.
    let authors = work
        .get("author")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|a| {
                    let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
                    let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
                    let combined = format!("{given} {family}").trim().to_string();
                    if combined.is_empty() {
                        None
                    } else {
                        Some(combined)
                    }
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("doi:{doi}"),
        "source": "crossref",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": authors,
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14045
/// Fetches article metadata for a PMID from the NCBI efetch endpoint and
/// scrapes fields out of the returned XML with naive first-match extraction.
///
/// NOTE(review): `extract_xml_text` matches the literal open tag only, so
/// `<AbstractText Label="...">` variants (structured abstracts) will not
/// match and yield an empty abstract — confirm this is acceptable.
/// `<Title>` here is the journal title in PubMed efetch XML, hence its
/// assignment to the `journal` field. Authors are not extracted.
async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
    );
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("pubmed get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("pubmed returned {}", resp.status()));
    }
    let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
    let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
    let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
    // First <Year> in the document; non-numeric or missing becomes null.
    let year = extract_xml_text(&xml, "<Year>", "</Year>")
        .parse::<i64>()
        .ok();
    let journal = extract_xml_text(&xml, "<Title>", "</Title>");
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("pmid:{pmid}"),
        "source": "pubmed",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14077
/// Fetches study metadata for an NCT id from the ClinicalTrials.gov v2 API.
///
/// Accepts the id with or without its `NCT` prefix. The record reuses the
/// shared source-fetch shape: the brief summary becomes `abstract`, the
/// first study phase is stored under `journal` (shape reuse, not a journal),
/// and `year`/`authors` are left empty.
async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
    let nct_clean = nct.trim();
    // Canonicalize to an upper-case NCT-prefixed id for the API path.
    let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
        nct_clean.to_uppercase()
    } else {
        format!("NCT{nct_clean}")
    };
    let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ctgov get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("ctgov returned {}", resp.status()));
    }
    let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
    let title = body
        .pointer("/protocolSection/identificationModule/briefTitle")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let abstract_text = body
        .pointer("/protocolSection/descriptionModule/briefSummary")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let phase = body
        .pointer("/protocolSection/designModule/phases")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("nct:{nct_id}"),
        "source": "clinicaltrials.gov",
        "title": title,
        "abstract": abstract_text,
        "year": Value::Null,
        "journal": phase,
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14124
/// Returns the trimmed text between the first occurrence of `open` and the
/// next occurrence of `close`, or an empty string when either tag is absent.
/// Naive substring scan — no real XML parsing, no attribute handling.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.find(open)
        .map(|start| &xml[start + open.len()..])
        .and_then(|rest| {
            rest.find(close)
                .map(|end| rest[..end].trim().to_string())
        })
        .unwrap_or_default()
}
14134
/// Strips everything between `<` and `>` from a JATS/HTML fragment and
/// collapses whitespace runs into single spaces. A stray `>` outside a tag
/// is simply dropped (the flag is already clear).
fn strip_jats_tags(html: &str) -> String {
    let mut text = String::with_capacity(html.len());
    let mut inside_tag = false;
    for ch in html.chars() {
        if ch == '<' {
            inside_tag = true;
        } else if ch == '>' {
            inside_tag = false;
        } else if !inside_tag {
            text.push(ch);
        }
    }
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
14148
14149fn cmd_span_repair(
14150 path: &Path,
14151 finding_id: &str,
14152 section: &str,
14153 text: &str,
14154 reviewer: &str,
14155 reason: &str,
14156 apply: bool,
14157 json_output: bool,
14158) {
14159 let report =
14160 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
14161 .unwrap_or_else(|e| fail_return(&e));
14162 print_state_report(&report, json_output);
14163}
14164
#[allow(clippy::too_many_arguments)]
/// Attaches an external-identifier resolution to a named entity on a finding
/// and prints the resulting state report.
///
/// `source`/`id` name the external registry and identifier, `confidence` is
/// the match score, `matched_name` the registry-side label (if any), and
/// `resolution_method` records how the match was made. With `apply` false
/// the resolution is only previewed.
fn cmd_entity_resolve(
    path: &Path,
    finding_id: &str,
    entity_name: &str,
    source: &str,
    id: &str,
    confidence: f64,
    matched_name: Option<&str>,
    resolution_method: &str,
    reviewer: &str,
    reason: &str,
    apply: bool,
    json_output: bool,
) {
    let report = state::resolve_finding_entity(
        path,
        finding_id,
        entity_name,
        source,
        id,
        confidence,
        matched_name,
        resolution_method,
        reviewer,
        reason,
        apply,
    )
    .unwrap_or_else(|e| fail_return(&e));
    print_state_report(&report, json_output);
}
14196
/// Propagates a correction (retraction or confidence reduction) for a
/// finding through the frontier, records the generated review events,
/// and saves the updated frontier to `output` (or back over `path`).
fn cmd_propagate(
    path: &Path,
    retract: Option<String>,
    reduce_confidence: Option<String>,
    to: Option<f64>,
    output: Option<&Path>,
) {
    let mut frontier = load_frontier_or_fail(path);
    // Exactly one of --retract / --reduce-confidence must be given; the
    // fail* helpers diverge, so each invalid branch aborts the process.
    let (finding_id, action, label) = if let Some(id) = retract {
        (id, propagate::PropagationAction::Retracted, "retraction")
    } else if let Some(id) = reduce_confidence {
        let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
        if !(0.0..=1.0).contains(&score) {
            fail("--to must be between 0.0 and 1.0");
        }
        (
            id,
            propagate::PropagationAction::ConfidenceReduced { new_score: score },
            "confidence reduction",
        )
    } else {
        fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
    };
    if !frontier.findings.iter().any(|f| f.id == finding_id) {
        fail(&format!("finding not found: {finding_id}"));
    }
    let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
    // Persist the propagation's review events on the frontier itself.
    frontier.review_events.extend(result.events.clone());
    project::recompute_stats(&mut frontier);
    propagate::print_result(&result, label, &finding_id);
    // In-place update unless an explicit output path was provided.
    let out = output.unwrap_or(path);
    repo::save_to_path(out, &frontier).expect("Failed to save frontier");
    println!("  output: {}", out.display());
}
14235
14236fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
14237 let source_desc = source
14238 .map(|p| p.display().to_string())
14239 .or_else(|| frontiers.map(|p| p.display().to_string()))
14240 .unwrap_or_else(|| "frontier.json".to_string());
14241 let args = if let Some(path) = source {
14242 format!(r#""serve", "{}""#, path.display())
14243 } else if let Some(path) = frontiers {
14244 format!(r#""serve", "--frontiers", "{}""#, path.display())
14245 } else {
14246 r#""serve", "frontier.json""#.to_string()
14247 };
14248 println!(
14249 r#"Add this MCP server configuration to your client:
14250
14251{{
14252 "mcpServers": {{
14253 "vela": {{
14254 "command": "vela",
14255 "args": [{args}]
14256 }}
14257 }}
14258}}
14259
14260Source: {source_desc}"#
14261 );
14262}
14263
14264fn parse_entities(input: &str) -> Vec<(String, String)> {
14265 if input.trim().is_empty() {
14266 return Vec::new();
14267 }
14268 input
14269 .split(',')
14270 .filter_map(|pair| {
14271 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
14272 if parts.len() == 2 {
14273 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
14274 } else {
14275 eprintln!(
14276 "{} skipping malformed entity '{}'",
14277 style::warn("warn"),
14278 pair.trim()
14279 );
14280 None
14281 }
14282 })
14283 .collect()
14284}
14285
14286fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
14287 inputs
14288 .iter()
14289 .filter_map(|input| {
14290 let trimmed = input.trim();
14291 if trimmed.is_empty() {
14292 return None;
14293 }
14294 if trimmed.starts_with('{') {
14295 match serde_json::from_str::<Value>(trimmed) {
14296 Ok(value @ Value::Object(_)) => return Some(value),
14297 Ok(_) | Err(_) => {
14298 eprintln!(
14299 "{} evidence span JSON should be an object; storing as text",
14300 style::warn("warn")
14301 );
14302 }
14303 }
14304 }
14305 Some(json!({
14306 "section": "curator_source",
14307 "text": trimmed,
14308 }))
14309 })
14310 .collect()
14311}
14312
/// Computes a hex sha256 digest for a file or directory tree.
///
/// Files hash their raw bytes. Directories hash, in sorted relative-path
/// order, each file's lossy relative path string followed by its contents,
/// so the digest is stable across traversal order but sensitive to renames.
/// Missing paths are an error.
fn hash_path(path: &Path) -> Result<String, String> {
    let mut hasher = Sha256::new();
    if path.is_file() {
        let bytes = std::fs::read(path)
            .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
        hasher.update(&bytes);
    } else if path.is_dir() {
        let mut files = Vec::new();
        collect_hash_files(path, path, &mut files)?;
        // Sort for determinism: read_dir order is platform-dependent.
        files.sort();
        for rel in files {
            hasher.update(rel.to_string_lossy().as_bytes());
            let bytes = std::fs::read(path.join(&rel))
                .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
            hasher.update(bytes);
        }
    } else {
        return Err(format!("Cannot hash missing path {}", path.display()));
    }
    Ok(format!("{:x}", hasher.finalize()))
}
14334
14335fn load_frontier_or_fail(path: &Path) -> project::Project {
14336 repo::load_from_path(path).unwrap_or_else(|e| {
14337 fail_return(&format!(
14338 "Failed to load frontier '{}': {e}",
14339 path.display()
14340 ))
14341 })
14342}
14343
14344fn hash_path_or_fail(path: &Path) -> String {
14345 hash_path(path).unwrap_or_else(|e| {
14346 fail_return(&format!(
14347 "Failed to hash frontier '{}': {e}",
14348 path.display()
14349 ))
14350 })
14351}
14352
/// Recursively appends to `files` the paths of all regular files under
/// `dir`, expressed relative to `root`. Entries that are neither files nor
/// directories are skipped. Order follows `read_dir` and is unspecified.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let entry = entry.map_err(|e| format!("Failed to read directory entry: {e}"))?;
        let child = entry.path();
        if child.is_dir() {
            collect_hash_files(root, &child, files)?;
        } else if child.is_file() {
            let relative = child
                .strip_prefix(root)
                .map_err(|e| e.to_string())?
                .to_path_buf();
            files.push(relative);
        }
    }
    Ok(())
}
14371
14372fn schema_error_suggestion(error: &str) -> &'static str {
14373 if schema_error_action(error).is_some() {
14374 "Run `vela normalize` to repair deterministic frontier state."
14375 } else {
14376 "Inspect and correct the referenced frontier field."
14377 }
14378}
14379
14380fn schema_error_fix(error: &str) -> bool {
14381 schema_error_action(error).is_some()
14382}
14383
/// Classifies a schema error message into the normalize action that can
/// repair it, or `None` when no automatic repair applies.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substring markers for metadata/stats problems that `vela normalize`
    // fixes deterministically.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        return Some("normalize_metadata_and_stats");
    }
    if error.contains("does not match content-address") {
        return Some("rewrite_ids");
    }
    None
}
14398
14399fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14400 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14401 for diagnostic in diagnostics {
14402 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14403 *actions.entry(action.to_string()).or_default() += 1;
14404 }
14405 }
14406 actions
14407 .into_iter()
14408 .map(|(action, count)| {
14409 let command = if action == "rewrite_ids" {
14410 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14411 } else {
14412 "vela normalize <frontier> --write"
14413 };
14414 json!({
14415 "action": action,
14416 "count": count,
14417 "command": command,
14418 })
14419 })
14420 .collect()
14421}
14422
14423fn cmd_integrity(frontier: &Path, json: bool) {
14424 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14425 if json {
14426 println!(
14427 "{}",
14428 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14429 );
14430 } else {
14431 println!("vela integrity");
14432 println!(" frontier: {}", frontier.display());
14433 println!(" status: {}", report.status);
14434 println!(" proof freshness: {}", report.proof_freshness);
14435 println!(" structural errors: {}", report.structural_errors.len());
14436 for error in report.structural_errors.iter().take(8) {
14437 println!(" - {}: {}", error.rule_id, error.message);
14438 }
14439 }
14440}
14441
14442fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
14443 let report =
14444 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
14445 if json {
14446 println!(
14447 "{}",
14448 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
14449 );
14450 } else {
14451 println!("vela impact");
14452 println!(" finding: {}", report.target.id);
14453 println!(" frontier: {}", report.frontier.vfr_id);
14454 println!(" direct dependents: {}", report.summary.direct_dependents);
14455 println!(" downstream: {}", report.summary.total_downstream);
14456 println!(" open proposals: {}", report.summary.open_proposals);
14457 println!(" accepted events: {}", report.summary.accepted_events);
14458 println!(" proof: {}", report.summary.proof_status);
14459 }
14460}
14461
/// Report the discord assignment for a frontier: which findings carry which
/// discord kinds, plus a per-kind histogram. `kind_filter`, when set,
/// restricts the per-finding row listing (but NOT the histogram) to findings
/// exhibiting that kind.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // `support` = the set of findings with any discord at all.
    let support = assignment.frontier_support();

    // One row per supported finding: (finding id, discord kind names).
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        // When filtering, drop findings that do not exhibit the requested kind.
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Histogram over the full assignment (ignores `kind_filter`); kinds with
    // zero occurrences are omitted entirely.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        // JSON mode: emit everything (unlike the human view, rows are not
        // truncated to 50 here).
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!("  frontier: {frontier_id}");
    println!("  total findings: {total_findings}");
    println!(
        "  frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!("  filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!("  no discord detected.");
    } else {
        println!("  discord histogram:");
        for (k, n) in &histogram {
            println!("    {n:>4}  {k}");
        }
    }
    if !rows.is_empty() {
        println!();
        println!("  findings with discord (showing up to 50):");
        // Human view caps the listing at 50 rows to stay readable.
        for (fid, kinds) in rows.iter().take(50) {
            println!("    {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!("    ... and {} more", rows.len() - 50);
        }
    }
}
14559
14560fn empty_signal_report() -> signals::SignalReport {
14561 signals::SignalReport {
14562 schema: "vela.signals.v0".to_string(),
14563 frontier: "unavailable".to_string(),
14564 signals: Vec::new(),
14565 review_queue: Vec::new(),
14566 proof_readiness: signals::ProofReadiness {
14567 status: "unavailable".to_string(),
14568 blockers: 0,
14569 warnings: 0,
14570 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
14571 },
14572 }
14573}
14574
14575fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
14576 println!();
14577 println!(" {}", "SIGNALS".dimmed());
14578 println!(" {}", style::tick_row(60));
14579 println!(" total signals: {}", report.signals.len());
14580 println!(" proof readiness: {}", report.proof_readiness.status);
14581 if !report.review_queue.is_empty() {
14582 println!(" review queue: {} items", report.review_queue.len());
14583 }
14584 if strict && report.proof_readiness.status != "ready" {
14585 println!(
14586 " {} proof readiness has blocking signals.",
14587 style::lost("strict check failed")
14588 );
14589 }
14590}
14591
/// Write `value` as pretty JSON at `relative_path` inside `packet_dir`, then
/// record the new file (path, sha256, byte length) in BOTH tracking files:
/// `manifest.json` (its `included_files` array) and `packet.lock.json` (its
/// `files` array). Because rewriting the lock file changes its own bytes,
/// the manifest entry describing `packet.lock.json` is refreshed afterwards
/// so the recorded hash matches the rewritten lock file.
///
/// Any existing entry for the same path is replaced, not duplicated.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Entry describing the freshly written file, shared by both manifests.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Pass 1: insert/replace the entry in both tracking files. The two files
    // use different array keys for their file lists.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any stale entry for this path before appending the new one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // Pass 2: the lock file was just rewritten above, so its hash/size in
    // manifest.json is now stale — re-hash it and replace that entry too.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
14672
14673fn print_tool_check_report(report: &Value) {
14674 let summary = report.get("summary").unwrap_or(&Value::Null);
14675 let frontier = report.get("frontier").unwrap_or(&Value::Null);
14676 println!();
14677 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
14678 println!(" {}", style::tick_row(60));
14679 println!(
14680 "frontier: {}",
14681 frontier
14682 .get("name")
14683 .and_then(Value::as_str)
14684 .unwrap_or("unknown")
14685 );
14686 println!(
14687 "findings: {}",
14688 frontier
14689 .get("findings")
14690 .and_then(Value::as_u64)
14691 .unwrap_or_default()
14692 );
14693 println!(
14694 "checks: {} passed, {} failed",
14695 summary
14696 .get("passed")
14697 .and_then(Value::as_u64)
14698 .unwrap_or_default(),
14699 summary
14700 .get("failed")
14701 .and_then(Value::as_u64)
14702 .unwrap_or_default()
14703 );
14704 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
14705 let names = tools
14706 .iter()
14707 .filter_map(Value::as_str)
14708 .collect::<Vec<_>>()
14709 .join(", ");
14710 println!("tools: {names}");
14711 }
14712 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
14713 for check in checks {
14714 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
14715 style::ok("ok")
14716 } else {
14717 style::lost("lost")
14718 };
14719 println!(
14720 " {} {}",
14721 status,
14722 check
14723 .get("tool")
14724 .and_then(Value::as_str)
14725 .unwrap_or("unknown")
14726 );
14727 }
14728 }
14729}
14730
14731fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
14732 if json_output {
14733 println!(
14734 "{}",
14735 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
14736 );
14737 } else {
14738 println!("{}", report.message);
14739 println!(" frontier: {}", report.frontier);
14740 println!(" finding: {}", report.finding_id);
14741 println!(" proposal: {}", report.proposal_id);
14742 println!(" status: {}", report.proposal_status);
14743 if let Some(event_id) = &report.applied_event_id {
14744 println!(" event: {}", event_id);
14745 }
14746 println!(" wrote: {}", report.wrote_to);
14747 }
14748}
14749
/// Print a human-readable `vela history` summary for one finding from the
/// pre-assembled JSON `payload`: finding header, per-category counts, the
/// latest proof-packet status when present, and up to 8 review events.
/// Missing fields fall back to empty/zero rather than erroring.
fn print_history(payload: &Value) {
    let finding = payload.get("finding").unwrap_or(&Value::Null);
    println!("vela history");
    println!(
        "  finding: {}",
        finding
            .get("id")
            .and_then(Value::as_str)
            .unwrap_or("unknown")
    );
    println!(
        "  assertion: {}",
        finding
            .get("assertion")
            .and_then(Value::as_str)
            .unwrap_or("")
    );
    println!(
        "  confidence: {:.3}",
        finding
            .get("confidence")
            .and_then(Value::as_f64)
            .unwrap_or_default()
    );
    // Count the entries in each history category; absent arrays count as 0.
    let reviews = payload
        .get("review_events")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let updates = payload
        .get("confidence_updates")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let annotations = finding
        .get("annotations")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let sources = payload
        .get("sources")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let atoms = payload
        .get("evidence_atoms")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let conditions = payload
        .get("condition_records")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let proposals = payload
        .get("proposals")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let events = payload
        .get("events")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    println!("  review events: {reviews}");
    println!("  confidence updates: {updates}");
    println!("  annotations: {annotations}");
    println!("  sources: {sources}");
    println!("  evidence atoms: {atoms}");
    println!("  condition records: {conditions}");
    println!("  proposals: {proposals}");
    println!("  canonical events: {events}");
    // Proof state is only shown when the nested path
    // proof_state.latest_packet.status resolves to a string.
    if let Some(status) = payload
        .get("proof_state")
        .and_then(|value| value.get("latest_packet"))
        .and_then(|value| value.get("status"))
        .and_then(Value::as_str)
    {
        println!("  proof state: {status}");
    }
    // Show at most the first 8 review events: timestamp, id, reason.
    if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
        for event in events.iter().take(8) {
            println!(
                "    - {} {} {}",
                event
                    .get("reviewed_at")
                    .and_then(Value::as_str)
                    .unwrap_or(""),
                event.get("id").and_then(Value::as_str).unwrap_or(""),
                event.get("reason").and_then(Value::as_str).unwrap_or("")
            );
        }
    }
}
14836
/// Serializable record of a proof run emitted by the CLI.
/// NOTE(review): field semantics inferred from names — confirm against the
/// proof subcommand that populates this struct.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag of this trace format.
    pub trace_version: String,
    /// The command-line invocation that produced the trace.
    pub command: Vec<String>,
    /// Source input (presumably the frontier path or identifier).
    pub source: String,
    /// Content hash of the source input.
    pub source_hash: String,
    /// Schema version of the traced frontier.
    pub schema_version: String,
    /// Artifacts that were checked during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results attached to the trace.
    pub benchmark: Option<Value>,
    /// Packet manifest reference for the proof packet.
    pub packet_manifest: String,
    /// Outcome of validating the proof packet.
    pub packet_validation: String,
    /// Caveats accumulated during the run.
    pub caveats: Vec<String>,
    /// Overall status of the proof run.
    pub status: String,
    /// Where the trace itself was written.
    pub trace_path: String,
}
14852
/// Every subcommand name recognized as a "science" subcommand; consumed by
/// `is_science_subcommand`. Entries are plain CLI tokens — adding a new
/// subcommand elsewhere requires adding its name here for it to be treated
/// as part of the science command set.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
14987
14988pub fn is_science_subcommand(name: &str) -> bool {
14989 SCIENCE_SUBCOMMANDS.contains(&name)
14990}
14991
/// Print the full, hand-curated help text (command catalog + worked
/// examples), interpolating the crate version into the banner. The body is
/// one raw string literal so the layout is exactly what ships to the
/// terminal — edit with care.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
  vela <COMMAND>

Core flow (v0.74):
  init              Initialize a split frontier repo
  ingest            Ingest a paper, dataset, or Carina packet (dispatches by file type)
  propose           Create a finding.review proposal
  diff              Preview a `vpr_*` proposal, or compare two frontier files
  accept            Apply a proposal under reviewer authority
  attest            Sign findings under your private key
  log               Recent canonical state events
  lineage           State-transition replay for one finding
  serve             Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
  check             Validate a frontier, repo, or proof packet
  integrity         Check accepted frontier state integrity
  impact            Report downstream finding impact
  normalize         Apply deterministic frontier-state repairs
  proof             Export and validate a proof packet
  repo              Inspect split frontier repository status and shape
  stats             Show frontier statistics
  search            Search findings
  tensions          List candidate contradictions and tensions
  gaps              Inspect and rank candidate gap review leads
  bridge            Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
  scout             Run Literature Scout against a folder of PDFs (writes proposals)
  compile-notes     Run Notes Compiler against a Markdown vault (writes proposals)
  compile-code      Run Code & Notebook Analyst against a research repo (writes proposals)
  compile-data      Run Datasets agent against a folder of CSV/TSV data (writes proposals)
  review-pending    Run Reviewer Agent: score every pending proposal (writes notes)
  find-tensions     Run Contradiction Finder: surface real contradictions among findings
  plan-experiments  Run Experiment Planner: propose experiments for open questions / hypotheses
  export            Export frontier artifacts
  packet            Inspect or validate proof packets
  bench             Run deterministic benchmark gates
  conformance       Run protocol conformance vectors
  sign              Optional signing and signature verification
  runtime-adapter
                    Normalize external runtime exports into reviewable proposals
  version           Show version information
  import            Import frontier.json into a .vela repo
  proposals         Inspect, validate, export, import, accept, or reject write proposals
  artifact-to-state
                    Import a Carina artifact packet as reviewable proposals
  bridge-kit
                    Validate Carina artifact packets before importing runtime output
  source-adapter
                    Run reviewed source adapters into artifact-to-state proposals
  finding           Add or manage finding bundles as frontier state
  link              Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
  entity            Resolve unresolved entities against a bundled common-entity table (v0.19)
  frontier          Scaffold (`new`), materialize, and manage frontier metadata + deps
  actor             Register Ed25519 publisher identities in a frontier
  registry          Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
  review            Create a review proposal or review interactively
  note              Add a lightweight note to a finding
  caveat            Create an explicit caveat proposal
  revise            Create a confidence revision proposal
  reject            Create a rejection proposal
  history           Show state-transition history for one finding (v0.74 alias: `lineage`)
  import-events     Import review/state events from a packet or JSON file
  retract           Create a retraction proposal
  propagate         Simulate impact over declared dependency links
  artifact-add      Register a content-addressed artifact
  artifacts         List content-addressed artifacts
  artifact-audit    Audit artifact locators, hashes, references, and profiles
  decision-brief    Show the validated decision brief projection
  trial-summary     Show the validated trial outcome projection
  source-verification  Show the validated source verification projection
  source-ingest-plan   Show the validated source ingest plan
  clinical-trial-import  Import a ClinicalTrials.gov record as an artifact
  locator-repair    Mechanically repair an evidence atom's missing source locator
  span-repair       Mechanically repair a finding's missing evidence span
  entity-resolve    Resolve a finding entity to a canonical id
  source-fetch      Fetch metadata + abstract for a doi:/pmid:/nct: source
  atlas             Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
  constellation     Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
  vela init demo --name "Your bounded question"
  vela ingest paper.pdf --frontier demo
  vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
  vela diff <vpr_id> --frontier demo
  vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
  vela serve --path demo

Substrate health:
  vela frontier materialize my-frontier --json
  vela repo status my-frontier --json
  vela proof verify my-frontier --json
  vela check my-frontier --strict --json

Monolithic frontier file:
  vela frontier new frontier.json --name "Your bounded question"
  vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
  vela check frontier.json --json
  FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
  vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
  vela frontier new ./frontier.json --name "Your bounded question"
  vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
  vela sign generate-keypair --out keys
  vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
  vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
      --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
15110
// ---------------------------------------------------------------------------
// Handler registry: each agent/composition entry point is a function pointer
// returning a boxed `Future`, stored in a `OnceLock`. The `register_*`
// functions install a handler once; subsequent registrations are ignored
// because `OnceLock::set` returns `Err` on a second call and the result is
// deliberately discarded (`let _ = ...`).
// ---------------------------------------------------------------------------

/// Entry point for the `scout` subcommand (Literature Scout).
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Install the scout handler (first registration wins).
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}

/// Entry point for `atlas init`: create an Atlas from member frontiers.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Install the atlas-init handler (first registration wins).
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}

/// Entry point for `atlas materialize`.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Install the atlas-materialize handler (first registration wins).
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}

/// Entry point for `atlas serve`: local server on `port`, optionally opening
/// a browser.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Install the atlas-serve handler (first registration wins).
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}

/// Entry point for `atlas update`: add frontiers / remove members by vfr id.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Install the atlas-update handler (first registration wins).
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}

/// Entry point for `constellation init`: compose Atlases into a Constellation.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Install the constellation-init handler (first registration wins).
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}

/// Entry point for `constellation materialize`.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Install the constellation-materialize handler (first registration wins).
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}

/// Entry point for `constellation serve`.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Install the constellation-serve handler (first registration wins).
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}

/// Entry point for `compile-notes` (Notes Compiler over a Markdown vault).
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Install the notes-compiler handler (first registration wins).
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}

/// Entry point for `compile-code` (Code & Notebook Analyst over a repo root).
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Install the code-analyst handler (first registration wins).
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}

/// Entry point for `compile-data` (Datasets agent over CSV/TSV data).
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Install the datasets handler (first registration wins).
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}

/// Entry point for `review-pending` (Reviewer Agent over pending proposals).
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Install the reviewer handler (first registration wins).
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}

/// Entry point for `find-tensions` (Contradiction Finder).
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Install the tensions handler (first registration wins).
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}

/// Entry point for `plan-experiments` (Experiment Planner).
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Install the experiments handler (first registration wins).
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
15344
/// Walk upward from the current working directory and return the first
/// ancestor (including the cwd itself) that contains a `.vela/` directory,
/// or `None` when the filesystem root is reached without a match.
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
15371
/// Print the short, curated session help screen (banner with crate version,
/// core flow, daily commands, reasoning, composition, publish). This is the
/// compact counterpart to `print_strict_help`; the text is all literal
/// output, so any edit here changes what users see verbatim.
fn print_session_help() {
    println!();
    println!(
        "  Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!("  USAGE");
    println!("    vela                 Open a session against the nearest .vela/ repo");
    println!("    vela <command>       Run a specific subcommand");
    println!("    vela help advanced   Full subcommand list (30+ commands)");
    println!();
    println!("  CORE FLOW (v0.74)");
    println!("    init                 Initialize a split frontier repo");
    println!("    ingest <path>        Ingest a paper, dataset, or Carina packet");
    println!("    propose              Create a finding.review proposal");
    println!("    diff <vpr_id>        Preview a pending proposal vs current frontier");
    println!("    accept <vpr_id>      Apply a proposal under reviewer authority");
    println!("    attest               Sign findings under your private key");
    println!("    log                  Recent canonical state events");
    println!("    lineage <vf_id>      State-transition replay for one finding");
    println!("    serve                Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!("  DAILY ALSO-RANS");
    println!("    status               One-screen frontier health");
    println!("    inbox                Pending review proposals");
    println!("    review               Review a proposal interactively");
    println!("    ask <question>       Plain-text query against the frontier");
    println!();
    println!("  REASONING (Pearl 1 → 2 → 3)");
    println!("    causal audit                  Per-finding identifiability");
    println!("    causal effect <src> --on <tgt>   Pairwise back-door / front-door");
    println!("    causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!("  COMPOSITION");
    println!("    bridge <a> <b>       Cross-frontier hypotheses");
    println!("    consensus <vf>       Field consensus over similar claims");
    println!();
    println!("  PUBLISH");
    println!("    registry publish     Push a signed manifest to the hub");
    println!("    federation peer-add  Federate with another hub");
    println!();
    println!("  In session, type a single letter for a quick verb, or any");
    println!("  question in plain text. `q` or `exit` quits.");
    println!();
}
15418
15419fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
15420 use crate::causal_reasoning::{audit_frontier, summarize_audit};
15421
15422 let label = frontier_label(project);
15423 let vfr = project.frontier_id();
15424 let vfr_short = vfr.chars().take(16).collect::<String>();
15425
15426 let mut pending = 0usize;
15427 let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
15428 for p in &project.proposals {
15429 if p.status == "pending_review" {
15430 pending += 1;
15431 *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
15432 }
15433 }
15434
15435 let audit = audit_frontier(project);
15436 let audit_summary = summarize_audit(&audit);
15437
15438 let bridges_dir = repo_path.join(".vela/bridges");
15439 let mut bridge_total = 0usize;
15440 let mut bridge_confirmed = 0usize;
15441 let mut bridge_derived = 0usize;
15442 if bridges_dir.is_dir()
15443 && let Ok(entries) = std::fs::read_dir(&bridges_dir)
15444 {
15445 for entry in entries.flatten() {
15446 let path = entry.path();
15447 if path.extension().and_then(|s| s.to_str()) != Some("json") {
15448 continue;
15449 }
15450 bridge_total += 1;
15451 if let Ok(data) = std::fs::read_to_string(&path)
15452 && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
15453 {
15454 match b.status {
15455 crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
15456 crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
15457 _ => {}
15458 }
15459 }
15460 }
15461 }
15462
15463 let mut targets_with_success = std::collections::HashSet::new();
15464 let mut failed_replications = 0usize;
15465 for r in &project.replications {
15466 if r.outcome == "replicated" {
15467 targets_with_success.insert(r.target_finding.clone());
15468 } else if r.outcome == "failed" {
15469 failed_replications += 1;
15470 }
15471 }
15472
15473 println!();
15474 let version = crate::project::VELA_COMPILER_VERSION
15475 .strip_prefix("vela/")
15476 .unwrap_or(crate::project::VELA_COMPILER_VERSION);
15477 println!(
15478 " {}",
15479 format!("VELA · {version} · {label}")
15480 .to_uppercase()
15481 .dimmed()
15482 );
15483 println!(" {}", style::tick_row(60));
15484 println!(
15485 " vfr_id {}… repo {}",
15486 vfr_short,
15487 repo_path.display()
15488 );
15489 println!(
15490 " findings {:>4} events {} proposals pending {}",
15491 project.findings.len(),
15492 project.events.len(),
15493 pending
15494 );
15495
15496 if pending > 0 {
15497 let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
15498 println!(" {} · {}", style::warn("inbox"), parts.join(" "));
15499 }
15500 if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
15501 println!(
15502 " {} · {} underidentified · {} conditional",
15503 if audit_summary.underidentified > 0 {
15504 style::lost("audit")
15505 } else {
15506 style::warn("audit")
15507 },
15508 audit_summary.underidentified,
15509 audit_summary.conditional,
15510 );
15511 }
15512 if bridge_total > 0 {
15513 println!(
15514 " {} · {} total · {} confirmed · {} awaiting review",
15515 style::ok("bridges"),
15516 bridge_total,
15517 bridge_confirmed,
15518 bridge_derived
15519 );
15520 }
15521 if !project.replications.is_empty() {
15522 println!(
15523 " {} · {} records · {} findings replicated · {} failed",
15524 style::ok("replications"),
15525 project.replications.len(),
15526 targets_with_success.len(),
15527 failed_replications,
15528 );
15529 }
15530
15531 println!();
15532 println!(" type a verb or ask anything:");
15533 println!(" a audit problems i inbox (pending) b bridges");
15534 println!(" g causal graph l log (recent) c counterfactuals");
15535 println!(" s refresh status h help (more verbs) q quit");
15536 println!();
15537}
15538
15539fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
15541 match verb {
15542 "a" | "audit" => {
15543 let action = CausalAction::Audit {
15544 frontier: repo_path.to_path_buf(),
15545 problems_only: true,
15546 json: false,
15547 };
15548 cmd_causal(action);
15549 true
15550 }
15551 "i" | "inbox" => {
15552 let action = ProposalAction::List {
15553 frontier: repo_path.to_path_buf(),
15554 status: Some("pending_review".into()),
15555 json: false,
15556 };
15557 cmd_proposals(action);
15558 true
15559 }
15560 "b" | "bridges" => {
15561 let action = BridgesAction::List {
15562 frontier: repo_path.to_path_buf(),
15563 status: None,
15564 json: false,
15565 };
15566 cmd_bridges(action);
15567 true
15568 }
15569 "g" | "graph" => {
15570 let action = CausalAction::Graph {
15571 frontier: repo_path.to_path_buf(),
15572 node: None,
15573 json: false,
15574 };
15575 cmd_causal(action);
15576 true
15577 }
15578 "l" | "log" => {
15579 cmd_log(repo_path, 10, None, false);
15580 true
15581 }
15582 "c" | "counterfactual" | "counterfactuals" => {
15583 let project = match repo::load_from_path(repo_path) {
15586 Ok(p) => p,
15587 Err(e) => {
15588 eprintln!("{} {e}", style::err_prefix());
15589 return true;
15590 }
15591 };
15592 println!();
15593 println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
15594 println!(" {}", style::tick_row(60));
15595 let mut pairs = 0usize;
15599 for child in &project.findings {
15600 for link in &child.links {
15601 if !matches!(link.link_type.as_str(), "depends" | "supports") {
15602 continue;
15603 }
15604 if link.mechanism.is_none() {
15605 continue;
15606 }
15607 let parent = link
15608 .target
15609 .split_once(':')
15610 .map_or(link.target.as_str(), |(_, r)| r);
15611 pairs += 1;
15612 if pairs <= 10 {
15613 println!(" · do({parent}) → {}", child.id);
15614 }
15615 }
15616 }
15617 if pairs == 0 {
15618 println!(" no mechanism-annotated edges found.");
15619 println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
15620 } else {
15621 println!();
15622 println!(" {pairs} live pair(s). Run with:");
15623 println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
15624 }
15625 println!();
15626 true
15627 }
15628 "s" | "status" | "refresh" => {
15629 match repo::load_from_path(repo_path) {
15631 Ok(p) => print_session_dashboard(&p, repo_path),
15632 Err(e) => eprintln!("{} {e}", style::err_prefix()),
15633 }
15634 true
15635 }
15636 "h" | "help" | "?" => {
15637 print_session_help();
15638 true
15639 }
15640 _ => false,
15641 }
15642}
15643
15644fn run_session() {
15645 let repo_path = match find_vela_repo() {
15646 Some(p) => p,
15647 None => {
15648 println!();
15649 println!(
15650 " {}",
15651 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
15652 );
15653 println!(" {}", style::tick_row(60));
15654 println!(" Run `vela init` here to create a frontier, or cd into one.");
15655 println!(" Or run `vela help` for the command list.");
15656 println!();
15657 return;
15658 }
15659 };
15660
15661 let project = match repo::load_from_path(&repo_path) {
15662 Ok(p) => p,
15663 Err(e) => {
15664 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
15665 std::process::exit(1);
15666 }
15667 };
15668
15669 print_session_dashboard(&project, &repo_path);
15670
15671 use std::io::{BufRead, Write};
15672 let stdin = std::io::stdin();
15673 let mut stdout = std::io::stdout();
15674 loop {
15675 print!(" > ");
15676 stdout.flush().ok();
15677 let mut line = String::new();
15678 if stdin.lock().read_line(&mut line).is_err() {
15679 break;
15680 }
15681 let input = line.trim();
15682 if input.is_empty() {
15683 continue;
15684 }
15685 if matches!(input, "q" | "quit" | "exit") {
15686 break;
15687 }
15688 if run_session_verb(input, &repo_path) {
15689 continue;
15690 }
15691 let project = match repo::load_from_path(&repo_path) {
15693 Ok(p) => p,
15694 Err(e) => {
15695 eprintln!("{} {e}", style::err_prefix());
15696 continue;
15697 }
15698 };
15699 answer(&project, input, false);
15700 }
15701}
15702
15703pub fn run_from_args() {
15704 style::init();
15705 let args = std::env::args().collect::<Vec<_>>();
15706 match args.get(1).map(String::as_str) {
15707 None => {
15711 run_session();
15712 return;
15713 }
15714 Some("-h" | "--help" | "help") => {
15715 if args.get(2).map(String::as_str) == Some("advanced") {
15718 print_strict_help();
15719 } else {
15720 print_session_help();
15721 }
15722 return;
15723 }
15724 Some("-V" | "--version" | "version") => {
15725 println!("vela {}", env!("CARGO_PKG_VERSION"));
15726 return;
15727 }
15728 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
15729 let json = args.iter().any(|arg| arg == "--json");
15730 let frontier = args
15731 .iter()
15732 .skip(3)
15733 .find(|arg| !arg.starts_with('-'))
15734 .map(PathBuf::from)
15735 .unwrap_or_else(|| {
15736 eprintln!(
15737 "{} proof verify requires a frontier repo",
15738 style::err_prefix()
15739 );
15740 std::process::exit(2);
15741 });
15742 cmd_proof_verify(&frontier, json);
15743 return;
15744 }
15745 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
15746 let frontier = args
15747 .iter()
15748 .skip(3)
15749 .find(|arg| !arg.starts_with('-'))
15750 .map(PathBuf::from)
15751 .unwrap_or_else(|| {
15752 eprintln!(
15753 "{} proof explain requires a frontier repo",
15754 style::err_prefix()
15755 );
15756 std::process::exit(2);
15757 });
15758 cmd_proof_explain(&frontier);
15759 return;
15760 }
15761 Some(cmd) if !is_science_subcommand(cmd) => {
15762 eprintln!(
15763 "{} unknown or non-release command: {cmd}",
15764 style::err_prefix()
15765 );
15766 eprintln!("run `vela --help` for the strict v0 command surface.");
15767 std::process::exit(2);
15768 }
15769 Some(_) => {}
15770 }
15771 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
15772 runtime.block_on(run_command());
15773}
15774
15775fn fail(message: &str) -> ! {
15776 eprintln!("{} {message}", style::err_prefix());
15777 std::process::exit(1);
15778}
15779
15780fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
15785 if !valid.contains(&value) {
15786 fail(&format!(
15787 "invalid {flag} '{value}'. Valid: {}",
15788 valid.join(", ")
15789 ));
15790 }
15791}
15792
/// Like [`fail`], but usable in expression position where a value of type `T`
/// is expected (e.g. inside `unwrap_or_else` or a match arm): `fail` diverges,
/// so no `T` is ever actually produced.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}