1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Top-level argument parser for the `vela` binary (clap derive API).
// NOTE(review): `//` comments are used deliberately throughout these CLI types —
// `///` doc comments would be picked up by clap as user-visible help/about text
// and would change the CLI surface.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    // The selected subcommand; clap requires exactly one per invocation.
    #[command(subcommand)]
    command: Commands,
}
29
// All top-level `vela` subcommands.
//
// Recurring field conventions visible across variants:
//   * `frontier: PathBuf` — path to the frontier state being read or mutated.
//   * `json: bool`        — emit machine-readable JSON instead of styled text.
//   * `dry_run: bool`     — compute/report only; persist nothing.
//   * `apply: bool`       — actually apply the mutation (default is preview).
//   * `reviewer`/`actor`/`by` — identity string recorded in the event log.
//
// NOTE(review): `//` comments (not `///`) are intentional — clap renders doc
// comments as user-visible help text. The gaps in the original file's line
// numbering suggest per-variant `///` help text existed upstream and was
// stripped from this copy — TODO confirm against the repository.
#[derive(Subcommand)]
enum Commands {
    // Backend-assisted scan of a folder into a frontier.
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a notes vault into frontier proposals, with optional caps.
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a code tree into frontier proposals.
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Review pending proposals, optionally batched (`batch_size` defaults to 1).
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Backend-assisted tension discovery over a frontier.
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Backend-assisted experiment planning over a frontier.
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile tabular data under `root`; `sample_rows` limits rows inspected.
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Validation entry point; flags select which checks run.
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    // State-integrity check of a frontier.
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Impact analysis rooted at one finding; `depth` bounds traversal.
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    // Normalize a frontier file; writes in place (`--write`), to `--out`, or previews.
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    // Build a proof packet from a template (default: "bbb-alzheimer").
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    // Serve one frontier (positional) or many (`--frontiers`); the positional
    // argument is optional only when `--frontiers` or `--setup` is present.
    Serve {
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Event log listing, newest-first presumably — verify against the handler.
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Free-form question; everything after the frontier path is collected
    // into `question` (trailing_var_arg), so no further flags may follow it.
    Ask {
        frontier: PathBuf,
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Text search; `r#type` escapes the `type` keyword and surfaces as `--type`.
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    // `novelty` uses ArgAction::Set, so it takes an explicit true/false value
    // (`--novelty false`) rather than acting as a presence flag; defaults to true.
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Benchmarking; threshold flags gate pass/fail, `--no-thresholds` disables them.
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    Version,
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    // Scaffold a new frontier; defaults to the current directory.
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    // Guided demo setup with opinionated defaults.
    Quickstart {
        #[arg(default_value = "demo")]
        path: PathBuf,
        #[arg(long, default_value = "Quickstart frontier")]
        name: String,
        #[arg(long, default_value = "reviewer:you")]
        reviewer: String,
        #[arg(long)]
        assertion: Option<String>,
        #[arg(long)]
        keys_out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Lockfile handling; `--check` verifies instead of writing.
    Lock {
        path: PathBuf,
        #[arg(long)]
        check: bool,
        #[arg(long)]
        json: bool,
    },
    Doc {
        path: PathBuf,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    // Diff `target` against an optional second frontier or `--frontier`.
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    // Fold a proof packet's artifacts back into frontier state.
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    // Local workbench UI; port 3850 (atlas serve uses 3848, constellation 3849).
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // --- Review/annotation workflow: preview by default, mutate with --apply ---
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Adjust a finding's confidence to an absolute value.
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Finding history; `--as-of` reconstructs state at an RFC3339 instant.
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a formal-proof artifact; tool defaults target Lean 4.
    ProofAdd {
        frontier: PathBuf,
        #[arg(long = "target-finding")]
        target_finding: String,
        #[arg(long, default_value = "lean4")]
        tool: String,
        #[arg(long = "tool-version", default_value = "4.29.1")]
        tool_version: String,
        #[arg(long = "script-path")]
        script_path: PathBuf,
        #[arg(long, default_value = "Proof script")]
        name: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Record an entity-to-external-identifier resolution (default method: manual).
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Fetch a source by identifier; `--refresh` bypasses the cache.
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Propagate a retraction or confidence reduction (`--to` is the new value).
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // Record a replication attempt; cascading updates unless `--no-cascade`.
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // --- Artifact deposit commands ---
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Generic artifact deposit; `target`/`metadata` are repeatable flags.
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Import a ClinicalTrials.gov record; `--input-json` supplies a local file.
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    // Deposit a negative result; most statistics fields are optional and
    // presumably depend on `kind` — confirm against the handler.
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Set the access tier of an arbitrary object, identified by type + id.
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // --- Prediction market–style commands ---
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    // Resolve an open prediction; `--matched` is a presence flag.
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Expire overdue predictions; `--now` overrides the clock for testing.
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Consensus computation over a target (default weighting: composite).
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // Generic ingestion entry point; `path` is a String (may be a non-file
    // locator such as a URL — confirm against the handler).
    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    // Propose a status change (unlike `Review`, `status` is required here).
    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    // Record an attestation; signature may be supplied directly or via a key file.
    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1829
// Subcommands for `vela atlas` — an atlas aggregates multiple frontiers.
// (`//` not `///`: doc comments would become clap help text.)
#[derive(Subcommand)]
enum AtlasAction {
    Init {
        name: String,
        // Comma-separated list; at least one frontier path is required.
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        // Distinct default port from `constellation serve` (3849) and `workbench` (3850).
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    // Membership update: add frontiers and/or remove entries by VFR id.
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1906
// Subcommands for `vela constellation` — a constellation aggregates atlases,
// mirroring the `AtlasAction` shape one level up.
#[derive(Subcommand)]
enum ConstellationAction {
    Init {
        name: String,
        // Comma-separated list; at least one atlas path is required.
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        // Distinct default port from `atlas serve` (3848) and `workbench` (3850).
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1951
// Subcommands for `vela carina` (see the `carina_validate` module import).
#[derive(Subcommand)]
enum CarinaAction {
    // Validate a file, optionally against one named primitive only.
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
    // Print the schema of a single primitive.
    Schema { primitive: String },
}
1981
// Subcommands for `vela packet`: inspect or validate a proof packet on disk.
#[derive(Subcommand)]
enum PacketAction {
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1997
// Subcommands for `vela sign`: keypair management and frontier signing.
#[derive(Subcommand)]
enum SignAction {
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Sign a frontier with a private key.
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Verify signatures; public key optional (presumably taken from the
    // frontier's actor records when omitted — confirm against the handler).
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Set the signature threshold required for one finding.
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
2038
// Subcommands for `vela actor`: registry of signing identities on a frontier.
#[derive(Subcommand)]
enum ActorAction {
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2075
// Subcommands for `vela causal`: queries over the frontier's causal graph.
#[derive(Subcommand)]
enum CausalAction {
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    // Effect of `source` on the node named by `--on`.
    Effect {
        frontier: PathBuf,
        source: String,
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    // Dump the graph, optionally restricted to one node's neighbourhood.
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Counterfactual query: set `intervene_on` to `--set-to`, read `--target`.
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2142
// Subcommands for `vela bridges`: derive and curate cross-frontier bridges.
#[derive(Subcommand)]
enum BridgesAction {
    // Derive bridges between two frontiers; labels default to "a"/"b".
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    // Confirm/Refute share a shape: optional reviewer identity and note.
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2210
// Subcommands for `vela federation`: peer management and cross-peer sync.
#[derive(Subcommand)]
enum FederationAction {
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    // Sync with one peer, directly (`--url`) or through the hub (`--via-hub`).
    // `--allow-cross-vfr` relaxes the same-VFR guard — handle with care.
    Sync {
        frontier: PathBuf,
        peer_id: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Push a conflict resolution to a peer; the flag is `--to` (field `to`).
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2327
// Subcommands for `vela frontier`: lifecycle and dependency management.
#[derive(Subcommand)]
enum FrontierAction {
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Pin a dependency on another frontier by VFR id, locator and snapshot.
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    // Refresh pinned deps from a registry (defaults to the public hub).
    RefreshDeps {
        frontier: PathBuf,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Time-window diff: either an explicit `--since` timestamp or a `--week`.
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2438
// Subcommands for `vela repo`: repository-level status and diagnostics.
#[derive(Subcommand)]
enum RepoAction {
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2458
// Subcommands for `vela queue`: a signing queue kept in `queue_file`
// (location defaults elsewhere when the flag is omitted — see the handler).
#[derive(Subcommand)]
enum QueueAction {
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Sign queued items as `--actor` using `--key`; `--yes-to-all` (alias
    // `--all`) skips per-item confirmation.
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2498
// Subcommands for `vela registry`: publish/pull frontiers against a registry
// (the public hub `https://vela-hub.fly.dev` is the default where noted).
#[derive(Subcommand)]
enum RegistryAction {
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Reverse-dependency query for a VFR id.
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    // Copy a published frontier from one registry to another.
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    // Pull a frontier; `--transitive` also pulls deps, bounded by `--depth`.
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2591
// Subcommands for `vela gaps`: ranked listing of knowledge gaps.
#[derive(Subcommand)]
enum GapsAction {
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2605
// Subcommands for `vela link`: manual edges between findings.
#[derive(Subcommand)]
enum LinkAction {
    // Add a typed link `--from` → `--to` (default type "supports").
    // `r#type` escapes the keyword and surfaces as `--type` on the CLI.
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        // Skip existence validation of the target — presumably for
        // forward-references; confirm against the handler.
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2644
#[derive(Subcommand)]
// Subcommands of `vela entity` (dispatched via `cmd_entity`).
// `//` comments are used instead of `///` so clap help output is unchanged.
enum EntityAction {
    // Resolve entities in a frontier (details live in the handler).
    Resolve {
        frontier: PathBuf,
        // Force re-resolution — presumably overwrites prior results;
        // confirm against the handler.
        #[arg(long)]
        force: bool,
        // Emit machine-readable JSON instead of human-readable text.
        #[arg(long)]
        json: bool,
    },
    // List known entities. Note: no frontier argument — the handler must
    // resolve its own source.
    List {
        #[arg(long)]
        json: bool,
    },
}
2667
#[derive(Subcommand)]
// Subcommands of `vela finding`. Handled inline in `run_command`, which
// validates the enum-valued flags against `bundle::VALID_*` constants and
// forwards to `state::add_finding` / `state::supersede_finding` /
// `state::set_causal`.
// `//` comments are used instead of `///` so clap help output is unchanged.
enum FindingCommands {
    // Add a new finding to a frontier.
    Add {
        // Path to the frontier to modify.
        frontier: PathBuf,
        // The finding's assertion text.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        // Raw identifier because `type` is a Rust keyword.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Author of the finding (required).
        #[arg(long)]
        author: String,
        // Initial confidence in [0, 1] — range enforcement (if any)
        // happens downstream, not here.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entity list; parsed by `parse_entities` into (name, type) pairs,
        // each type validated against bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as already human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Evidence spans; repeatable, parsed by `parse_evidence_spans`.
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; split and trimmed in the handler.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split and trimmed in the handler.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON instead of human-readable text.
        #[arg(long)]
        json: bool,
        // Apply the change instead of treating it as a draft/dry run —
        // exact semantics live in `state::add_finding`.
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new one, recording why.
    // Mirrors `Add` but requires the superseded id and a reason, and
    // (per the handler) does not carry review/span/gap flags.
    Supersede {
        frontier: PathBuf,
        // Id of the finding being superseded.
        old_id: String,
        #[arg(long)]
        assertion: String,
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        #[arg(long, default_value = "manual finding")]
        source: String,
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        #[arg(long)]
        author: String,
        // Why the old finding is being superseded (required).
        #[arg(long)]
        reason: String,
        // Note: defaults differ from Add (0.5 / experimental) —
        // presumably because a superseding finding carries new evidence.
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        #[arg(long, default_value = "")]
        entities: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        source_authors: Option<String>,
        #[arg(long)]
        conditions_text: Option<String>,
        #[arg(long)]
        species: Option<String>,
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        apply: bool,
    },
    // Set a causal claim on an existing finding.
    CausalSet {
        frontier: PathBuf,
        // Id of the finding to annotate.
        finding_id: String,
        // Causal claim; validated against bundle::VALID_CAUSAL_CLAIMS.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; validated against
        // bundle::VALID_CAUSAL_EVIDENCE_GRADES when present.
        #[arg(long)]
        grade: Option<String>,
        // Who is making the change (required).
        #[arg(long)]
        actor: String,
        // Why the claim is being set (required).
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2858
#[derive(Subcommand)]
// Subcommands of `vela proposals` (dispatched via `cmd_proposals`).
// `//` comments are used instead of `///` so clap help output is unchanged.
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        // Emit machine-readable JSON instead of human-readable text.
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal by id.
    Show {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of applying a proposal without applying it
    // (see also `vela diff <vpr_*>`, which uses the same preview path).
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file into a frontier.
    Import {
        frontier: PathBuf,
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposal file without touching any frontier.
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals to a file, optionally filtered by status.
    Export {
        frontier: PathBuf,
        output: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal; reviewer identity and reason are required.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal; reviewer identity and reason are required.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2930
#[derive(Subcommand)]
// Subcommands of `vela source-adapter` (dispatched via
// `cmd_source_adapter`, which is async).
// `//` comments are used instead of `///` so clap help output is unchanged.
enum SourceAdapterAction {
    // Run a named source adapter against a frontier.
    Run {
        // Path to the frontier to run against.
        frontier: PathBuf,
        // Name of the adapter to run.
        adapter: String,
        // Actor identity recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Repeatable `--entry` flag collecting specific entries to process;
        // renamed so the CLI flag is singular while the field is plural.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority filter/level — confirm semantics in the handler.
        #[arg(long)]
        priority: Option<String>,
        // Also process entries otherwise excluded.
        #[arg(long)]
        include_excluded: bool,
        // Tolerate partial results instead of failing the whole run.
        #[arg(long)]
        allow_partial: bool,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched inputs.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Also apply artifacts produced by the adapter.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON instead of human-readable text.
        #[arg(long)]
        json: bool,
    },
}
2968
#[derive(Subcommand)]
// Subcommands of `vela runtime-adapter` (dispatched via
// `cmd_runtime_adapter`). Unlike SourceAdapterAction::Run, this takes a
// single required `--input` file rather than repeatable entries.
// `//` comments are used instead of `///` so clap help output is unchanged.
enum RuntimeAdapterAction {
    // Run a named runtime adapter against a frontier.
    Run {
        // Path to the frontier to run against.
        frontier: PathBuf,
        // Name of the adapter to run.
        adapter: String,
        // Input file for the adapter (required).
        #[arg(long)]
        input: PathBuf,
        // Actor identity recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Also apply artifacts produced by the adapter.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON instead of human-readable text.
        #[arg(long)]
        json: bool,
    },
}
2994
#[derive(Subcommand)]
// Subcommands of `vela bridge-kit` (dispatched via the async
// `cmd_bridge_kit`).
// `//` comments are used instead of `///` so clap help output is unchanged.
enum BridgeKitAction {
    // Validate a bridge-kit source file.
    Validate {
        source: PathBuf,
        // Emit machine-readable JSON instead of human-readable text.
        #[arg(long)]
        json: bool,
    },
    // Verify the provenance of a packet file.
    VerifyProvenance {
        packet: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
3019
3020pub async fn run_command() {
3021 dotenvy::dotenv().ok();
3022
3023 match Cli::parse().command {
3024 Commands::Scout {
3025 folder,
3026 frontier,
3027 backend,
3028 dry_run,
3029 json,
3030 } => {
3031 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
3032 }
3033 Commands::CompileNotes {
3034 vault,
3035 frontier,
3036 backend,
3037 max_files,
3038 max_items_per_category,
3039 dry_run,
3040 json,
3041 } => {
3042 cmd_compile_notes(
3043 &vault,
3044 &frontier,
3045 backend.as_deref(),
3046 max_files,
3047 max_items_per_category,
3048 dry_run,
3049 json,
3050 )
3051 .await;
3052 }
3053 Commands::CompileCode {
3054 root,
3055 frontier,
3056 backend,
3057 max_files,
3058 dry_run,
3059 json,
3060 } => {
3061 cmd_compile_code(
3062 &root,
3063 &frontier,
3064 backend.as_deref(),
3065 max_files,
3066 dry_run,
3067 json,
3068 )
3069 .await;
3070 }
3071 Commands::CompileData {
3072 root,
3073 frontier,
3074 backend,
3075 sample_rows,
3076 dry_run,
3077 json,
3078 } => {
3079 cmd_compile_data(
3080 &root,
3081 &frontier,
3082 backend.as_deref(),
3083 sample_rows,
3084 dry_run,
3085 json,
3086 )
3087 .await;
3088 }
3089 Commands::ReviewPending {
3090 frontier,
3091 backend,
3092 max_proposals,
3093 batch_size,
3094 dry_run,
3095 json,
3096 } => {
3097 cmd_review_pending(
3098 &frontier,
3099 backend.as_deref(),
3100 max_proposals,
3101 batch_size,
3102 dry_run,
3103 json,
3104 )
3105 .await;
3106 }
3107 Commands::FindTensions {
3108 frontier,
3109 backend,
3110 max_findings,
3111 dry_run,
3112 json,
3113 } => {
3114 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3115 }
3116 Commands::PlanExperiments {
3117 frontier,
3118 backend,
3119 max_findings,
3120 dry_run,
3121 json,
3122 } => {
3123 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3124 }
3125 Commands::Check {
3126 source,
3127 schema,
3128 stats,
3129 conformance,
3130 conformance_dir,
3131 all,
3132 schema_only,
3133 strict,
3134 fix,
3135 json,
3136 } => cmd_check(
3137 source.as_deref(),
3138 schema,
3139 stats,
3140 conformance,
3141 &conformance_dir,
3142 all,
3143 schema_only,
3144 strict,
3145 fix,
3146 json,
3147 ),
3148 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3149 Commands::Impact {
3150 frontier,
3151 finding_id,
3152 depth,
3153 json,
3154 } => cmd_impact(&frontier, &finding_id, depth, json),
3155 Commands::Discord {
3156 frontier,
3157 json,
3158 kind,
3159 } => cmd_discord(&frontier, json, kind.as_deref()),
3160 Commands::Normalize {
3161 source,
3162 out,
3163 write,
3164 dry_run,
3165 rewrite_ids,
3166 id_map,
3167 resync_provenance,
3168 json,
3169 } => cmd_normalize(
3170 &source,
3171 out.as_deref(),
3172 write,
3173 dry_run,
3174 rewrite_ids,
3175 id_map.as_deref(),
3176 resync_provenance,
3177 json,
3178 ),
3179 Commands::Proof {
3180 frontier,
3181 out,
3182 template,
3183 gold,
3184 record_proof_state,
3185 json,
3186 } => cmd_proof(
3187 &frontier,
3188 &out,
3189 &template,
3190 gold.as_deref(),
3191 record_proof_state,
3192 json,
3193 ),
3194 Commands::Repo { action } => cmd_repo(action),
3195 Commands::Serve {
3196 frontier,
3197 frontiers,
3198 backend,
3199 http,
3200 setup,
3201 check_tools,
3202 json,
3203 workbench,
3204 } => {
3205 if setup {
3206 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3207 } else if check_tools {
3208 let source =
3209 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3210 match serve::check_tools(source) {
3211 Ok(report) => {
3212 if json {
3213 println!(
3214 "{}",
3215 serde_json::to_string_pretty(&report)
3216 .expect("failed to serialize tool check report")
3217 );
3218 } else {
3219 print_tool_check_report(&report);
3220 }
3221 }
3222 Err(e) => fail(&format!("Tool check failed: {e}")),
3223 }
3224 } else {
3225 let source =
3226 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3227 let resolved_port = if workbench {
3229 Some(http.unwrap_or(3848))
3230 } else {
3231 http
3232 };
3233 if let Some(port) = resolved_port {
3234 serve::run_http(source, backend.as_deref(), port, workbench).await;
3235 } else {
3236 serve::run(source, backend.as_deref()).await;
3237 }
3238 }
3239 }
3240 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3241 Commands::Log {
3242 frontier,
3243 limit,
3244 kind,
3245 json,
3246 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3247 Commands::Inbox {
3248 frontier,
3249 kind,
3250 limit,
3251 json,
3252 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3253 Commands::Ask {
3254 frontier,
3255 question,
3256 json,
3257 } => cmd_ask(&frontier, &question.join(" "), json),
3258 Commands::Stats { frontier, json } => {
3259 if json {
3260 print_stats_json(&frontier);
3261 } else {
3262 cmd_stats(&frontier);
3263 }
3264 }
3265 Commands::Search {
3266 source,
3267 query,
3268 entity,
3269 r#type,
3270 all,
3271 limit,
3272 json,
3273 } => cmd_search(
3274 source.as_deref(),
3275 &query,
3276 entity.as_deref(),
3277 r#type.as_deref(),
3278 all.as_deref(),
3279 limit,
3280 json,
3281 ),
3282 Commands::Tensions {
3283 source,
3284 both_high,
3285 cross_domain,
3286 top,
3287 json,
3288 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3289 Commands::Gaps { action } => cmd_gaps(action),
3290 Commands::Bridge {
3291 inputs,
3292 novelty,
3293 top,
3294 } => cmd_bridge(&inputs, novelty, top).await,
3295 Commands::Export {
3296 frontier,
3297 format,
3298 output,
3299 } => export::run(&frontier, &format, output.as_deref()),
3300 Commands::Packet { action } => cmd_packet(action),
3301 Commands::Verify { path, json } => cmd_verify(&path, json),
3302 Commands::Bench {
3303 frontier,
3304 gold,
3305 candidate,
3306 sources,
3307 threshold,
3308 report,
3309 entity_gold,
3310 link_gold,
3311 suite,
3312 suite_ready,
3313 min_f1,
3314 min_precision,
3315 min_recall,
3316 no_thresholds,
3317 json,
3318 } => {
3319 if let Some(cand) = candidate.clone() {
3324 let Some(g) = gold.clone() else {
3325 eprintln!(
3326 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3327 style::err_prefix()
3328 );
3329 std::process::exit(2);
3330 };
3331 cmd_agent_bench(
3332 &g,
3333 &cand,
3334 sources.as_deref(),
3335 threshold,
3336 report.as_deref(),
3337 json,
3338 );
3339 } else {
3340 cmd_bench(BenchArgs {
3341 frontier,
3342 gold,
3343 entity_gold,
3344 link_gold,
3345 suite,
3346 suite_ready,
3347 min_f1,
3348 min_precision,
3349 min_recall,
3350 no_thresholds,
3351 json,
3352 });
3353 }
3354 }
3355 Commands::Conformance { dir } => {
3356 let _ = conformance::run(&dir);
3357 }
3358 Commands::Version => println!("vela 0.36.0"),
3359 Commands::Sign { action } => cmd_sign(action),
3360 Commands::Actor { action } => cmd_actor(action),
3361 Commands::Federation { action } => cmd_federation(action),
3362 Commands::Causal { action } => cmd_causal(action),
3363 Commands::Frontier { action } => cmd_frontier(action),
3364 Commands::Queue { action } => cmd_queue(action),
3365 Commands::Registry { action } => cmd_registry(action),
3366 Commands::Init {
3367 path,
3368 name,
3369 template,
3370 no_git,
3371 json,
3372 } => cmd_init(&path, &name, &template, !no_git, json),
3373 Commands::Quickstart {
3374 path,
3375 name,
3376 reviewer,
3377 assertion,
3378 keys_out,
3379 json,
3380 } => cmd_quickstart(
3381 &path,
3382 &name,
3383 &reviewer,
3384 assertion.as_deref(),
3385 keys_out.as_deref(),
3386 json,
3387 ),
3388 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3389 Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
3390 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3391 Commands::Diff {
3392 target,
3393 frontier_b,
3394 frontier,
3395 reviewer,
3396 json,
3397 quiet,
3398 } => {
3399 if target.starts_with("vpr_") {
3404 let frontier_root = frontier
3405 .clone()
3406 .or_else(|| frontier_b.clone())
3407 .unwrap_or_else(|| std::path::PathBuf::from("."));
3408 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3409 .unwrap_or_else(|e| fail_return(&e));
3410 let payload = json!({
3411 "ok": true,
3412 "command": "diff.proposal",
3413 "frontier": frontier_root.display().to_string(),
3414 "proposal_id": target,
3415 "preview": preview,
3416 });
3417 if json {
3418 println!(
3419 "{}",
3420 serde_json::to_string_pretty(&payload)
3421 .expect("failed to serialize diff preview")
3422 );
3423 } else {
3424 println!("vela diff · proposal preview");
3425 println!(" proposal: {}", target);
3426 println!(" kind: {}", preview.kind);
3427 println!(
3428 " findings: {} -> {}",
3429 preview.findings_before, preview.findings_after
3430 );
3431 println!(
3432 " artifacts: {} -> {}",
3433 preview.artifacts_before, preview.artifacts_after
3434 );
3435 println!(
3436 " events: {} -> {}",
3437 preview.events_before, preview.events_after
3438 );
3439 if !preview.changed_findings.is_empty() {
3440 println!(
3441 " findings changed: {}",
3442 preview.changed_findings.join(", ")
3443 );
3444 }
3445 }
3446 } else {
3447 let frontier_a = std::path::PathBuf::from(&target);
3448 let b = frontier_b.unwrap_or_else(|| {
3449 fail_return(
3450 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3451 )
3452 });
3453 diff::run(&frontier_a, &b, json, quiet);
3454 }
3455 }
3456 Commands::Proposals { action } => cmd_proposals(action),
3457 Commands::ArtifactToState {
3458 frontier,
3459 packet,
3460 actor,
3461 apply_artifacts,
3462 json,
3463 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3464 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
3465 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3466 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3467 Commands::Link { action } => cmd_link(action),
3468 Commands::Workbench {
3469 path,
3470 port,
3471 no_open,
3472 } => {
3473 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3474 fail(&e);
3475 }
3476 }
3477 Commands::Bridges { action } => cmd_bridges(action),
3478 Commands::Entity { action } => cmd_entity(action),
3479 Commands::Finding { command } => match command {
3480 FindingCommands::Add {
3481 frontier,
3482 assertion,
3483 r#type,
3484 source,
3485 source_type,
3486 author,
3487 confidence,
3488 evidence_type,
3489 entities,
3490 entities_reviewed,
3491 evidence_span,
3492 gap,
3493 negative_space,
3494 doi,
3495 pmid,
3496 year,
3497 journal,
3498 url,
3499 source_authors,
3500 conditions_text,
3501 species,
3502 in_vivo,
3503 in_vitro,
3504 human_data,
3505 clinical_trial,
3506 json,
3507 apply,
3508 } => {
3509 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3510 validate_enum_arg(
3511 "--evidence-type",
3512 &evidence_type,
3513 bundle::VALID_EVIDENCE_TYPES,
3514 );
3515 validate_enum_arg(
3516 "--source-type",
3517 &source_type,
3518 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3519 );
3520 let parsed_entities = parse_entities(&entities);
3521 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3522 for (name, etype) in &parsed_entities {
3523 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3524 fail(&format!(
3525 "invalid entity type '{}' for '{}'. Valid: {}",
3526 etype,
3527 name,
3528 bundle::VALID_ENTITY_TYPES.join(", "),
3529 ));
3530 }
3531 }
3532 let parsed_source_authors = source_authors
3533 .map(|s| {
3534 s.split(';')
3535 .map(|a| a.trim().to_string())
3536 .filter(|a| !a.is_empty())
3537 .collect()
3538 })
3539 .unwrap_or_default();
3540 let parsed_species = species
3541 .map(|s| {
3542 s.split(';')
3543 .map(|a| a.trim().to_string())
3544 .filter(|a| !a.is_empty())
3545 .collect()
3546 })
3547 .unwrap_or_default();
3548 let report = state::add_finding(
3549 &frontier,
3550 state::FindingDraftOptions {
3551 text: assertion,
3552 assertion_type: r#type,
3553 source,
3554 source_type,
3555 author,
3556 confidence,
3557 evidence_type,
3558 entities: parsed_entities,
3559 doi,
3560 pmid,
3561 year,
3562 journal,
3563 url,
3564 source_authors: parsed_source_authors,
3565 conditions_text,
3566 species: parsed_species,
3567 in_vivo,
3568 in_vitro,
3569 human_data,
3570 clinical_trial,
3571 entities_reviewed,
3572 evidence_spans: parsed_evidence_spans,
3573 gap,
3574 negative_space,
3575 },
3576 apply,
3577 )
3578 .unwrap_or_else(|e| fail_return(&e));
3579 print_state_report(&report, json);
3580 }
3581 FindingCommands::Supersede {
3582 frontier,
3583 old_id,
3584 assertion,
3585 r#type,
3586 source,
3587 source_type,
3588 author,
3589 reason,
3590 confidence,
3591 evidence_type,
3592 entities,
3593 doi,
3594 pmid,
3595 year,
3596 journal,
3597 url,
3598 source_authors,
3599 conditions_text,
3600 species,
3601 in_vivo,
3602 in_vitro,
3603 human_data,
3604 clinical_trial,
3605 json,
3606 apply,
3607 } => {
3608 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3609 validate_enum_arg(
3610 "--evidence-type",
3611 &evidence_type,
3612 bundle::VALID_EVIDENCE_TYPES,
3613 );
3614 validate_enum_arg(
3615 "--source-type",
3616 &source_type,
3617 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3618 );
3619 let parsed_entities = parse_entities(&entities);
3620 for (name, etype) in &parsed_entities {
3621 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3622 fail(&format!(
3623 "invalid entity type '{}' for '{}'. Valid: {}",
3624 etype,
3625 name,
3626 bundle::VALID_ENTITY_TYPES.join(", "),
3627 ));
3628 }
3629 }
3630 let parsed_source_authors = source_authors
3631 .map(|s| {
3632 s.split(';')
3633 .map(|a| a.trim().to_string())
3634 .filter(|a| !a.is_empty())
3635 .collect()
3636 })
3637 .unwrap_or_default();
3638 let parsed_species = species
3639 .map(|s| {
3640 s.split(';')
3641 .map(|a| a.trim().to_string())
3642 .filter(|a| !a.is_empty())
3643 .collect()
3644 })
3645 .unwrap_or_default();
3646 let report = state::supersede_finding(
3647 &frontier,
3648 &old_id,
3649 &reason,
3650 state::FindingDraftOptions {
3651 text: assertion,
3652 assertion_type: r#type,
3653 source,
3654 source_type,
3655 author,
3656 confidence,
3657 evidence_type,
3658 entities: parsed_entities,
3659 doi,
3660 pmid,
3661 year,
3662 journal,
3663 url,
3664 source_authors: parsed_source_authors,
3665 conditions_text,
3666 species: parsed_species,
3667 in_vivo,
3668 in_vitro,
3669 human_data,
3670 clinical_trial,
3671 entities_reviewed: false,
3672 evidence_spans: Vec::new(),
3673 gap: false,
3674 negative_space: false,
3675 },
3676 apply,
3677 )
3678 .unwrap_or_else(|e| fail_return(&e));
3679 print_state_report(&report, json);
3680 }
3681 FindingCommands::CausalSet {
3682 frontier,
3683 finding_id,
3684 claim,
3685 grade,
3686 actor,
3687 reason,
3688 json,
3689 } => {
3690 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3691 fail(&format!(
3692 "invalid --claim '{claim}'; valid: {:?}",
3693 bundle::VALID_CAUSAL_CLAIMS
3694 ));
3695 }
3696 if let Some(g) = grade.as_deref()
3697 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3698 {
3699 fail(&format!(
3700 "invalid --grade '{g}'; valid: {:?}",
3701 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3702 ));
3703 }
3704 let report = state::set_causal(
3705 &frontier,
3706 &finding_id,
3707 &claim,
3708 grade.as_deref(),
3709 &actor,
3710 &reason,
3711 )
3712 .unwrap_or_else(|e| fail_return(&e));
3713 print_state_report(&report, json);
3714 }
3715 },
3716 Commands::Review {
3717 frontier,
3718 finding_id,
3719 status,
3720 reason,
3721 reviewer,
3722 apply,
3723 json,
3724 } => {
3725 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3726 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3727 let report = state::review_finding(
3728 &frontier,
3729 &finding_id,
3730 state::ReviewOptions {
3731 status,
3732 reason,
3733 reviewer,
3734 },
3735 apply,
3736 )
3737 .unwrap_or_else(|e| fail_return(&e));
3738 print_state_report(&report, json);
3739 }
3740 Commands::Note {
3741 frontier,
3742 finding_id,
3743 text,
3744 author,
3745 apply,
3746 json,
3747 } => {
3748 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3749 .unwrap_or_else(|e| fail_return(&e));
3750 print_state_report(&report, json);
3751 }
3752 Commands::Caveat {
3753 frontier,
3754 finding_id,
3755 text,
3756 author,
3757 apply,
3758 json,
3759 } => {
3760 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3761 .unwrap_or_else(|e| fail_return(&e));
3762 print_state_report(&report, json);
3763 }
3764 Commands::Revise {
3765 frontier,
3766 finding_id,
3767 confidence,
3768 reason,
3769 reviewer,
3770 apply,
3771 json,
3772 } => {
3773 let report = state::revise_confidence(
3774 &frontier,
3775 &finding_id,
3776 state::ReviseOptions {
3777 confidence,
3778 reason,
3779 reviewer,
3780 },
3781 apply,
3782 )
3783 .unwrap_or_else(|e| fail_return(&e));
3784 print_state_report(&report, json);
3785 }
3786 Commands::Reject {
3787 frontier,
3788 finding_id,
3789 reason,
3790 reviewer,
3791 apply,
3792 json,
3793 } => {
3794 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3795 .unwrap_or_else(|e| fail_return(&e));
3796 print_state_report(&report, json);
3797 }
3798 Commands::History {
3799 frontier,
3800 finding_id,
3801 json,
3802 as_of,
3803 } => {
3804 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3805 .unwrap_or_else(|e| fail_return(&e));
3806 if json {
3807 println!(
3808 "{}",
3809 serde_json::to_string_pretty(&payload)
3810 .expect("failed to serialize history response")
3811 );
3812 } else {
3813 print_history(&payload);
3814 }
3815 }
3816 Commands::ImportEvents { source, into, json } => {
3817 let report =
3818 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3819 if json {
3820 println!(
3821 "{}",
3822 serde_json::to_string_pretty(&json!({
3823 "ok": true,
3824 "command": "import-events",
3825 "source": report.source,
3826 "target": into.display().to_string(),
3827 "summary": {
3828 "imported": report.imported,
3829 "new": report.new,
3830 "duplicate": report.duplicate,
3831 "canonical_events_imported": report.events_imported,
3832 "canonical_events_new": report.events_new,
3833 "canonical_events_duplicate": report.events_duplicate,
3834 }
3835 }))
3836 .expect("failed to serialize import-events response")
3837 );
3838 } else {
3839 println!("{report}");
3840 }
3841 }
3842 Commands::Retract {
3843 source,
3844 finding_id,
3845 reason,
3846 reviewer,
3847 apply,
3848 json,
3849 } => {
3850 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3851 .unwrap_or_else(|e| fail_return(&e));
3852 print_state_report(&report, json);
3853 }
3854 Commands::LocatorRepair {
3855 frontier,
3856 atom_id,
3857 locator,
3858 reviewer,
3859 reason,
3860 apply,
3861 json,
3862 } => {
3863 cmd_locator_repair(
3864 &frontier,
3865 &atom_id,
3866 locator.as_deref(),
3867 &reviewer,
3868 &reason,
3869 apply,
3870 json,
3871 );
3872 }
3873 Commands::SourceFetch {
3874 identifier,
3875 cache,
3876 out,
3877 refresh,
3878 json,
3879 } => {
3880 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3881 }
3882 Commands::SpanRepair {
3883 frontier,
3884 finding_id,
3885 section,
3886 text,
3887 reviewer,
3888 reason,
3889 apply,
3890 json,
3891 } => {
3892 cmd_span_repair(
3893 &frontier,
3894 &finding_id,
3895 §ion,
3896 &text,
3897 &reviewer,
3898 &reason,
3899 apply,
3900 json,
3901 );
3902 }
3903 Commands::ProofAdd {
3904 frontier,
3905 target_finding,
3906 tool,
3907 tool_version,
3908 script_path,
3909 name,
3910 reviewer,
3911 reason,
3912 json,
3913 } => {
3914 cmd_proof_add(
3915 &frontier,
3916 &target_finding,
3917 &tool,
3918 &tool_version,
3919 &script_path,
3920 &name,
3921 &reviewer,
3922 &reason,
3923 json,
3924 );
3925 }
3926 Commands::EntityAdd {
3927 frontier,
3928 finding_id,
3929 entity,
3930 entity_type,
3931 reviewer,
3932 reason,
3933 apply,
3934 json,
3935 } => {
3936 let report = state::add_finding_entity(
3937 &frontier,
3938 &finding_id,
3939 &entity,
3940 &entity_type,
3941 &reviewer,
3942 &reason,
3943 apply,
3944 )
3945 .unwrap_or_else(|e| fail_return(&e));
3946 print_state_report(&report, json);
3947 }
3948 Commands::EntityResolve {
3949 frontier,
3950 finding_id,
3951 entity,
3952 source,
3953 id,
3954 confidence,
3955 matched_name,
3956 resolution_method,
3957 reviewer,
3958 reason,
3959 apply,
3960 json,
3961 } => {
3962 cmd_entity_resolve(
3963 &frontier,
3964 &finding_id,
3965 &entity,
3966 &source,
3967 &id,
3968 confidence,
3969 matched_name.as_deref(),
3970 &resolution_method,
3971 &reviewer,
3972 &reason,
3973 apply,
3974 json,
3975 );
3976 }
3977 Commands::Propagate {
3978 frontier,
3979 retract,
3980 reduce_confidence,
3981 to,
3982 output,
3983 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3984 Commands::Replicate {
3985 frontier,
3986 target,
3987 outcome,
3988 by,
3989 conditions,
3990 source_title,
3991 doi,
3992 pmid,
3993 sample_size,
3994 note,
3995 previous_attempt,
3996 no_cascade,
3997 json,
3998 } => cmd_replicate(
3999 &frontier,
4000 &target,
4001 &outcome,
4002 &by,
4003 &conditions,
4004 &source_title,
4005 doi.as_deref(),
4006 pmid.as_deref(),
4007 sample_size.as_deref(),
4008 ¬e,
4009 previous_attempt.as_deref(),
4010 no_cascade,
4011 json,
4012 ),
4013 Commands::Replications {
4014 frontier,
4015 target,
4016 json,
4017 } => cmd_replications(&frontier, target.as_deref(), json),
4018 Commands::DatasetAdd {
4019 frontier,
4020 name,
4021 version,
4022 content_hash,
4023 url,
4024 license,
4025 source_title,
4026 doi,
4027 row_count,
4028 json,
4029 } => cmd_dataset_add(
4030 &frontier,
4031 &name,
4032 version.as_deref(),
4033 &content_hash,
4034 url.as_deref(),
4035 license.as_deref(),
4036 &source_title,
4037 doi.as_deref(),
4038 row_count,
4039 json,
4040 ),
4041 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
4042 Commands::CodeAdd {
4043 frontier,
4044 language,
4045 repo_url,
4046 commit,
4047 path,
4048 content_hash,
4049 line_start,
4050 line_end,
4051 entry_point,
4052 json,
4053 } => cmd_code_add(
4054 &frontier,
4055 &language,
4056 repo_url.as_deref(),
4057 commit.as_deref(),
4058 &path,
4059 &content_hash,
4060 line_start,
4061 line_end,
4062 entry_point.as_deref(),
4063 json,
4064 ),
4065 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
4066 Commands::ArtifactAdd {
4067 frontier,
4068 kind,
4069 name,
4070 file,
4071 url,
4072 content_hash,
4073 media_type,
4074 license,
4075 source_title,
4076 source_url,
4077 doi,
4078 target,
4079 metadata,
4080 access_tier,
4081 deposited_by,
4082 reason,
4083 json,
4084 } => cmd_artifact_add(
4085 &frontier,
4086 &kind,
4087 &name,
4088 file.as_deref(),
4089 url.as_deref(),
4090 content_hash.as_deref(),
4091 media_type.as_deref(),
4092 license.as_deref(),
4093 source_title.as_deref(),
4094 source_url.as_deref(),
4095 doi.as_deref(),
4096 target,
4097 metadata,
4098 &access_tier,
4099 &deposited_by,
4100 &reason,
4101 json,
4102 ),
4103 Commands::Artifacts {
4104 frontier,
4105 target,
4106 json,
4107 } => cmd_artifacts(&frontier, target.as_deref(), json),
4108 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4109 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4110 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4111 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4112 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4113 Commands::ClinicalTrialImport {
4114 frontier,
4115 nct_id,
4116 input_json,
4117 target,
4118 deposited_by,
4119 reason,
4120 license,
4121 json,
4122 } => {
4123 cmd_clinical_trial_import(
4124 &frontier,
4125 &nct_id,
4126 input_json.as_deref(),
4127 target,
4128 &deposited_by,
4129 &reason,
4130 &license,
4131 json,
4132 )
4133 .await
4134 }
4135 Commands::NegativeResultAdd {
4136 frontier,
4137 kind,
4138 deposited_by,
4139 reason,
4140 conditions_text,
4141 notes,
4142 target,
4143 endpoint,
4144 intervention,
4145 comparator,
4146 population,
4147 n_enrolled,
4148 power,
4149 ci_lower,
4150 ci_upper,
4151 effect_size_threshold,
4152 registry_id,
4153 reagent,
4154 observation,
4155 attempts,
4156 source_title,
4157 doi,
4158 url,
4159 year,
4160 json,
4161 } => cmd_negative_result_add(
4162 &frontier,
4163 &kind,
4164 &deposited_by,
4165 &reason,
4166 &conditions_text,
4167 ¬es,
4168 target,
4169 endpoint.as_deref(),
4170 intervention.as_deref(),
4171 comparator.as_deref(),
4172 population.as_deref(),
4173 n_enrolled,
4174 power,
4175 ci_lower,
4176 ci_upper,
4177 effect_size_threshold,
4178 registry_id.as_deref(),
4179 reagent.as_deref(),
4180 observation.as_deref(),
4181 attempts,
4182 &source_title,
4183 doi.as_deref(),
4184 url.as_deref(),
4185 year,
4186 json,
4187 ),
4188 Commands::NegativeResults {
4189 frontier,
4190 target,
4191 json,
4192 } => cmd_negative_results(&frontier, target.as_deref(), json),
4193 Commands::TrajectoryCreate {
4194 frontier,
4195 deposited_by,
4196 reason,
4197 target,
4198 notes,
4199 json,
4200 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4201 Commands::TrajectoryStep {
4202 frontier,
4203 trajectory_id,
4204 kind,
4205 description,
4206 actor,
4207 reason,
4208 reference,
4209 json,
4210 } => cmd_trajectory_step(
4211 &frontier,
4212 &trajectory_id,
4213 &kind,
4214 &description,
4215 &actor,
4216 &reason,
4217 reference,
4218 json,
4219 ),
4220 Commands::Trajectories {
4221 frontier,
4222 target,
4223 json,
4224 } => cmd_trajectories(&frontier, target.as_deref(), json),
4225 Commands::TierSet {
4226 frontier,
4227 object_type,
4228 object_id,
4229 tier,
4230 actor,
4231 reason,
4232 json,
4233 } => cmd_tier_set(
4234 &frontier,
4235 &object_type,
4236 &object_id,
4237 &tier,
4238 &actor,
4239 &reason,
4240 json,
4241 ),
4242 Commands::Predict {
4243 frontier,
4244 by,
4245 claim,
4246 criterion,
4247 resolves_by,
4248 confidence,
4249 target,
4250 outcome,
4251 conditions,
4252 json,
4253 } => cmd_predict(
4254 &frontier,
4255 &by,
4256 &claim,
4257 &criterion,
4258 resolves_by.as_deref(),
4259 confidence,
4260 &target,
4261 &outcome,
4262 &conditions,
4263 json,
4264 ),
4265 Commands::Resolve {
4266 frontier,
4267 prediction,
4268 outcome,
4269 matched,
4270 by,
4271 confidence,
4272 source_title,
4273 doi,
4274 json,
4275 } => cmd_resolve(
4276 &frontier,
4277 &prediction,
4278 &outcome,
4279 matched,
4280 &by,
4281 confidence,
4282 &source_title,
4283 doi.as_deref(),
4284 json,
4285 ),
4286 Commands::Predictions {
4287 frontier,
4288 by,
4289 open,
4290 json,
4291 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4292 Commands::Calibration {
4293 frontier,
4294 actor,
4295 json,
4296 } => cmd_calibration(&frontier, actor.as_deref(), json),
4297 Commands::PredictionsExpire {
4298 frontier,
4299 now,
4300 dry_run,
4301 json,
4302 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4303 Commands::Consensus {
4304 frontier,
4305 target,
4306 weighting,
4307 causal_claim,
4308 causal_grade_min,
4309 json,
4310 } => cmd_consensus(
4311 &frontier,
4312 &target,
4313 &weighting,
4314 causal_claim.as_deref(),
4315 causal_grade_min.as_deref(),
4316 json,
4317 ),
4318
4319 Commands::Ingest {
4322 path,
4323 frontier,
4324 backend,
4325 actor,
4326 dry_run,
4327 json,
4328 } => {
4329 cmd_ingest(
4330 &path,
4331 &frontier,
4332 backend.as_deref(),
4333 actor.as_deref(),
4334 dry_run,
4335 json,
4336 )
4337 .await
4338 }
4339
4340 Commands::Propose {
4341 frontier,
4342 finding_id,
4343 status,
4344 reason,
4345 reviewer,
4346 apply,
4347 json,
4348 } => {
4349 let options = state::ReviewOptions {
4352 status: status.clone(),
4353 reason: reason.clone(),
4354 reviewer: reviewer.clone(),
4355 };
4356 let report = state::review_finding(&frontier, &finding_id, options, apply)
4357 .unwrap_or_else(|e| fail_return(&e));
4358 print_state_report(&report, json);
4359 }
4360
4361 Commands::Accept {
4362 frontier,
4363 proposal_id,
4364 reviewer,
4365 reason,
4366 json,
4367 } => {
4368 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4370 .unwrap_or_else(|e| fail_return(&e));
4371 let payload = json!({
4372 "ok": true,
4373 "command": "accept",
4374 "frontier": frontier.display().to_string(),
4375 "proposal_id": proposal_id,
4376 "reviewer": reviewer,
4377 "applied_event_id": event_id,
4378 });
4379 if json {
4380 println!(
4381 "{}",
4382 serde_json::to_string_pretty(&payload)
4383 .expect("failed to serialize accept response")
4384 );
4385 } else {
4386 println!(
4387 "{} accepted and applied proposal {}",
4388 style::ok("ok"),
4389 proposal_id
4390 );
4391 println!(" event: {}", event_id);
4392 }
4393 }
4394
4395 Commands::Attest {
4396 frontier,
4397 event,
4398 attester,
4399 scope_note,
4400 proof_id,
4401 signature,
4402 key,
4403 json,
4404 } => {
4405 if let Some(target_event_id) = event {
4409 let attester_id = attester.unwrap_or_else(|| {
4410 fail_return("attest: --attester is required in per-event mode")
4411 });
4412 let scope = scope_note.unwrap_or_else(|| {
4413 fail_return("attest: --scope-note is required in per-event mode")
4414 });
4415 let attestation_event_id = state::record_attestation(
4416 &frontier,
4417 &target_event_id,
4418 &attester_id,
4419 &scope,
4420 proof_id.as_deref(),
4421 signature.as_deref(),
4422 )
4423 .unwrap_or_else(|e| fail_return(&e));
4424 if json {
4425 let payload = json!({
4426 "ok": true,
4427 "command": "attest.event",
4428 "frontier": frontier.display().to_string(),
4429 "target_event_id": target_event_id,
4430 "attestation_event_id": attestation_event_id,
4431 "attester_id": attester_id,
4432 });
4433 println!(
4434 "{}",
4435 serde_json::to_string_pretty(&payload)
4436 .expect("failed to serialize attest.event response")
4437 );
4438 } else {
4439 println!(
4440 "{} attested {} by {} ({})",
4441 style::ok("ok"),
4442 target_event_id,
4443 attester_id,
4444 attestation_event_id
4445 );
4446 }
4447 return;
4448 }
4449 let key_path = key.unwrap_or_else(|| {
4451 fail_return(
4452 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4453 )
4454 });
4455 let count =
4456 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4457 let payload = json!({
4458 "ok": true,
4459 "command": "attest",
4460 "frontier": frontier.display().to_string(),
4461 "private_key": key_path.display().to_string(),
4462 "signed": count,
4463 });
4464 if json {
4465 println!(
4466 "{}",
4467 serde_json::to_string_pretty(&payload)
4468 .expect("failed to serialize attest response")
4469 );
4470 } else {
4471 println!(
4472 "{} {count} findings in {}",
4473 style::ok("attested"),
4474 frontier.display()
4475 );
4476 }
4477 }
4478
4479 Commands::Lineage {
4480 frontier,
4481 finding_id,
4482 as_of,
4483 json,
4484 } => {
4485 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4487 .unwrap_or_else(|e| fail_return(&e));
4488 if json {
4489 println!(
4490 "{}",
4491 serde_json::to_string_pretty(&payload)
4492 .expect("failed to serialize lineage response")
4493 );
4494 } else {
4495 print_history(&payload);
4496 }
4497 }
4498
4499 Commands::Carina { action } => cmd_carina(action),
4500
4501 Commands::Atlas { action } => cmd_atlas(action).await,
4502
4503 Commands::Constellation { action } => cmd_constellation(action).await,
4504 }
4505}
4506
4507async fn cmd_atlas(action: AtlasAction) {
4512 match action {
4513 AtlasAction::Init {
4514 name,
4515 frontiers,
4516 domain,
4517 scope_note,
4518 atlases_root,
4519 json,
4520 } => match ATLAS_INIT_HANDLER.get() {
4521 Some(handler) => {
4522 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4523 }
4524 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4525 },
4526 AtlasAction::Materialize {
4527 name,
4528 atlases_root,
4529 json,
4530 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4531 Some(handler) => handler(atlases_root, name, json).await,
4532 None => fail("vela atlas materialize: handler not registered"),
4533 },
4534 AtlasAction::Serve {
4535 name,
4536 atlases_root,
4537 port,
4538 no_open,
4539 } => {
4540 match ATLAS_SERVE_HANDLER.get() {
4544 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4545 None => fail("vela atlas serve: handler not registered"),
4546 }
4547 }
4548 AtlasAction::Update {
4549 name,
4550 add_frontier,
4551 remove_vfr_id,
4552 atlases_root,
4553 json,
4554 } => match ATLAS_UPDATE_HANDLER.get() {
4555 Some(handler) => {
4556 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4557 }
4558 None => fail("vela atlas update: handler not registered"),
4559 },
4560 }
4561}
4562
4563async fn cmd_constellation(action: ConstellationAction) {
4567 match action {
4568 ConstellationAction::Init {
4569 name,
4570 atlases,
4571 scope_note,
4572 constellations_root,
4573 json,
4574 } => match CONSTELLATION_INIT_HANDLER.get() {
4575 Some(handler) => {
4576 handler(constellations_root, name, scope_note, atlases, json).await;
4577 }
4578 None => fail(
4579 "vela constellation init: handler not registered (built without vela-constellation)",
4580 ),
4581 },
4582 ConstellationAction::Materialize {
4583 name,
4584 constellations_root,
4585 json,
4586 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4587 Some(handler) => handler(constellations_root, name, json).await,
4588 None => fail("vela constellation materialize: handler not registered"),
4589 },
4590 ConstellationAction::Serve {
4591 name,
4592 constellations_root,
4593 port,
4594 no_open,
4595 } => match CONSTELLATION_SERVE_HANDLER.get() {
4596 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4597 None => fail("vela constellation serve: handler not registered"),
4598 },
4599 }
4600}
4601
4602fn cmd_carina(action: CarinaAction) {
4605 match action {
4606 CarinaAction::List { json } => {
4607 if json {
4608 println!(
4609 "{}",
4610 serde_json::to_string_pretty(&json!({
4611 "ok": true,
4612 "command": "carina.list",
4613 "primitives": carina_validate::PRIMITIVE_NAMES,
4614 }))
4615 .expect("failed to serialize carina.list")
4616 );
4617 } else {
4618 println!("Carina primitives bundled with this build:");
4619 for name in carina_validate::PRIMITIVE_NAMES {
4620 println!(" · {name}");
4621 }
4622 }
4623 }
4624 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4625 Some(text) => print!("{text}"),
4626 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4627 },
4628 CarinaAction::Validate {
4629 path,
4630 primitive,
4631 json,
4632 } => {
4633 let text = std::fs::read_to_string(&path)
4634 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4635 let value: Value = serde_json::from_str(&text)
4636 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4637 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4643 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4644 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4645 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4646 for (key, child) in primitives {
4647 let outcome = carina_validate::validate(key, child)
4648 .map(|()| carina_validate::detect_primitive(child));
4649 report.push((key.clone(), outcome));
4650 }
4651 } else {
4652 let outcome = match primitive.as_deref() {
4653 Some(name) => carina_validate::validate(name, &value).map(|()| {
4654 carina_validate::PRIMITIVE_NAMES
4655 .iter()
4656 .copied()
4657 .find(|p| *p == name)
4658 }),
4659 None => carina_validate::validate_auto(&value).map(Some),
4660 };
4661 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4662 report.push((label, outcome));
4663 }
4664
4665 let total = report.len();
4666 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4667 let fail = total - pass;
4668
4669 if json {
4670 let entries: Vec<Value> = report
4671 .iter()
4672 .map(|(label, r)| match r {
4673 Ok(name) => json!({
4674 "key": label,
4675 "primitive": name,
4676 "ok": true,
4677 }),
4678 Err(errs) => json!({
4679 "key": label,
4680 "ok": false,
4681 "errors": errs,
4682 }),
4683 })
4684 .collect();
4685 println!(
4686 "{}",
4687 serde_json::to_string_pretty(&json!({
4688 "ok": fail == 0,
4689 "command": "carina.validate",
4690 "file": path.display().to_string(),
4691 "total": total,
4692 "passed": pass,
4693 "failed": fail,
4694 "entries": entries,
4695 }))
4696 .expect("failed to serialize carina.validate")
4697 );
4698 } else {
4699 for (label, r) in &report {
4700 match r {
4701 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4702 Ok(None) => println!(" {} {label}", style::ok("ok")),
4703 Err(errs) => {
4704 println!(" {} {label}", style::lost("fail"));
4705 for e in errs {
4706 println!(" {e}");
4707 }
4708 }
4709 }
4710 }
4711 println!();
4712 if fail == 0 {
4713 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4714 } else {
4715 println!(
4716 "{} {pass}/{total} valid · {fail} failed",
4717 style::lost("carina.validate")
4718 );
4719 }
4720 }
4721
4722 if fail > 0 {
4723 std::process::exit(1);
4724 }
4725 }
4726 }
4727}
4728
/// Deposit a formal proof script as a content-addressed artifact attached to
/// an existing `vf_*` finding.
///
/// The script is hashed (SHA-256) to build a stable locator and a
/// deterministic `vpf_*` proof id, packaged as a Carina `proof` primitive
/// (validated against the bundled `proof.schema.json` before anything is
/// written), and then recorded as a pointer-mode artifact via
/// `state::add_artifact`. Any validation or I/O error exits the process via
/// `fail`/`fail_return` instead of returning.
#[allow(clippy::too_many_arguments)]
fn cmd_proof_add(
    frontier: &Path,
    target_finding: &str,
    tool: &str,
    tool_version: &str,
    script_path: &Path,
    name: &str,
    reviewer: &str,
    reason: &str,
    json_output: bool,
) {
    use std::collections::BTreeMap;

    // Proofs may only attach to finding ids, not other object kinds.
    if !target_finding.starts_with("vf_") {
        fail(&format!(
            "--target-finding must be a vf_* finding id; got `{target_finding}`"
        ));
    }
    // Keep this list in sync with the `tool` enum in proof.schema.json.
    let valid_tools = [
        "lean4", "coq", "isabelle", "agda", "metamath", "rocq", "other",
    ];
    if !valid_tools.contains(&tool) {
        fail(&format!(
            "--tool `{tool}` not in {valid_tools:?}; see embedded/carina-schemas/proof.schema.json"
        ));
    }

    // Content-address the script so the locator is stable across copies.
    let script_bytes = std::fs::read(script_path)
        .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", script_path.display())));
    let script_hash_hex = hex::encode(Sha256::digest(&script_bytes));
    let script_locator = format!("sha256:{script_hash_hex}");

    // Deterministic proof id: the same script/tool/version/target always
    // yields the same vpf_* id (first 16 hex chars of a SHA-256).
    let vpf_preimage = format!("{script_locator}|{tool}|{tool_version}|{target_finding}");
    let vpf_id = format!(
        "vpf_{}",
        &hex::encode(Sha256::digest(vpf_preimage.as_bytes()))[..16]
    );

    let verified_at = chrono::Utc::now().to_rfc3339();
    // NOTE(review): verifier_output_hash is an all-zero placeholder — the
    // proof tool is not actually executed here; confirm this is intended.
    let proof_primitive = json!({
        "schema": "carina.proof.v0.3",
        "id": vpf_id,
        "tool": tool,
        "tool_version": tool_version,
        "script_locator": script_locator,
        "verifier_output_hash": format!("sha256:{}", "0".repeat(64)),
        "verified_at": verified_at,
        "target_finding_id": target_finding,
    });
    // Fail fast if the constructed primitive drifts from the bundled schema.
    if let Err(errs) = carina_validate::validate("proof", &proof_primitive) {
        fail(&format!(
            "constructed Proof primitive does not validate against proof.schema.json:\n - {}",
            errs.join("\n - ")
        ));
    }

    // Mirror the primitive's key fields into artifact metadata so the proof
    // remains discoverable through artifact queries.
    let mut metadata: BTreeMap<String, Value> = BTreeMap::new();
    metadata.insert(
        "carina_kind".to_string(),
        Value::String("proof_script".to_string()),
    );
    metadata.insert(
        "carina_proof_tool".to_string(),
        Value::String(tool.to_string()),
    );
    metadata.insert(
        "carina_proof_tool_version".to_string(),
        Value::String(tool_version.to_string()),
    );
    metadata.insert("carina_proof_id".to_string(), Value::String(vpf_id.clone()));
    metadata.insert(
        "carina_proof_target_finding".to_string(),
        Value::String(target_finding.to_string()),
    );

    // NOTE(review): rocq shares the lean media type here — confirm intended.
    let media_type = match tool {
        "lean4" | "rocq" => Some("text/x-lean".to_string()),
        "coq" => Some("text/x-coq".to_string()),
        "isabelle" => Some("text/x-isabelle".to_string()),
        "agda" => Some("text/x-agda".to_string()),
        "metamath" => Some("text/x-metamath".to_string()),
        _ => None,
    };

    // Synthetic provenance: the script is treated as a local code artifact,
    // not an external publication, so most bibliographic fields stay empty.
    let provenance = crate::bundle::Provenance {
        source_type: "code_repository".to_string(),
        doi: None,
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: None,
        title: format!("Proof script for {target_finding} ({tool} {tool_version})"),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: Some("Apache-2.0 OR MIT".to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction::default(),
        review: None,
        citation_count: None,
    };

    // Artifact id is derived from kind/name/hash/locator, so re-depositing
    // the same script is idempotent at the id level.
    let artifact_id = crate::bundle::Artifact::content_address(
        "source_file",
        name,
        &format!("sha256:{script_hash_hex}"),
        None,
        Some(&script_path.display().to_string()),
    );

    // Pointer storage: only the locator is recorded; the bytes stay on disk.
    let artifact = crate::bundle::Artifact {
        id: artifact_id.clone(),
        kind: "source_file".to_string(),
        name: name.to_string(),
        content_hash: format!("sha256:{script_hash_hex}"),
        size_bytes: Some(script_bytes.len() as u64),
        media_type,
        storage_mode: "pointer".to_string(),
        locator: Some(script_path.display().to_string()),
        source_url: None,
        license: Some("Apache-2.0 OR MIT".to_string()),
        target_findings: vec![target_finding.to_string()],
        source_id: None,
        provenance,
        metadata,
        review_state: None,
        retracted: false,
        access_tier: crate::access_tier::AccessTier::default(),
        created: verified_at.clone(),
    };

    // Persist through the event-sourced state layer (records an event id).
    let report = state::add_artifact(frontier, artifact, reviewer, reason)
        .unwrap_or_else(|e| fail_return(&e));

    let payload = json!({
        "ok": true,
        "command": "proof-add",
        "frontier": frontier.display().to_string(),
        "target_finding": target_finding,
        "tool": tool,
        "tool_version": tool_version,
        "script_path": script_path.display().to_string(),
        "script_locator": script_locator,
        "size_bytes": script_bytes.len(),
        "vpf_id": vpf_id,
        "va_id": artifact_id,
        "applied_event_id": report.applied_event_id,
        "verified_at": verified_at,
        "reviewer": reviewer,
    });

    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof-add response")
        );
    } else {
        println!(
            "{} proof artifact deposited for {target_finding}",
            style::ok("ok")
        );
        println!(" vpf_id: {vpf_id}");
        println!(" va_id: {artifact_id}");
        println!(" locator: {script_locator}");
        println!(" tool: {tool} {tool_version}");
        if let Some(eid) = &report.applied_event_id {
            println!(" event: {eid}");
        }
    }
}
4927
4928fn cmd_consensus(
4931 frontier: &Path,
4932 target: &str,
4933 weighting_str: &str,
4934 causal_claim: Option<&str>,
4935 causal_grade_min: Option<&str>,
4936 json: bool,
4937) {
4938 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4939
4940 if !target.starts_with("vf_") {
4941 fail(&format!("target `{target}` is not a vf_ finding id"));
4942 }
4943 let scheme =
4944 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4945
4946 let parsed_claim = match causal_claim {
4947 None => None,
4948 Some("correlation") => Some(CausalClaim::Correlation),
4949 Some("mediation") => Some(CausalClaim::Mediation),
4950 Some("intervention") => Some(CausalClaim::Intervention),
4951 Some(other) => fail_return(&format!(
4952 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4953 )),
4954 };
4955 let parsed_grade = match causal_grade_min {
4956 None => None,
4957 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4958 Some("observational") => Some(CausalEvidenceGrade::Observational),
4959 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4960 Some("rct") => Some(CausalEvidenceGrade::Rct),
4961 Some(other) => fail_return(&format!(
4962 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4963 )),
4964 };
4965 let filter = crate::aggregate::AggregateFilter {
4966 causal_claim: parsed_claim,
4967 causal_grade_min: parsed_grade,
4968 };
4969 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4970
4971 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4972 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4973
4974 if json {
4975 println!(
4976 "{}",
4977 serde_json::to_string_pretty(&result).expect("serialize consensus")
4978 );
4979 return;
4980 }
4981
4982 println!();
4983 println!(
4984 " {}",
4985 format!(
4986 "VELA · CONSENSUS · {} ({})",
4987 result.target, result.weighting
4988 )
4989 .to_uppercase()
4990 .dimmed()
4991 );
4992 println!(" {}", style::tick_row(60));
4993 println!(
4994 " target: {}",
4995 truncate(&result.target_assertion, 80)
4996 );
4997 println!(" similar findings: {}", result.n_findings);
4998 println!(
4999 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
5000 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
5001 );
5002 println!();
5003 println!(" constituents (sorted by weight):");
5004 let mut sorted = result.constituents.clone();
5005 sorted.sort_by(|a, b| {
5006 b.weight
5007 .partial_cmp(&a.weight)
5008 .unwrap_or(std::cmp::Ordering::Equal)
5009 });
5010 for c in sorted.iter().take(10) {
5011 let repls = if c.n_replications > 0 {
5012 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
5013 } else {
5014 String::new()
5015 };
5016 println!(
5017 " · w={:.2} raw={:.2} adj={:.2}{}",
5018 c.weight, c.raw_score, c.adjusted_score, repls
5019 );
5020 println!(" {}", truncate(&c.assertion_text, 88));
5021 }
5022 if result.constituents.len() > 10 {
5023 println!(" ... ({} more)", result.constituents.len() - 10);
5024 }
5025}
5026
5027fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
5033 let trimmed = s.trim();
5034 if trimmed.eq_ignore_ascii_case("affirmed") {
5035 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
5036 }
5037 if trimmed.eq_ignore_ascii_case("falsified") {
5038 return Ok(crate::bundle::ExpectedOutcome::Falsified);
5039 }
5040 if let Some(rest) = trimmed.strip_prefix("cat:") {
5041 return Ok(crate::bundle::ExpectedOutcome::Categorical {
5042 value: rest.to_string(),
5043 });
5044 }
5045 if let Some(rest) = trimmed.strip_prefix("quant:") {
5046 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
5047 let (val_s, tol_s) = vt
5048 .split_once('±')
5049 .or_else(|| vt.split_once("+/-"))
5050 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
5051 let value: f64 = val_s
5052 .parse()
5053 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
5054 let tolerance: f64 = tol_s
5055 .parse()
5056 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
5057 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
5058 value,
5059 tolerance,
5060 units: units.to_string(),
5061 });
5062 }
5063 Err(format!(
5064 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
5065 ))
5066}
5067
5068#[allow(clippy::too_many_arguments)]
5070fn cmd_predict(
5071 frontier: &Path,
5072 by: &str,
5073 claim: &str,
5074 criterion: &str,
5075 resolves_by: Option<&str>,
5076 confidence: f64,
5077 target_csv: &str,
5078 outcome: &str,
5079 conditions_text: &str,
5080 json: bool,
5081) {
5082 if !(0.0..=1.0).contains(&confidence) {
5083 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
5084 }
5085 let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
5086
5087 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5088
5089 let targets: Vec<String> = target_csv
5090 .split(',')
5091 .map(|s| s.trim().to_string())
5092 .filter(|s| !s.is_empty())
5093 .collect();
5094 for t in &targets {
5095 if !t.starts_with("vf_") {
5096 fail(&format!("target `{t}` is not a vf_ id"));
5097 }
5098 if !project.findings.iter().any(|f| f.id == *t) {
5099 fail(&format!("target `{t}` not present in frontier"));
5100 }
5101 }
5102
5103 let lower = conditions_text.to_lowercase();
5104 let conditions = crate::bundle::Conditions {
5105 text: conditions_text.to_string(),
5106 species_verified: Vec::new(),
5107 species_unverified: Vec::new(),
5108 in_vitro: lower.contains("in vitro"),
5109 in_vivo: lower.contains("in vivo"),
5110 human_data: lower.contains("human") || lower.contains("clinical"),
5111 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
5112 concentration_range: None,
5113 duration: None,
5114 age_group: None,
5115 cell_type: None,
5116 };
5117
5118 let prediction = crate::bundle::Prediction::new(
5119 claim.to_string(),
5120 targets,
5121 None,
5122 resolves_by.map(|s| s.to_string()),
5123 criterion.to_string(),
5124 expected,
5125 by.to_string(),
5126 confidence,
5127 conditions,
5128 );
5129
5130 if project.predictions.iter().any(|p| p.id == prediction.id) {
5131 if json {
5132 println!(
5133 "{}",
5134 serde_json::to_string_pretty(&json!({
5135 "ok": false,
5136 "command": "predict",
5137 "reason": "prediction_already_exists",
5138 "id": prediction.id,
5139 }))
5140 .expect("serialize")
5141 );
5142 } else {
5143 println!(
5144 "{} prediction {} already exists in {}; skipping.",
5145 style::warn("predict"),
5146 prediction.id,
5147 frontier.display()
5148 );
5149 }
5150 return;
5151 }
5152
5153 let new_id = prediction.id.clone();
5154 project.predictions.push(prediction);
5155 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5156
5157 if json {
5158 println!(
5159 "{}",
5160 serde_json::to_string_pretty(&json!({
5161 "ok": true,
5162 "command": "predict",
5163 "id": new_id,
5164 "made_by": by,
5165 "confidence": confidence,
5166 "frontier": frontier.display().to_string(),
5167 }))
5168 .expect("serialize predict result")
5169 );
5170 } else {
5171 println!();
5172 println!(
5173 " {}",
5174 format!("VELA · PREDICT · {}", new_id)
5175 .to_uppercase()
5176 .dimmed()
5177 );
5178 println!(" {}", style::tick_row(60));
5179 println!(" by: {by}");
5180 println!(" confidence: {confidence:.3}");
5181 if let Some(d) = resolves_by {
5182 println!(" resolves by: {d}");
5183 }
5184 println!(" outcome: {outcome}");
5185 println!(" claim: {}", truncate(claim, 88));
5186 println!();
5187 println!(
5188 " {} prediction recorded in {}",
5189 style::ok("ok"),
5190 frontier.display()
5191 );
5192 }
5193}
5194
5195#[allow(clippy::too_many_arguments)]
5197fn cmd_resolve(
5198 frontier: &Path,
5199 prediction_id: &str,
5200 actual_outcome: &str,
5201 matched: bool,
5202 by: &str,
5203 confidence: f64,
5204 source_title: &str,
5205 doi: Option<&str>,
5206 json: bool,
5207) {
5208 if !prediction_id.starts_with("vpred_") {
5209 fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
5210 }
5211 if !(0.0..=1.0).contains(&confidence) {
5212 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
5213 }
5214 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5215 if !project.predictions.iter().any(|p| p.id == prediction_id) {
5216 fail(&format!(
5217 "prediction `{prediction_id}` not present in frontier"
5218 ));
5219 }
5220
5221 let evidence = crate::bundle::Evidence {
5222 evidence_type: "experimental".to_string(),
5223 model_system: String::new(),
5224 species: None,
5225 method: "prediction_resolution".to_string(),
5226 sample_size: None,
5227 effect_size: None,
5228 p_value: None,
5229 replicated: false,
5230 replication_count: None,
5231 evidence_spans: if source_title.is_empty() {
5232 Vec::new()
5233 } else {
5234 vec![serde_json::json!({"text": source_title})]
5235 },
5236 };
5237
5238 let _ = doi; let resolution = crate::bundle::Resolution::new(
5245 prediction_id.to_string(),
5246 actual_outcome.to_string(),
5247 matched,
5248 by.to_string(),
5249 evidence,
5250 confidence,
5251 );
5252
5253 if project.resolutions.iter().any(|r| r.id == resolution.id) {
5254 if json {
5255 println!(
5256 "{}",
5257 serde_json::to_string_pretty(&json!({
5258 "ok": false,
5259 "command": "resolve",
5260 "reason": "resolution_already_exists",
5261 "id": resolution.id,
5262 }))
5263 .expect("serialize")
5264 );
5265 } else {
5266 println!(
5267 "{} resolution {} already exists in {}; skipping.",
5268 style::warn("resolve"),
5269 resolution.id,
5270 frontier.display()
5271 );
5272 }
5273 return;
5274 }
5275
5276 let new_id = resolution.id.clone();
5277 project.resolutions.push(resolution);
5278 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5279
5280 if json {
5281 println!(
5282 "{}",
5283 serde_json::to_string_pretty(&json!({
5284 "ok": true,
5285 "command": "resolve",
5286 "id": new_id,
5287 "prediction": prediction_id,
5288 "matched": matched,
5289 "frontier": frontier.display().to_string(),
5290 }))
5291 .expect("serialize resolve result")
5292 );
5293 } else {
5294 println!();
5295 println!(
5296 " {}",
5297 format!("VELA · RESOLVE · {}", new_id)
5298 .to_uppercase()
5299 .dimmed()
5300 );
5301 println!(" {}", style::tick_row(60));
5302 println!(" prediction: {prediction_id}");
5303 println!(
5304 " matched: {}",
5305 if matched {
5306 style::ok("yes")
5307 } else {
5308 style::lost("no")
5309 }
5310 );
5311 println!(" by: {by}");
5312 println!(" outcome: {}", truncate(actual_outcome, 80));
5313 println!();
5314 println!(
5315 " {} resolution recorded in {}",
5316 style::ok("ok"),
5317 frontier.display()
5318 );
5319 }
5320}
5321
5322fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5324 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5325
5326 let resolved_ids: std::collections::HashSet<&str> = project
5327 .resolutions
5328 .iter()
5329 .map(|r| r.prediction_id.as_str())
5330 .collect();
5331
5332 let mut filtered: Vec<&crate::bundle::Prediction> = project
5333 .predictions
5334 .iter()
5335 .filter(|p| by.is_none_or(|b| p.made_by == b))
5336 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5337 .collect();
5338 filtered.sort_by(|a, b| {
5339 a.resolves_by
5340 .as_deref()
5341 .unwrap_or("9999")
5342 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5343 });
5344
5345 if json {
5346 let payload: Vec<serde_json::Value> = filtered
5347 .iter()
5348 .map(|p| {
5349 json!({
5350 "id": p.id,
5351 "claim_text": p.claim_text,
5352 "made_by": p.made_by,
5353 "confidence": p.confidence,
5354 "predicted_at": p.predicted_at,
5355 "resolves_by": p.resolves_by,
5356 "expected_outcome": p.expected_outcome,
5357 "resolved": resolved_ids.contains(p.id.as_str()),
5358 })
5359 })
5360 .collect();
5361 println!(
5362 "{}",
5363 serde_json::to_string_pretty(&json!({
5364 "ok": true,
5365 "command": "predictions",
5366 "frontier": frontier.display().to_string(),
5367 "count": payload.len(),
5368 "predictions": payload,
5369 }))
5370 .expect("serialize predictions")
5371 );
5372 return;
5373 }
5374
5375 println!();
5376 println!(
5377 " {}",
5378 format!("VELA · PREDICTIONS · {}", frontier.display())
5379 .to_uppercase()
5380 .dimmed()
5381 );
5382 println!(" {}", style::tick_row(60));
5383 if filtered.is_empty() {
5384 println!(" (no predictions matching filters)");
5385 return;
5386 }
5387 for p in &filtered {
5388 let resolved = resolved_ids.contains(p.id.as_str());
5389 let chip = if resolved {
5390 style::ok("resolved")
5391 } else {
5392 style::warn("open")
5393 };
5394 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5395 println!(
5396 " · {} {} by {} → {}",
5397 p.id.dimmed(),
5398 chip,
5399 p.made_by,
5400 deadline,
5401 );
5402 println!(" claim: {}", truncate(&p.claim_text, 90));
5403 println!(" confidence: {:.2}", p.confidence);
5404 }
5405}
5406
5407fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5412 use chrono::DateTime;
5413
5414 let now_dt = match now_override {
5415 Some(s) => DateTime::parse_from_rfc3339(s)
5416 .map(|dt| dt.with_timezone(&chrono::Utc))
5417 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5418 None => chrono::Utc::now(),
5419 };
5420
5421 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5422 if dry_run {
5423 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5425 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5426 if json {
5427 println!(
5428 "{}",
5429 serde_json::to_string_pretty(&json!({
5430 "ok": true,
5431 "command": "predictions.expire",
5432 "dry_run": true,
5433 "report": report,
5434 }))
5435 .expect("serialize predictions.expire (dry-run)")
5436 );
5437 } else {
5438 println!(
5439 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5440 style::ok("ok"),
5441 report.now,
5442 report.newly_expired.len(),
5443 report.already_expired.len(),
5444 report.already_resolved.len(),
5445 report.still_open.len(),
5446 );
5447 for id in &report.newly_expired {
5448 println!(" · {id}");
5449 }
5450 }
5451 return;
5452 }
5453
5454 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5455 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5456
5457 if json {
5458 println!(
5459 "{}",
5460 serde_json::to_string_pretty(&json!({
5461 "ok": true,
5462 "command": "predictions.expire",
5463 "report": report,
5464 }))
5465 .expect("serialize predictions.expire")
5466 );
5467 } else {
5468 println!(
5469 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5470 style::ok("expired"),
5471 report.now,
5472 report.newly_expired.len(),
5473 report.already_expired.len(),
5474 report.already_resolved.len(),
5475 report.still_open.len(),
5476 );
5477 for id in &report.newly_expired {
5478 println!(" · {id}");
5479 }
5480 }
5481}
5482
5483fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5484 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5485 let records = match actor {
5486 Some(a) => {
5487 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5488 .map(|r| vec![r])
5489 .unwrap_or_default()
5490 }
5491 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5492 };
5493
5494 if json {
5495 println!(
5496 "{}",
5497 serde_json::to_string_pretty(&json!({
5498 "ok": true,
5499 "command": "calibration",
5500 "frontier": frontier.display().to_string(),
5501 "filter_actor": actor,
5502 "records": records,
5503 }))
5504 .expect("serialize calibration")
5505 );
5506 return;
5507 }
5508
5509 println!();
5510 println!(
5511 " {}",
5512 format!("VELA · CALIBRATION · {}", frontier.display())
5513 .to_uppercase()
5514 .dimmed()
5515 );
5516 println!(" {}", style::tick_row(60));
5517 if records.is_empty() {
5518 println!(" (no calibration records)");
5519 return;
5520 }
5521 for r in &records {
5522 println!(" · {}", r.actor);
5523 println!(
5524 " predictions: {} resolved: {} hits: {}",
5525 r.n_predictions, r.n_resolved, r.n_hit
5526 );
5527 match r.hit_rate {
5528 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5529 None => println!(" hit rate: n/a"),
5530 }
5531 match r.brier_score {
5532 Some(b) => println!(
5533 " brier: {:.4} (lower is better; 0.25 = chance)",
5534 b
5535 ),
5536 None => println!(" brier: n/a"),
5537 }
5538 match r.log_score {
5539 Some(l) => println!(
5540 " log score: {:.4} (higher is better; 0 = perfect)",
5541 l
5542 ),
5543 None => println!(" log score: n/a"),
5544 }
5545 }
5546}
5547
/// Register a dataset on the frontier.
///
/// Builds manual-curation provenance, constructs the `Dataset` (its `id` is
/// assigned by `Dataset::new` — presumably derived from the inputs; confirm
/// in `bundle`), and appends it to `project.datasets` unless an entry with
/// the same id already exists (duplicate adds are reported and skipped).
///
/// NOTE(review): this mutates the project and saves via `repo::save_to_path`
/// directly; no `state::…` event appears to be recorded for the addition,
/// unlike `cmd_artifact_add` — confirm that is intentional.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance for a manually curated data release; bibliographic fields
    // not captured at the CLI (authors, journal, …) stay empty.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // row_count is not a constructor argument; set it after the fact.
    dataset.row_count = row_count;

    // Idempotency: refuse to add a second dataset with the same derived id.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  name:         {name}");
        if let Some(v) = version {
            println!("  version:      {v}");
        }
        println!("  content_hash: {content_hash}");
        if let Some(u) = url {
            println!("  url:          {u}");
        }
        println!("  source:       {source_title}");
        println!();
        println!(
            "  {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5665
/// Deposit a negative result on the frontier.
///
/// `kind` selects one of two shapes and which flags are mandatory:
/// - `"registered_trial"`: --endpoint, --intervention, --comparator,
///   --population, --n-enrolled, --power, --ci-lower and --ci-upper
///   (plus optional --effect-size-threshold and --registry-id);
/// - `"exploratory"`: --reagent, --observation and --attempts.
/// Any missing required flag, or an unknown kind, aborts via `fail_return`
/// before any state is touched.
///
/// Validation, id assignment and event recording are delegated to
/// `state::add_negative_result`.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Assemble the kind-specific payload, enforcing the per-kind required flags.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                // The two CI flags are packed into a single (lower, upper) tuple.
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Experimental conditions: only the free-text description and the
    // clinical_trial flag (true exactly for registered_trial) are filled here.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance source_type mirrors the kind: "clinical_trial" for trials,
    // "lab_notebook" for exploratory results; extraction marks manual curation.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind:         {kind}");
        println!("  deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!("  event:        {ev}");
        }
        println!(
            "  {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5828
5829fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
5832 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5833 let filtered: Vec<&crate::bundle::NegativeResult> = project
5834 .negative_results
5835 .iter()
5836 .filter(|nr| {
5837 target
5838 .map(|t| nr.target_findings.iter().any(|f| f == t))
5839 .unwrap_or(true)
5840 })
5841 .collect();
5842
5843 if json {
5844 println!(
5845 "{}",
5846 serde_json::to_string_pretty(&json!({
5847 "ok": true,
5848 "command": "negative_results",
5849 "frontier": frontier.display().to_string(),
5850 "count": filtered.len(),
5851 "negative_results": filtered,
5852 }))
5853 .expect("serialize negative_results")
5854 );
5855 return;
5856 }
5857
5858 if filtered.is_empty() {
5859 println!(" no negative_results in {}", frontier.display());
5860 return;
5861 }
5862
5863 println!();
5864 println!(
5865 " {} ({})",
5866 "VELA · NEGATIVE RESULTS".dimmed(),
5867 filtered.len()
5868 );
5869 println!(" {}", style::tick_row(60));
5870 for nr in &filtered {
5871 let kind_label = match &nr.kind {
5872 crate::bundle::NegativeResultKind::RegisteredTrial {
5873 endpoint, power, ..
5874 } => format!("trial · {endpoint} · power {power:.2}"),
5875 crate::bundle::NegativeResultKind::Exploratory {
5876 reagent, attempts, ..
5877 } => format!("exploratory · {reagent} · {attempts} attempts"),
5878 };
5879 let retracted = if nr.retracted { " [retracted]" } else { "" };
5880 let review = nr
5881 .review_state
5882 .as_ref()
5883 .map(|s| format!(" [{s:?}]"))
5884 .unwrap_or_default();
5885 println!(" {}{}{}", nr.id, retracted, review);
5886 println!(" {kind_label}");
5887 if !nr.target_findings.is_empty() {
5888 println!(" targets: {}", nr.target_findings.join(", "));
5889 }
5890 }
5891 println!();
5892}
5893
5894#[allow(clippy::too_many_arguments)]
5896fn cmd_tier_set(
5897 frontier: &Path,
5898 object_type: &str,
5899 object_id: &str,
5900 tier: &str,
5901 actor: &str,
5902 reason: &str,
5903 json: bool,
5904) {
5905 let parsed_tier =
5906 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5907 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5908 .unwrap_or_else(|e| fail_return(&e));
5909
5910 if json {
5911 println!(
5912 "{}",
5913 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5914 );
5915 } else {
5916 println!();
5917 println!(
5918 " {}",
5919 format!("VELA · TIER · {}", object_id)
5920 .to_uppercase()
5921 .dimmed()
5922 );
5923 println!(" {}", style::tick_row(60));
5924 println!(" object_type: {object_type}");
5925 println!(" new_tier: {}", parsed_tier.canonical());
5926 println!(" actor: {actor}");
5927 if let Some(ev) = &report.applied_event_id {
5928 println!(" event: {ev}");
5929 }
5930 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5931 }
5932}
5933
5934#[allow(clippy::too_many_arguments)]
5936fn cmd_trajectory_create(
5937 frontier: &Path,
5938 deposited_by: &str,
5939 reason: &str,
5940 targets: Vec<String>,
5941 notes: &str,
5942 json: bool,
5943) {
5944 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5945 .unwrap_or_else(|e| fail_return(&e));
5946
5947 if json {
5948 println!(
5949 "{}",
5950 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5951 );
5952 } else {
5953 println!();
5954 println!(
5955 " {}",
5956 format!("VELA · TRAJECTORY · {}", report.finding_id)
5957 .to_uppercase()
5958 .dimmed()
5959 );
5960 println!(" {}", style::tick_row(60));
5961 println!(" deposited_by: {deposited_by}");
5962 if let Some(ev) = &report.applied_event_id {
5963 println!(" event: {ev}");
5964 }
5965 println!(
5966 " {} trajectory opened in {}",
5967 style::ok("ok"),
5968 frontier.display()
5969 );
5970 }
5971}
5972
5973#[allow(clippy::too_many_arguments)]
5975fn cmd_trajectory_step(
5976 frontier: &Path,
5977 trajectory_id: &str,
5978 kind: &str,
5979 description: &str,
5980 actor: &str,
5981 reason: &str,
5982 references: Vec<String>,
5983 json: bool,
5984) {
5985 let parsed_kind = match kind {
5986 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5987 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5988 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5989 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5990 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5991 other => fail_return(&format!(
5992 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5993 )),
5994 };
5995 let report = state::append_trajectory_step(
5996 frontier,
5997 trajectory_id,
5998 parsed_kind,
5999 description,
6000 actor,
6001 references,
6002 reason,
6003 )
6004 .unwrap_or_else(|e| fail_return(&e));
6005
6006 if json {
6007 println!(
6008 "{}",
6009 serde_json::to_string_pretty(&report).expect("serialize step report")
6010 );
6011 } else {
6012 println!();
6013 println!(
6014 " {}",
6015 format!("VELA · STEP · {}", report.finding_id)
6016 .to_uppercase()
6017 .dimmed()
6018 );
6019 println!(" {}", style::tick_row(60));
6020 println!(" trajectory: {trajectory_id}");
6021 println!(" kind: {kind}");
6022 println!(" actor: {actor}");
6023 println!(
6024 " {} step appended in {}",
6025 style::ok("ok"),
6026 frontier.display()
6027 );
6028 }
6029}
6030
6031fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
6033 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6034 let filtered: Vec<&crate::bundle::Trajectory> = project
6035 .trajectories
6036 .iter()
6037 .filter(|t| {
6038 target
6039 .map(|tg| t.target_findings.iter().any(|f| f == tg))
6040 .unwrap_or(true)
6041 })
6042 .collect();
6043
6044 if json {
6045 println!(
6046 "{}",
6047 serde_json::to_string_pretty(&json!({
6048 "ok": true,
6049 "command": "trajectories",
6050 "frontier": frontier.display().to_string(),
6051 "count": filtered.len(),
6052 "trajectories": filtered,
6053 }))
6054 .expect("serialize trajectories")
6055 );
6056 return;
6057 }
6058
6059 if filtered.is_empty() {
6060 println!(" no trajectories in {}", frontier.display());
6061 return;
6062 }
6063
6064 println!();
6065 println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
6066 println!(" {}", style::tick_row(60));
6067 for t in &filtered {
6068 let retracted = if t.retracted { " [retracted]" } else { "" };
6069 let review = t
6070 .review_state
6071 .as_ref()
6072 .map(|s| format!(" [{s:?}]"))
6073 .unwrap_or_default();
6074 println!(" {}{}{}", t.id, retracted, review);
6075 println!(
6076 " {} step(s){}",
6077 t.steps.len(),
6078 if t.target_findings.is_empty() {
6079 String::new()
6080 } else {
6081 format!(" · targets: {}", t.target_findings.join(", "))
6082 }
6083 );
6084 for step in &t.steps {
6085 let label = match step.kind {
6086 crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
6087 crate::bundle::TrajectoryStepKind::Tried => "tried",
6088 crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
6089 crate::bundle::TrajectoryStepKind::Observed => "observed",
6090 crate::bundle::TrajectoryStepKind::Refined => "refined",
6091 };
6092 let preview: String = step.description.chars().take(80).collect();
6093 println!(" [{label}] {preview}");
6094 }
6095 }
6096 println!();
6097}
6098
6099fn cmd_datasets(frontier: &Path, json: bool) {
6101 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6102 if json {
6103 println!(
6104 "{}",
6105 serde_json::to_string_pretty(&json!({
6106 "ok": true,
6107 "command": "datasets",
6108 "frontier": frontier.display().to_string(),
6109 "count": project.datasets.len(),
6110 "datasets": project.datasets,
6111 }))
6112 .expect("serialize datasets")
6113 );
6114 return;
6115 }
6116 println!();
6117 println!(
6118 " {}",
6119 format!("VELA · DATASETS · {}", frontier.display())
6120 .to_uppercase()
6121 .dimmed()
6122 );
6123 println!(" {}", style::tick_row(60));
6124 if project.datasets.is_empty() {
6125 println!(" (no datasets registered)");
6126 return;
6127 }
6128 for ds in &project.datasets {
6129 let v = ds
6130 .version
6131 .as_deref()
6132 .map(|s| format!("@{s}"))
6133 .unwrap_or_default();
6134 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
6135 if let Some(u) = &ds.url {
6136 println!(" url: {}", truncate(u, 80));
6137 }
6138 println!(" hash: {}", truncate(&ds.content_hash, 80));
6139 }
6140}
6141
/// Register a code artifact (a pointer to source code) on the frontier.
///
/// The line range is normalised from the two optional flags: both given →
/// (start, end); only a start → the single-line range (start, start);
/// only an end, or neither → no range. Duplicate ids (as assigned by
/// `CodeArtifact::new`) are reported and skipped.
///
/// NOTE(review): like `cmd_dataset_add`, this saves via `repo::save_to_path`
/// directly; no state event appears to be recorded — confirm intentional.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Collapse the two optional line flags into one optional inclusive range.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Idempotency: skip (and report) if an artifact with this id already exists.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  language:     {language}");
        if let Some(r) = repo_url {
            println!("  repo:         {r}");
        }
        if let Some(c) = commit {
            println!("  commit:       {c}");
        }
        println!("  path:         {path}");
        if let Some((a, b)) = line_range {
            println!("  lines:        {a}-{b}");
        }
        println!("  content_hash: {content_hash}");
        println!();
        println!(
            "  {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6241
6242fn cmd_code_artifacts(frontier: &Path, json: bool) {
6244 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6245 if json {
6246 println!(
6247 "{}",
6248 serde_json::to_string_pretty(&json!({
6249 "ok": true,
6250 "command": "code-artifacts",
6251 "frontier": frontier.display().to_string(),
6252 "count": project.code_artifacts.len(),
6253 "code_artifacts": project.code_artifacts,
6254 }))
6255 .expect("serialize code-artifacts")
6256 );
6257 return;
6258 }
6259 println!();
6260 println!(
6261 " {}",
6262 format!("VELA · CODE · {}", frontier.display())
6263 .to_uppercase()
6264 .dimmed()
6265 );
6266 println!(" {}", style::tick_row(60));
6267 if project.code_artifacts.is_empty() {
6268 println!(" (no code artifacts registered)");
6269 return;
6270 }
6271 for c in &project.code_artifacts {
6272 let lr = c
6273 .line_range
6274 .map(|(a, b)| format!(":{a}-{b}"))
6275 .unwrap_or_default();
6276 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
6277 if let Some(r) = &c.repo_url {
6278 println!(" repo: {}", truncate(r, 80));
6279 }
6280 if let Some(g) = &c.git_commit {
6281 println!(" commit: {g}");
6282 }
6283 }
6284}
6285
6286fn sha256_for_bytes(bytes: &[u8]) -> String {
6287 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6288}
6289
/// Return the hex portion of a `sha256:`-prefixed content hash; inputs
/// without the prefix are passed through unchanged.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6293
6294fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
6295 let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
6296 return None;
6297 };
6298 let hex = sha256_hex_part(content_hash);
6299 let rel = format!(".vela/artifact-blobs/sha256/{hex}");
6300 let path = root.join(&rel);
6301 if let Some(parent) = path.parent() {
6302 std::fs::create_dir_all(parent).unwrap_or_else(|e| {
6303 fail(&format!(
6304 "Failed to create artifact blob directory {}: {e}",
6305 parent.display()
6306 ))
6307 });
6308 }
6309 if !path.is_file() {
6310 std::fs::write(&path, bytes)
6311 .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
6312 }
6313 Some(rel)
6314}
6315
6316fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6317 let mut out = BTreeMap::new();
6318 for pair in pairs {
6319 let Some((key, value)) = pair.split_once('=') else {
6320 fail(&format!("--metadata must be key=value, got {pair:?}"));
6321 };
6322 let key = key.trim();
6323 if key.is_empty() {
6324 fail("--metadata key must be non-empty");
6325 }
6326 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6327 }
6328 out
6329}
6330
/// Map an artifact kind to the provenance `source_type` recorded for it.
/// Unknown kinds fall back to "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    const KIND_TO_SOURCE: [(&str, &str); 6] = [
        ("clinical_trial_record", "clinical_trial"),
        ("protocol", "clinical_trial"),
        ("dataset", "data_release"),
        ("model_output", "model_output"),
        ("registry_record", "database_record"),
        ("lab_file", "lab_notebook"),
    ];
    KIND_TO_SOURCE
        .iter()
        .find(|(k, _)| *k == kind)
        .map(|(_, source)| *source)
        .unwrap_or("database_record")
}
6341
6342fn artifact_provenance(
6343 kind: &str,
6344 title: &str,
6345 url: Option<&str>,
6346 doi: Option<&str>,
6347 license: Option<&str>,
6348) -> crate::bundle::Provenance {
6349 crate::bundle::Provenance {
6350 source_type: artifact_source_type(kind).to_string(),
6351 doi: doi.map(str::to_string),
6352 pmid: None,
6353 pmc: None,
6354 openalex_id: None,
6355 url: url.map(str::to_string),
6356 title: title.to_string(),
6357 authors: Vec::new(),
6358 year: None,
6359 journal: None,
6360 license: license.map(str::to_string),
6361 publisher: None,
6362 funders: Vec::new(),
6363 extraction: crate::bundle::Extraction {
6364 method: "artifact_deposit".to_string(),
6365 model: None,
6366 model_version: None,
6367 extracted_at: chrono::Utc::now().to_rfc3339(),
6368 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6369 },
6370 review: None,
6371 citation_count: None,
6372 }
6373}
6374
/// Deposit a general artifact on the frontier.
///
/// Storage mode and locator are derived from the inputs:
/// - `--file` inside a vela repo → bytes are stored content-addressed under
///   `.vela/artifact-blobs/` ("local_blob", repo-relative locator);
/// - `--file` outside a vela repo → only the original path is recorded
///   ("local_file");
/// - `--url` without `--file` → "remote";
/// - neither → the default "pointer" mode with the supplied hash/URL.
/// `--content-hash` is required unless `--file` is given; when both are
/// present the hash is verified against the file bytes and a mismatch aborts.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    // Parse the tier first so an invalid value fails before any file I/O.
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // If the caller also supplied a hash, it must match the file bytes
        // (compared on the hex part, case-insensitively).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer a content-addressed blob inside the repo; fall back to
        // recording the local file path when not inside a vela repo.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without a file, the hash cannot be computed and must be supplied.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Source metadata defaults: source URL falls back to --url, title to the
    // artifact name.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    // The deposit (validation + event append) goes through state::add_artifact.
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind: {kind}");
        println!("  name: {name}");
        println!("  hash: {content_hash_for_print}");
        println!(
            "  {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6490
6491fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
6492 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6493 let filtered: Vec<&crate::bundle::Artifact> = project
6494 .artifacts
6495 .iter()
6496 .filter(|artifact| {
6497 target
6498 .map(|t| artifact.target_findings.iter().any(|f| f == t))
6499 .unwrap_or(true)
6500 })
6501 .collect();
6502
6503 if json_out {
6504 println!(
6505 "{}",
6506 serde_json::to_string_pretty(&json!({
6507 "ok": true,
6508 "command": "artifacts",
6509 "frontier": frontier.display().to_string(),
6510 "count": filtered.len(),
6511 "artifacts": filtered,
6512 }))
6513 .expect("serialize artifacts")
6514 );
6515 return;
6516 }
6517
6518 println!();
6519 println!(
6520 " {}",
6521 format!("VELA · ARTIFACTS · {}", frontier.display())
6522 .to_uppercase()
6523 .dimmed()
6524 );
6525 println!(" {}", style::tick_row(60));
6526 if filtered.is_empty() {
6527 println!(" (no artifacts registered)");
6528 return;
6529 }
6530 for artifact in filtered {
6531 println!(
6532 " · {} {} · {}",
6533 artifact.id.dimmed(),
6534 artifact.kind,
6535 artifact.name
6536 );
6537 if let Some(locator) = &artifact.locator {
6538 println!(" locator: {}", truncate(locator, 88));
6539 }
6540 if !artifact.target_findings.is_empty() {
6541 println!(" targets: {}", artifact.target_findings.join(", "));
6542 }
6543 }
6544}
6545
6546fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
6547 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6548 let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
6549 if json_out {
6550 println!(
6551 "{}",
6552 serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
6553 );
6554 if !audit.ok {
6555 std::process::exit(1);
6556 }
6557 return;
6558 }
6559
6560 println!();
6561 println!(
6562 " {}",
6563 format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
6564 .to_uppercase()
6565 .dimmed()
6566 );
6567 println!(" {}", style::tick_row(60));
6568 println!(" artifacts: {}", audit.artifact_count);
6569 println!(" checked local blobs: {}", audit.checked_local_blobs);
6570 println!(" local blob bytes: {}", audit.local_blob_bytes);
6571 if !audit.by_kind.is_empty() {
6572 let kinds = audit
6573 .by_kind
6574 .iter()
6575 .map(|(kind, count)| format!("{kind}:{count}"))
6576 .collect::<Vec<_>>()
6577 .join(", ");
6578 println!(" kinds: {kinds}");
6579 }
6580 if audit.ok {
6581 println!(" {} artifact audit passed.", style::ok("ok"));
6582 return;
6583 }
6584 for issue in &audit.issues {
6585 println!(
6586 " {} {} {}: {}",
6587 style::lost("invalid"),
6588 issue.id,
6589 issue.field,
6590 issue.message
6591 );
6592 }
6593 std::process::exit(1);
6594}
6595
6596fn cmd_decision_brief(frontier: &Path, json_out: bool) {
6597 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6598 let report = decision::load_decision_brief(frontier, &project);
6599 if json_out {
6600 println!(
6601 "{}",
6602 serde_json::to_string_pretty(&report).expect("serialize decision brief report")
6603 );
6604 if !report.ok {
6605 std::process::exit(1);
6606 }
6607 return;
6608 }
6609 println!();
6610 println!(
6611 " {}",
6612 format!("VELA · DECISION BRIEF · {}", project.project.name)
6613 .to_uppercase()
6614 .dimmed()
6615 );
6616 println!(" {}", style::tick_row(60));
6617 if !report.ok {
6618 print_projection_issues(&report.issues, report.error.as_deref());
6619 std::process::exit(1);
6620 }
6621 let brief = report
6622 .projection
6623 .as_ref()
6624 .expect("ok decision report carries projection");
6625 for question in &brief.questions {
6626 println!(" · {} · {}", question.id.dimmed(), question.title);
6627 println!(" answer: {}", wrap_line(&question.short_answer, 82));
6628 println!(" caveat: {}", wrap_line(&question.caveat, 82));
6629 println!(" support: {}", question.supporting_findings.join(", "));
6630 if !question.tension_findings.is_empty() {
6631 println!(" tensions: {}", question.tension_findings.join(", "));
6632 }
6633 if !question.gap_findings.is_empty() {
6634 println!(" gaps: {}", question.gap_findings.join(", "));
6635 }
6636 if !question.artifact_ids.is_empty() {
6637 println!(" artifacts: {}", question.artifact_ids.join(", "));
6638 }
6639 println!(
6640 " would change: {}",
6641 wrap_line(&question.what_would_change_this_answer, 82)
6642 );
6643 }
6644}
6645
6646fn cmd_trial_summary(frontier: &Path, json_out: bool) {
6647 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6648 let report = decision::load_trial_outcomes(frontier, &project);
6649 if json_out {
6650 println!(
6651 "{}",
6652 serde_json::to_string_pretty(&report).expect("serialize trial summary report")
6653 );
6654 if !report.ok {
6655 std::process::exit(1);
6656 }
6657 return;
6658 }
6659 println!();
6660 println!(
6661 " {}",
6662 format!("VELA · TRIAL SUMMARY · {}", project.project.name)
6663 .to_uppercase()
6664 .dimmed()
6665 );
6666 println!(" {}", style::tick_row(60));
6667 if !report.ok {
6668 print_projection_issues(&report.issues, report.error.as_deref());
6669 std::process::exit(1);
6670 }
6671 let outcomes = report
6672 .projection
6673 .as_ref()
6674 .expect("ok trial report carries projection");
6675 for row in &outcomes.rows {
6676 println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
6677 println!(" population: {}", wrap_line(&row.population, 82));
6678 println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
6679 println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
6680 println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
6681 println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
6682 println!(" status: {}", wrap_line(&row.regulatory_status, 82));
6683 if !row.finding_ids.is_empty() {
6684 println!(" findings: {}", row.finding_ids.join(", "));
6685 }
6686 if !row.artifact_ids.is_empty() {
6687 println!(" artifacts: {}", row.artifact_ids.join(", "));
6688 }
6689 }
6690}
6691
6692fn cmd_source_verification(frontier: &Path, json_out: bool) {
6693 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6694 let report = decision::load_source_verification(frontier, &project);
6695 if json_out {
6696 println!(
6697 "{}",
6698 serde_json::to_string_pretty(&report).expect("serialize source verification report")
6699 );
6700 if !report.ok {
6701 std::process::exit(1);
6702 }
6703 return;
6704 }
6705 println!();
6706 println!(
6707 " {}",
6708 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
6709 .to_uppercase()
6710 .dimmed()
6711 );
6712 println!(" {}", style::tick_row(60));
6713 if !report.ok {
6714 print_projection_issues(&report.issues, report.error.as_deref());
6715 std::process::exit(1);
6716 }
6717 let verification = report
6718 .projection
6719 .as_ref()
6720 .expect("ok source verification report carries projection");
6721 println!(" verified_at: {}", verification.verified_at);
6722 for source in &verification.sources {
6723 println!(" · {} · {}", source.id.dimmed(), source.title);
6724 println!(" agency: {}", source.agency);
6725 println!(" url: {}", truncate(&source.url, 88));
6726 println!(" status: {}", wrap_line(&source.current_status, 82));
6727 }
6728}
6729
6730fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
6731 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6732 let report = decision::load_source_ingest_plan(frontier, &project);
6733 if json_out {
6734 println!(
6735 "{}",
6736 serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
6737 );
6738 if !report.ok {
6739 std::process::exit(1);
6740 }
6741 return;
6742 }
6743 println!();
6744 println!(
6745 " {}",
6746 format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
6747 .to_uppercase()
6748 .dimmed()
6749 );
6750 println!(" {}", style::tick_row(60));
6751 if !report.ok {
6752 print_projection_issues(&report.issues, report.error.as_deref());
6753 std::process::exit(1);
6754 }
6755 let plan = report
6756 .projection
6757 .as_ref()
6758 .expect("ok source ingest plan report carries projection");
6759 println!(" verified_at: {}", plan.verified_at);
6760 println!(" entries: {}", plan.entries.len());
6761 for entry in &plan.entries {
6762 println!(
6763 " · {} · {} · {} · {}",
6764 entry.id.dimmed(),
6765 entry.category,
6766 entry.priority,
6767 entry.ingest_status
6768 );
6769 println!(" name: {}", wrap_line(&entry.name, 82));
6770 println!(" locator: {}", truncate(&entry.locator, 88));
6771 println!(" use: {}", wrap_line(&entry.target_use, 82));
6772 if let Some(id) = &entry.current_frontier_artifact_id {
6773 println!(" artifact: {id}");
6774 }
6775 if !entry.target_findings.is_empty() {
6776 println!(" findings: {}", entry.target_findings.join(", "));
6777 }
6778 }
6779}
6780
6781fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6782 if let Some(error) = error {
6783 println!(" {} {error}", style::lost("unavailable"));
6784 }
6785 for issue in issues {
6786 println!(
6787 " {} {}: {}",
6788 style::lost("invalid"),
6789 issue.path,
6790 issue.message
6791 );
6792 }
6793}
6794
/// Greedy word-wrap `text` to at most `max_chars` characters per line,
/// prefixing continuation lines with the console indent. Text that already
/// fits is returned unchanged (preserving its original whitespace).
fn wrap_line(text: &str, max_chars: usize) -> String {
    // Fast path: nothing to wrap.
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut current = 0usize; // visible chars on the line being built
    for word in text.split_whitespace() {
        let width = word.chars().count();
        if current == 0 {
            // First word of the output: no separator needed.
            wrapped.push_str(word);
            current = width;
        } else if current + 1 + width > max_chars {
            // Word (plus a separating space) would overflow: break the line.
            wrapped.push('\n');
            wrapped.push_str(" ");
            wrapped.push_str(word);
            current = width;
        } else {
            wrapped.push(' ');
            wrapped.push_str(word);
            current += 1 + width;
        }
    }
    wrapped
}
6819
6820fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6821 study.pointer(pointer).and_then(Value::as_str)
6822}
6823
6824fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6825 study
6826 .pointer(pointer)
6827 .and_then(Value::as_array)
6828 .map(|items| {
6829 items
6830 .iter()
6831 .filter_map(Value::as_str)
6832 .map(str::to_string)
6833 .collect()
6834 })
6835 .unwrap_or_default()
6836}
6837
6838fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6839 study
6840 .pointer(pointer)
6841 .and_then(Value::as_array)
6842 .map(|items| {
6843 items
6844 .iter()
6845 .filter_map(|item| item.get(field).and_then(Value::as_str))
6846 .map(str::to_string)
6847 .collect()
6848 })
6849 .unwrap_or_default()
6850}
6851
6852fn insert_string_vec_metadata(
6853 metadata: &mut BTreeMap<String, Value>,
6854 key: &str,
6855 values: Vec<String>,
6856) {
6857 if values.is_empty() {
6858 return;
6859 }
6860 metadata.insert(
6861 key.to_string(),
6862 Value::Array(values.into_iter().map(Value::String).collect()),
6863 );
6864}
6865
/// Import a ClinicalTrials.gov study record as a frontier artifact.
///
/// The record is read from `input_json` when supplied, otherwise fetched from
/// the ClinicalTrials.gov v2 API for `nct_id`. The JSON is canonicalized and
/// content-hashed, selected protocol fields are copied into the artifact
/// metadata, and the artifact is appended to frontier state via
/// `state::add_artifact`. Prints a JSON summary or a console report.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a local JSON file when given; otherwise hit the live API.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Treat HTTP error statuses (e.g. 404) as fetch failures too.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Canonical bytes give a content hash that is stable across key ordering.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Store the blob locally when possible; fall back to the API URL locator.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the nctId recorded inside the payload over the CLI-supplied one.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Brief title, falling back to official title, then to the NCT id itself.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Copy selected scalar status fields when present in the record.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Array-valued protocol fields; empty arrays are omitted by the helper.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    // Append to frontier state; this records the applied event id.
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
7042
/// Record a replication attempt for finding `target` in the frontier.
///
/// Validates the outcome keyword and target id, builds `Conditions`,
/// `Evidence`, and `Provenance` records from the CLI arguments, appends a
/// `Replication`, and saves the frontier. Unless `no_cascade` is set, the
/// outcome is propagated to dependent findings via
/// `propagate::propagate_correction`. An already-existing replication id is
/// reported and skipped without error.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    // Validate inputs up front, before touching the frontier on disk.
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Heuristically derive condition flags from keywords in the free-text
    // description. NOTE(review): keyword matching is approximate (e.g.
    // "phase " can match non-trial phrasing) — confirm this is acceptable.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Minimal evidence record: only fields derivable from the CLI args are set.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    // Provenance is manual curation against the cited source paper.
    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotency: a duplicate replication id is reported and skipped, not an
    // error, so re-running the same command is safe.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Cascade the outcome to dependent findings unless explicitly disabled;
    // the resulting review events are folded into the project before saving.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" target: {target}");
        println!(" outcome: {outcome}");
        println!(" attempted by: {attempted_by}");
        println!(" conditions: {conditions_text}");
        println!(" source: {source_title}");
        if let Some(d) = doi {
            println!(" doi: {d}");
        }
        println!();
        println!(
            " {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
7264
7265fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
7267 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
7268 let filtered: Vec<&crate::bundle::Replication> = project
7269 .replications
7270 .iter()
7271 .filter(|r| target.is_none_or(|t| r.target_finding == t))
7272 .collect();
7273
7274 if json {
7275 let payload = json!({
7276 "ok": true,
7277 "command": "replications",
7278 "frontier": frontier.display().to_string(),
7279 "filter_target": target,
7280 "count": filtered.len(),
7281 "replications": filtered,
7282 });
7283 println!(
7284 "{}",
7285 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
7286 );
7287 return;
7288 }
7289
7290 println!();
7291 let header = match target {
7292 Some(t) => format!("VELA · REPLICATIONS · {t}"),
7293 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
7294 };
7295 println!(" {}", header.to_uppercase().dimmed());
7296 println!(" {}", style::tick_row(60));
7297 if filtered.is_empty() {
7298 println!(" (no replications recorded)");
7299 return;
7300 }
7301 for rep in &filtered {
7302 let outcome_chip = match rep.outcome.as_str() {
7303 "replicated" => style::ok(&rep.outcome),
7304 "failed" => style::lost(&rep.outcome),
7305 "partial" => style::warn(&rep.outcome),
7306 _ => rep.outcome.clone().normal().to_string(),
7307 };
7308 println!(
7309 " · {} {} by {}",
7310 rep.id.dimmed(),
7311 outcome_chip,
7312 rep.attempted_by
7313 );
7314 println!(" target: {}", rep.target_finding);
7315 if !rep.conditions.text.is_empty() {
7316 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7317 }
7318 if !rep.provenance.title.is_empty() {
7319 println!(" source: {}", truncate(&rep.provenance.title, 80));
7320 }
7321 }
7322}
7323
/// Dispatch `vela ingest` for a path or a doi:/pmid:/nct: URI to the matching
/// specialized command.
///
/// Files route by extension (.pdf → scout, .md/.markdown → compile-notes,
/// .csv/.tsv → compile-data, .json → artifact-to-state); directories are
/// scanned and each handlable type is processed in sequence, falling back to
/// compile-code when nothing matches. Nonexistent or unsupported inputs fail.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    // Metadata-only URIs: fetch and explain that no frontier state changed.
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                " vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                " next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    // Lower-cased extension drives single-file dispatch below.
    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count handlable file types so we can decide between a
        // single dispatch, a multi-type sequence, or the compile-code fallback.
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles (".env" etc.) are not reported as
                            // unhandled extensions.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing handlable: treat the directory as a code tree.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                " vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                " pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each handler once over the directory; the json
        // handler is invoked per matching file.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                " vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    // Neither file nor directory (e.g. broken symlink, special file).
    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7523
7524#[allow(clippy::too_many_arguments)]
7525async fn cmd_compile_data(
7527 root: &Path,
7528 frontier: &Path,
7529 backend: Option<&str>,
7530 sample_rows: Option<usize>,
7531 dry_run: bool,
7532 json_out: bool,
7533) {
7534 match DATASETS_HANDLER.get() {
7535 Some(handler) => {
7536 handler(
7537 root.to_path_buf(),
7538 frontier.to_path_buf(),
7539 backend.map(String::from),
7540 sample_rows,
7541 dry_run,
7542 json_out,
7543 )
7544 .await;
7545 }
7546 None => {
7547 eprintln!(
7548 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7549 style::err_prefix()
7550 );
7551 std::process::exit(1);
7552 }
7553 }
7554}
7555
7556async fn cmd_review_pending(
7559 frontier: &Path,
7560 backend: Option<&str>,
7561 max_proposals: Option<usize>,
7562 batch_size: usize,
7563 dry_run: bool,
7564 json_out: bool,
7565) {
7566 match REVIEWER_HANDLER.get() {
7567 Some(handler) => {
7568 handler(
7569 frontier.to_path_buf(),
7570 backend.map(String::from),
7571 max_proposals,
7572 batch_size,
7573 dry_run,
7574 json_out,
7575 )
7576 .await;
7577 }
7578 None => {
7579 eprintln!(
7580 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7581 style::err_prefix()
7582 );
7583 std::process::exit(1);
7584 }
7585 }
7586}
7587
7588async fn cmd_find_tensions(
7591 frontier: &Path,
7592 backend: Option<&str>,
7593 max_findings: Option<usize>,
7594 dry_run: bool,
7595 json_out: bool,
7596) {
7597 match TENSIONS_HANDLER.get() {
7598 Some(handler) => {
7599 handler(
7600 frontier.to_path_buf(),
7601 backend.map(String::from),
7602 max_findings,
7603 dry_run,
7604 json_out,
7605 )
7606 .await;
7607 }
7608 None => {
7609 eprintln!(
7610 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7611 style::err_prefix()
7612 );
7613 std::process::exit(1);
7614 }
7615 }
7616}
7617
7618async fn cmd_plan_experiments(
7621 frontier: &Path,
7622 backend: Option<&str>,
7623 max_findings: Option<usize>,
7624 dry_run: bool,
7625 json_out: bool,
7626) {
7627 match EXPERIMENTS_HANDLER.get() {
7628 Some(handler) => {
7629 handler(
7630 frontier.to_path_buf(),
7631 backend.map(String::from),
7632 max_findings,
7633 dry_run,
7634 json_out,
7635 )
7636 .await;
7637 }
7638 None => {
7639 eprintln!(
7640 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7641 style::err_prefix()
7642 );
7643 std::process::exit(1);
7644 }
7645 }
7646}
7647
7648async fn cmd_compile_code(
7651 root: &Path,
7652 frontier: &Path,
7653 backend: Option<&str>,
7654 max_files: Option<usize>,
7655 dry_run: bool,
7656 json_out: bool,
7657) {
7658 match CODE_HANDLER.get() {
7659 Some(handler) => {
7660 handler(
7661 root.to_path_buf(),
7662 frontier.to_path_buf(),
7663 backend.map(String::from),
7664 max_files,
7665 dry_run,
7666 json_out,
7667 )
7668 .await;
7669 }
7670 None => {
7671 eprintln!(
7672 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7673 style::err_prefix()
7674 );
7675 std::process::exit(1);
7676 }
7677 }
7678}
7679
7680async fn cmd_compile_notes(
7685 vault: &Path,
7686 frontier: &Path,
7687 backend: Option<&str>,
7688 max_files: Option<usize>,
7689 max_items_per_category: Option<usize>,
7690 dry_run: bool,
7691 json_out: bool,
7692) {
7693 match NOTES_HANDLER.get() {
7694 Some(handler) => {
7695 handler(
7696 vault.to_path_buf(),
7697 frontier.to_path_buf(),
7698 backend.map(String::from),
7699 max_files,
7700 max_items_per_category,
7701 dry_run,
7702 json_out,
7703 )
7704 .await;
7705 }
7706 None => {
7707 eprintln!(
7708 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7709 style::err_prefix()
7710 );
7711 std::process::exit(1);
7712 }
7713 }
7714}
7715
7716async fn cmd_scout(
7723 folder: &Path,
7724 frontier: &Path,
7725 backend: Option<&str>,
7726 dry_run: bool,
7727 json_out: bool,
7728) {
7729 match SCOUT_HANDLER.get() {
7730 Some(handler) => {
7731 handler(
7732 folder.to_path_buf(),
7733 frontier.to_path_buf(),
7734 backend.map(String::from),
7735 dry_run,
7736 json_out,
7737 )
7738 .await;
7739 }
7740 None => {
7741 eprintln!(
7742 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7743 style::err_prefix()
7744 );
7745 std::process::exit(1);
7746 }
7747 }
7748}
7749
/// Walk `root` recursively and collect paths that look like secret material
/// (private keys, credentials) so they can be flagged before publishing.
///
/// Skips VCS/build directories (`.git`, `target`, `node_modules`, `dist`,
/// `build`), public-key files (`*.pub`, `*.pubkey`, `public.key`), and
/// unreadable directories. Results are sorted for deterministic output.
// NOTE: the previous `#[allow(clippy::too_many_arguments)]` here was dead —
// this function takes a single argument — so it has been removed.
pub fn scan_for_sensitive_paths(root: &Path) -> Vec<PathBuf> {
    let mut hits: Vec<PathBuf> = Vec::new();
    // Directories that never hold project secrets worth flagging.
    let skip_dirs: &[&str] = &[".git", "target", "node_modules", "dist", "build"];
    // Extensions and name fragments that usually indicate key/credential files.
    let bad_exts: &[&str] = &["key", "pem", "p12", "pfx"];
    let bad_substrings: &[&str] = &["private", "secret", "credential"];
    // Iterative depth-first walk; avoids recursion on deep trees.
    let mut stack: Vec<PathBuf> = vec![root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        // Unreadable directories are skipped rather than treated as errors.
        let Ok(entries) = std::fs::read_dir(&dir) else {
            continue;
        };
        for entry in entries.flatten() {
            let path = entry.path();
            // Non-UTF-8 names cannot be matched against the patterns; skip.
            let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
                continue;
            };
            let lower = name.to_lowercase();
            if path.is_dir() {
                if !skip_dirs.contains(&name) {
                    stack.push(path);
                }
                continue;
            }
            // Public key material is safe to publish.
            if lower.ends_with(".pub") || lower.ends_with(".pubkey") || lower == "public.key" {
                continue;
            }
            let ext = path
                .extension()
                .and_then(|e| e.to_str())
                .map(str::to_lowercase)
                .unwrap_or_default();
            if bad_exts.contains(&ext.as_str())
                || bad_substrings.iter().any(|s| lower.contains(s))
            {
                hits.push(path);
            }
        }
    }
    // Deterministic ordering for stable CLI output and tests.
    hits.sort();
    hits
}
7811
/// Entry point for `vela check`.
///
/// Runs schema validation, methodology/graph lint with event replay and
/// signal analysis, and (optionally) the conformance suite, depending on
/// which selector flags are set:
/// - `json_output`: emit one machine-readable report (see
///   [`check_json_payload`]) and exit non-zero unless its `ok` field is
///   `true`. All other output modes are skipped.
/// - `strict`: additionally audit the source tree for sensitive paths
///   before any checks, and escalate review-queue / proof-readiness
///   problems to a non-zero exit.
/// - With none of `schema`/`stats`/`conformance_flag`/`schema_only` set,
///   every check runs (`run_all`).
/// - `fix` is accepted for CLI compatibility but currently unused.
///
/// Failures terminate the process via `fail`/`std::process::exit` rather
/// than returning an error.
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits the human-readable flow entirely.
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Strict mode: refuse to proceed if key material or other sensitive
    // paths are present under the source tree (threat model item A17).
    if strict && let Some(src) = source {
        let hits = scan_for_sensitive_paths(src);
        if !hits.is_empty() {
            eprintln!(
                "{} secret-audit: {} sensitive path(s) found under {}",
                style::err_prefix(),
                hits.len(),
                src.display()
            );
            for hit in &hits {
                eprintln!(" - {}", hit.display());
            }
            eprintln!(
                " hint: add `keys/` and `*.key` to .gitignore so these never reach a public repo (see THREAT_MODEL.md A17)"
            );
            std::process::exit(1);
        }
    }

    // No selector flag at all means "run everything".
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    // Stats/lint pass: skipped entirely under --schema-only.
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!(" - {conflict}");
            }
        }
        // Only mention signatures when at least one finding is signed;
        // verification errors are silently ignored here (best effort).
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay conflicts always fail; a non-empty review queue or an
        // unready proof state only fail under --strict.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        // When conformance was requested explicitly, run it even if the
        // directory check would fail (conformance::run reports the error);
        // under run_all, only run when the directory actually exists.
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                " conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    // `--fix` is parsed but not implemented yet; silence the unused warning.
    let _ = fix;
}
7928
/// Builds the machine-readable report emitted by `vela check --json`.
///
/// Aggregates, in order: schema validation, methodology and frontier-graph
/// lint, event replay, state integrity, derived-projection drift (source
/// records / evidence atoms / condition records that are derivable but not
/// materialized), proposal reviewer-identity checks, and signal analysis.
/// Every finding is flattened into one `diagnostics` array with a uniform
/// shape (`severity` / `rule_id` / `fixable` / `normalize_action`).
///
/// The top-level `ok` field is `true` when there are no errors and, under
/// `strict`, additionally no warnings and no strict blockers. `schema_only`
/// skips the lint passes (and marks several checks as skipped).
fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
    let report = validate::validate(src);
    // Best-effort load: checks that need the full frontier are skipped
    // (and reported as skipped) when the source fails to load.
    let loaded = repo::load_from_path(src).ok();
    let (method_report, graph_report) = if schema_only {
        (None, None)
    } else if let Some(frontier) = loaded.as_ref() {
        (
            Some(lint::lint(frontier, None, None)),
            Some(lint::lint_frontier(frontier)),
        )
    } else {
        (None, None)
    };
    let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
    let mut diagnostics = Vec::new();
    // Schema validation errors come first; their suggestion/fixable fields
    // are derived from the error text by the schema_error_* helpers.
    diagnostics.extend(report.errors.iter().map(|e| {
        json!({
            "severity": "error",
            "rule_id": "schema",
            "finding_id": null,
            "file": &e.file,
            "field_path": null,
            "message": &e.error,
            "suggestion": schema_error_suggestion(&e.error),
            "fixable": schema_error_fix(&e.error),
            "normalize_action": schema_error_action(&e.error),
        })
    }));
    // Fold both lint reports into the shared diagnostics list, tagged with
    // the check they came from.
    for (check_id, lint_report) in [
        ("methodology", method_report.as_ref()),
        ("frontier_graph", graph_report.as_ref()),
    ] {
        if let Some(lint_report) = lint_report {
            diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
                json!({
                    "severity": d.severity.to_string(),
                    "rule_id": &d.rule_id,
                    "check": check_id,
                    "finding_id": &d.finding_id,
                    "field_path": null,
                    "message": &d.message,
                    "suggestion": &d.suggestion,
                    "fixable": false,
                    "normalize_action": null,
                })
            }));
        }
    }
    let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
    let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
    let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
    let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
    let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
    let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
    let replay_report = loaded.as_ref().map(events::replay_report);
    // NOTE(review): this branch looks inverted relative to the `skipped`
    // flag reported for the state_integrity check below
    // (`schema_only || loaded.is_none()`): when `schema_only` is set the
    // in-memory analysis still runs here, while the full-path analysis
    // runs otherwise. Confirm whether the two arms should be swapped.
    let state_integrity_report = if schema_only {
        loaded.as_ref().map(state_integrity::analyze)
    } else {
        state_integrity::analyze_path(src).ok()
    };
    // Replay conflicts are promoted to error-level diagnostics.
    if let Some(replay) = replay_report.as_ref()
        && !replay.ok
    {
        diagnostics.extend(replay.conflicts.iter().map(|conflict| {
            json!({
                "severity": "error",
                "rule_id": "event_replay",
                "check": "events",
                "finding_id": null,
                "field_path": null,
                "message": conflict,
                "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
                "fixable": false,
                "normalize_action": null,
            })
        }));
    }
    // Replay failure counts as a single error regardless of conflict count.
    let event_errors = replay_report
        .as_ref()
        .map_or(0, |replay| usize::from(!replay.ok));
    let state_integrity_errors = state_integrity_report
        .as_ref()
        .map_or(0, |report| report.structural_errors.len());
    // Summaries for the report body; defaults when the frontier failed to
    // load so the payload shape stays stable.
    let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
        .as_ref()
        .map(|frontier| {
            (
                sources::source_summary(frontier),
                sources::evidence_summary(frontier),
                sources::condition_summary(frontier),
                proposals::summary(frontier),
                proposals::proof_state_json(&frontier.proof_state),
            )
        })
        .unwrap_or_else(|| {
            (
                sources::SourceRegistrySummary::default(),
                sources::EvidenceAtomSummary::default(),
                sources::ConditionSummary::default(),
                proposals::ProposalSummary::default(),
                Value::Null,
            )
        });
    let signature_report = loaded
        .as_ref()
        .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
    // Projection drift: compare what is derivable from the frontier with
    // what is actually materialized, and flag the gaps as fixable
    // warnings (repairable via `vela normalize`).
    if let Some(frontier) = loaded.as_ref()
        && !schema_only
    {
        let projection = sources::derive_projection(frontier);
        let existing_sources = frontier
            .sources
            .iter()
            .map(|source| source.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_atoms = frontier
            .evidence_atoms
            .iter()
            .map(|atom| atom.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_conditions = frontier
            .condition_records
            .iter()
            .map(|record| record.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        for source in projection
            .sources
            .iter()
            .filter(|source| !existing_sources.contains(source.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_source_record",
                "check": "source_registry",
                "finding_id": source.finding_ids.first(),
                "field_path": "sources",
                "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
                "suggestion": "Run `vela normalize` to materialize source records before proof export.",
                "fixable": true,
                "normalize_action": "materialize_source_record",
            }));
        }
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_atom",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms",
                "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
                "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
                "fixable": true,
                "normalize_action": "materialize_evidence_atom",
            }));
        }
        // Atoms without a locator are warnings but not auto-fixable: the
        // locator has to come from a human or the original source.
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| atom.locator.is_none())
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_locator",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms[].locator",
                "message": format!("Evidence atom {} has no source locator.", atom.id),
                "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
        for condition in projection
            .condition_records
            .iter()
            .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "condition_record_missing",
                "check": "conditions",
                "finding_id": condition.finding_id,
                "field_path": "condition_records",
                "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
                "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
                "fixable": true,
                "normalize_action": "materialize_condition_record",
            }));
        }
        // Accepted/applied proposals must carry a real reviewer identity;
        // missing or placeholder reviewers are hard errors.
        for proposal in frontier.proposals.iter().filter(|proposal| {
            matches!(proposal.status.as_str(), "accepted" | "applied")
                && proposal
                    .reviewed_by
                    .as_deref()
                    .is_none_or(proposals::is_placeholder_reviewer)
        }) {
            diagnostics.push(json!({
                "severity": "error",
                "rule_id": "reviewer_identity_missing",
                "check": "proposals",
                "finding_id": proposal.target.id,
                "field_path": "proposals[].reviewed_by",
                "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
                "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
    }
    // Signals are computed over the fully-populated diagnostics list.
    let signal_report = loaded
        .as_ref()
        .map(|frontier| signals::analyze(frontier, &diagnostics))
        .unwrap_or_else(empty_signal_report);
    let errors =
        report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
    let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
    let infos = method_infos + graph_infos;
    // Signals that explicitly block "strict_check" fail strict runs.
    let strict_blockers = signal_report
        .signals
        .iter()
        .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
        .count();
    let fixable = diagnostics
        .iter()
        .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
        .count();
    let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));

    json!({
        "ok": ok,
        "command": "check",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": src.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "summary": {
            "status": if ok { "pass" } else { "fail" },
            "checked_findings": report.total_files,
            "valid_findings": report.valid,
            "invalid_findings": report.invalid,
            "errors": errors,
            "warnings": warnings,
            "info": infos,
            "fixable": fixable,
            "strict": strict,
            "schema_only": schema_only,
        },
        "checks": [
            {
                "id": "schema",
                "status": if report.invalid == 0 { "pass" } else { "fail" },
                "checked": report.total_files,
                "failed": report.invalid,
                "errors": report.errors.iter().map(|e| json!({
                    "file": e.file,
                    "message": e.error,
                })).collect::<Vec<_>>(),
            },
            {
                "id": "methodology",
                "status": if method_errors == 0 { "pass" } else { "fail" },
                "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": method_errors,
                "warnings": method_warnings,
                "info": method_infos,
                "skipped": schema_only,
            },
            {
                "id": "frontier_graph",
                "status": if graph_errors == 0 { "pass" } else { "fail" },
                "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": graph_errors,
                "warnings": graph_warnings,
                "info": graph_infos,
                "skipped": schema_only,
            },
            {
                "id": "signals",
                "status": if strict_blockers == 0 { "pass" } else { "fail" },
                "checked": signal_report.signals.len(),
                "failed": strict_blockers,
                "warnings": signal_report.proof_readiness.warnings,
                "skipped": loaded.is_none(),
                "blockers": signal_report.signals.iter()
                    .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
                    .map(|s| json!({
                        "id": s.id,
                        "kind": s.kind,
                        "severity": s.severity,
                        "reason": s.reason,
                    }))
                    .collect::<Vec<_>>(),
            },
            {
                "id": "events",
                "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
                "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
                "failed": event_errors,
                "skipped": schema_only || loaded.is_none(),
            },
            {
                "id": "state_integrity",
                "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
                "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
                "failed": state_integrity_errors,
                "skipped": schema_only || loaded.is_none(),
            }
        ],
        "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
        "replay": replay_report,
        "state_integrity": state_integrity_report,
        "source_registry": source_registry,
        "evidence_atoms": evidence_atoms,
        "conditions": conditions,
        "proposals": proposal_summary,
        "proof_state": proof_state,
        "signatures": signature_report,
        "diagnostics": diagnostics,
        "signals": signal_report.signals,
        "review_queue": signal_report.review_queue,
        "proof_readiness": signal_report.proof_readiness,
        "repair_plan": build_repair_plan(&diagnostics),
    })
}
8258
/// Entry point for `vela normalize`: apply mechanical repairs to a
/// frontier (entity-type/name fixes, confidence recomputation, optional
/// content-address id rewrites, materialization of derivable source /
/// evidence / condition records, optional provenance resync).
///
/// Write modes are mutually exclusive: `--write` saves back to the
/// detected source, `--out` saves to a new path, and neither means a dry
/// run. `--id-map` (only valid with `--rewrite-ids`) writes the old→new
/// id mapping to the given path. Invalid flag combinations, packet-dir
/// sources, and frontiers with canonical events (for write modes) all
/// terminate via `fail`.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag sanity checks before touching the filesystem.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packets are immutable exports; they are never normalized.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // A frontier with any event beyond the initial "frontier.created" has
    // canonical history; rewriting it in place would invalidate that log.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Snapshot counts so materialization deltas can be reported below.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Map each finding whose id no longer matches its content address
        // to the recomputed address.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Abort if two findings would collapse onto one address (would
        // silently merge distinct findings).
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                // Keep the old id reachable as the previous version.
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Second pass: retarget links that pointed at rewritten ids.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    // saturating_sub guards against records being removed (count shrink).
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            " entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            " would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8446
/// Entry point for `vela proof`: export a proof packet for the frontier,
/// optionally run a gold-standard benchmark over it, validate the packet,
/// and record the export in the frontier's proof state.
///
/// Only the `bbb-alzheimer` template is currently supported. `gold`
/// points at a benchmark suite whose failure aborts the command. When
/// `record_proof_state` is set the updated frontier (with the export
/// recorded and stats recomputed) is saved back to `frontier`. All
/// failures terminate via `fail`.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // Optional benchmark: run the suite, embed its summary in the packet,
    // and abort unless the summary reports `ok: true`.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes in the in-memory frontier regardless of
    // whether it gets persisted below.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8550
/// Entry point for `vela status`: summarize a frontier's inbox, causal
/// audit, replications, and federation state, as JSON or a styled table.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Count pending proposals, broken down by kind.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Track the latest sync and conflict events plus the conflict total.
    // The strict `>` comparison keeps the first-seen event among equal
    // timestamps.
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication roll-up: distinct findings with at least one successful
    // replication, plus the count of failed attempts.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable report below.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    // Causal audit chip: underidentified findings escalate the severity
    // style; a fully identified frontier gets a plain "ok" line.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
8748
8749fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8751 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8752 let mut events: Vec<&crate::events::StateEvent> = project
8753 .events
8754 .iter()
8755 .filter(|e| match kind_filter {
8756 Some(k) => e.kind.contains(k),
8757 None => true,
8758 })
8759 .collect();
8760 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8761 events.truncate(limit);
8762
8763 if json {
8764 let payload: Vec<_> = events
8765 .iter()
8766 .map(|e| {
8767 json!({
8768 "id": e.id,
8769 "kind": e.kind,
8770 "actor": e.actor.id,
8771 "target": &e.target.id,
8772 "target_type": &e.target.r#type,
8773 "timestamp": e.timestamp,
8774 "reason": e.reason,
8775 })
8776 })
8777 .collect();
8778 println!(
8779 "{}",
8780 serde_json::to_string_pretty(&json!({
8781 "ok": true,
8782 "command": "log",
8783 "events": payload,
8784 }))
8785 .expect("serialize log")
8786 );
8787 return;
8788 }
8789
8790 println!();
8791 println!(
8792 " {}",
8793 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8794 .to_uppercase()
8795 .dimmed()
8796 );
8797 println!(" {}", style::tick_row(60));
8798 if events.is_empty() {
8799 println!(" (no events)");
8800 return;
8801 }
8802 for e in &events {
8803 let when = fmt_timestamp(&e.timestamp);
8804 let target_short = if e.target.id.len() > 22 {
8805 format!("{}…", &e.target.id[..21])
8806 } else {
8807 e.target.id.clone()
8808 };
8809 let reason: String = e.reason.chars().take(70).collect();
8810 println!(
8811 " {:<19} {:<32} {:<24} {}",
8812 when, e.kind, target_short, reason
8813 );
8814 }
8815 println!();
8816}
8817
8818fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8820 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8821
8822 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8825 std::collections::HashMap::new();
8826 for p in &project.proposals {
8827 if p.kind != "finding.note" {
8828 continue;
8829 }
8830 if p.actor.id != "agent:reviewer-agent" {
8831 continue;
8832 }
8833 let reason = &p.reason;
8834 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8835 continue;
8836 };
8837 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8838 let extract = |k: &str| -> f64 {
8839 let pat = format!("{k} ");
8840 text.find(&pat)
8841 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8842 .and_then(|t| t.parse::<f64>().ok())
8843 .unwrap_or(0.0)
8844 };
8845 score_map.insert(
8846 target.to_string(),
8847 (
8848 extract("plausibility"),
8849 extract("evidence"),
8850 extract("scope"),
8851 extract("duplicate-risk"),
8852 ),
8853 );
8854 }
8855
8856 let mut pending: Vec<&crate::proposals::StateProposal> = project
8857 .proposals
8858 .iter()
8859 .filter(|p| {
8860 p.status == "pending_review"
8861 && match kind_filter {
8862 Some(k) => p.kind.contains(k),
8863 None => true,
8864 }
8865 })
8866 .collect();
8867 pending.sort_by(|a, b| {
8869 let sa = score_map
8870 .get(&a.id)
8871 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8872 let sb = score_map
8873 .get(&b.id)
8874 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8875 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8876 });
8877 pending.truncate(limit);
8878
8879 if json {
8880 let payload: Vec<_> = pending
8881 .iter()
8882 .map(|p| {
8883 let assertion_text = p
8884 .payload
8885 .get("finding")
8886 .and_then(|f| f.get("assertion"))
8887 .and_then(|a| a.get("text"))
8888 .and_then(|t| t.as_str());
8889 let assertion_type = p
8890 .payload
8891 .get("finding")
8892 .and_then(|f| f.get("assertion"))
8893 .and_then(|a| a.get("type"))
8894 .and_then(|t| t.as_str());
8895 let composite = score_map
8896 .get(&p.id)
8897 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8898 json!({
8899 "proposal_id": p.id,
8900 "kind": p.kind,
8901 "actor": p.actor,
8902 "reason": p.reason,
8903 "assertion_text": assertion_text,
8904 "assertion_type": assertion_type,
8905 "reviewer_composite": composite,
8906 })
8907 })
8908 .collect();
8909 println!(
8910 "{}",
8911 serde_json::to_string_pretty(&json!({
8912 "ok": true,
8913 "command": "inbox",
8914 "shown": pending.len(),
8915 "proposals": payload,
8916 }))
8917 .expect("serialize inbox")
8918 );
8919 return;
8920 }
8921
8922 println!();
8923 println!(
8924 " {}",
8925 format!(
8926 "VELA · INBOX · {} ({} pending shown)",
8927 path.display(),
8928 pending.len()
8929 )
8930 .to_uppercase()
8931 .dimmed()
8932 );
8933 println!(" {}", style::tick_row(60));
8934 if pending.is_empty() {
8935 println!(" (inbox clean)");
8936 return;
8937 }
8938 for p in &pending {
8939 let assertion_text = p
8940 .payload
8941 .get("finding")
8942 .and_then(|f| f.get("assertion"))
8943 .and_then(|a| a.get("text"))
8944 .and_then(|t| t.as_str())
8945 .unwrap_or("");
8946 let assertion_type = p
8947 .payload
8948 .get("finding")
8949 .and_then(|f| f.get("assertion"))
8950 .and_then(|a| a.get("type"))
8951 .and_then(|t| t.as_str())
8952 .unwrap_or("");
8953 let composite = score_map
8954 .get(&p.id)
8955 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8956 let score_str = composite
8957 .map(|c| format!("[{:.2}]", c))
8958 .unwrap_or_else(|| "[—] ".to_string());
8959 let kind_short = if p.kind.len() > 12 {
8960 format!("{}…", &p.kind[..11])
8961 } else {
8962 p.kind.clone()
8963 };
8964 let summary: String = if !assertion_text.is_empty() {
8965 assertion_text.chars().take(80).collect()
8966 } else {
8967 p.reason.chars().take(80).collect()
8968 };
8969 println!(
8970 " {} {} {:<13} {:<18} {}",
8971 score_str, p.id, kind_short, assertion_type, summary
8972 );
8973 }
8974 println!();
8975}
8976
8977fn cmd_ask(path: &Path, question: &str, json: bool) {
8982 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8983
8984 if question.trim().is_empty() {
8985 use std::io::{BufRead, Write};
8987 println!();
8988 println!(
8989 " {}",
8990 format!("VELA · ASK · {}", path.display())
8991 .to_uppercase()
8992 .dimmed()
8993 );
8994 println!(" {}", style::tick_row(60));
8995 println!(" Ask a question. Type `exit` to quit.");
8996 println!(" Examples:");
8997 println!(" · what's pending?");
8998 println!(" · what's underidentified?");
8999 println!(" · how many findings?");
9000 println!(" · what changed recently?");
9001 println!(" · who has what calibration?");
9002 println!();
9003 let stdin = std::io::stdin();
9004 let mut stdout = std::io::stdout();
9005 loop {
9006 print!(" ask> ");
9007 stdout.flush().ok();
9008 let mut line = String::new();
9009 if stdin.lock().read_line(&mut line).is_err() {
9010 break;
9011 }
9012 let q = line.trim();
9013 if q.is_empty() {
9014 continue;
9015 }
9016 if matches!(q, "exit" | "quit" | "q") {
9017 break;
9018 }
9019 answer(&project, q, false);
9020 }
9021 return;
9022 }
9023
9024 answer(&project, question, json);
9025}
9026
/// Route a free-text question to the closest matching canned report.
///
/// Matching is keyword-based on the lowercased question; the first
/// branch that matches wins and returns. Each branch prints either
/// pretty JSON (when `json` is true) or a human-readable summary.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Pending-review proposals, grouped by kind.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        // BTreeMap so the per-kind counts print in stable sorted order.
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Causal identifiability audit of the frontier.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                // Show at most 8 underidentified findings, truncated to 70 chars.
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Most recent events on the log (newest first, max 8).
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Simple entity counts across the project.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            // NOTE(review): the text output also lists datasets and code
            // artifacts, which the JSON payload above omits — confirm
            // whether that asymmetry is intentional.
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Per-actor calibration (Brier scores) over predictions/resolutions.
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                // Brier score may be absent (no resolved predictions yet).
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Federation status: registered peers and conflict events.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // No keyword matched: tell the user what this router understands.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
9258
9259fn frontier_label(p: &crate::project::Project) -> String {
9260 if p.project.name.trim().is_empty() {
9261 "(unnamed)".to_string()
9262 } else {
9263 p.project.name.clone()
9264 }
9265}
9266
9267fn fmt_timestamp(ts: &str) -> String {
9268 chrono::DateTime::parse_from_rfc3339(ts)
9271 .map(|dt| dt.format("%m-%d %H:%M").to_string())
9272 .unwrap_or_else(|_| ts.chars().take(16).collect())
9273}
9274
/// `vela stats`: print a formatted summary report for a frontier file.
fn cmd_stats(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let s = &frontier.stats;
    println!();
    println!(" {}", "FRONTIER · V0.36.0".dimmed());
    println!(" {}", frontier.project.name.bold());
    println!(" {}", style::tick_row(60));
    println!(" id: {}", frontier.frontier_id());
    println!(" compiled: {}", frontier.project.compiled_at);
    println!(" papers: {}", frontier.project.papers_processed);
    println!(" findings: {}", s.findings);
    println!(" links: {}", s.links);
    println!(" replicated: {}", s.replicated);
    println!(" avg confidence: {}", s.avg_confidence);
    println!(" gaps: {}", s.gaps);
    println!(" contested: {}", s.contested);
    println!(" reviewed: {}", s.human_reviewed);
    println!(" proposals: {}", s.proposal_count);
    println!(
        " recorded proof: {}",
        frontier.proof_state.latest_packet.status
    );
    // Clarify that the recorded proof status is only metadata; the
    // actual packet files are verified by a separate command.
    if frontier.proof_state.latest_packet.status != "never_exported" {
        println!(
            " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
        );
    }
    // Per-category counts, sorted by descending count.
    if !s.categories.is_empty() {
        println!();
        println!(" {}", "categories".dimmed());
        let mut categories = s.categories.iter().collect::<Vec<_>>();
        categories.sort_by(|a, b| b.1.cmp(a.1));
        for (category, count) in categories {
            println!(" {category}: {}", count);
        }
    }
    println!();
    println!(" {}", style::tick_row(60));
    println!();
}
9315
/// Dispatch a `vela proposals …` subcommand.
///
/// Every arm builds a JSON payload, prints it pretty when `--json` is
/// set, and otherwise prints a short human-readable summary.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report before/after deltas without persisting.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Only list change details when something actually changed.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal file; exits non-zero on any invalid entry.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Non-zero exit so CI/scripting can detect invalid input.
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal and apply it, recording the applied event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal with a reviewer-supplied reason.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9590
/// `vela artifact-to-state`: import an artifact packet into a frontier
/// as proposals (optionally applying artifact events immediately) and
/// print the resulting report.
fn cmd_artifact_to_state(
    frontier: &Path,
    packet: &Path,
    actor: &str,
    apply_artifacts: bool,
    json: bool,
) {
    let report =
        crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
            .unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report)
                .expect("failed to serialize artifact-to-state report")
        );
    } else {
        println!("vela artifact-to-state");
        println!(" packet: {}", report.packet_id);
        println!(" frontier: {}", report.frontier);
        println!(" artifact proposals: {}", report.artifact_proposals);
        println!(" finding proposals: {}", report.finding_proposals);
        println!(" gap proposals: {}", report.gap_proposals);
        println!(
            " applied artifact events: {}",
            report.applied_artifact_events
        );
        println!(
            " pending truth proposals: {}",
            report.pending_truth_proposals
        );
    }
}
9624
/// Dispatch a `vela bridge-kit …` subcommand.
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        // Offline structural validation of bridge-kit packet file(s).
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Exit non-zero on any validation failure (after both output modes).
            if !report.ok {
                std::process::exit(1);
            }
        }
        // Online check that a packet's source identifiers resolve upstream.
        BridgeKitAction::VerifyProvenance { packet, json } => {
            let report = verify_packet_provenance(&packet).await;
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Fixed-width status tags keep the identifier column aligned.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            // Unresolved identifiers are a hard failure; skipped ones are not.
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
9701
/// Aggregate result of a `bridge-kit verify-provenance` run,
/// serialized directly for `--json` output.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    // Command name; set to "bridge-kit.verify-provenance".
    command: String,
    // Display path of the packet file that was checked.
    packet: String,
    // Per-identifier verification outcomes.
    identifiers: Vec<ProvenanceVerificationEntry>,
    // Number of entries with status "resolved".
    resolved_count: usize,
    // Number of entries with status "unresolved".
    unresolved_count: usize,
    // Number of entries with any other status (e.g. "skipped").
    skipped_count: usize,
}
9711
/// Verification outcome for one external identifier found in a packet.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    // Canonical identifier, e.g. "doi:…", "pmid:…", "s2:…", "arxiv:…".
    identifier: String,
    // Identifier family: "doi", "pmid", "s2", "arxiv", or "unknown".
    kind: String,
    // One of "resolved", "unresolved", or "skipped".
    status: String,
    // Optional detail (upstream HTTP status, unreachable error, or skip reason);
    // omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
9720
9721async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
9726 use crate::artifact_to_state::ArtifactPacket;
9727 let raw = std::fs::read_to_string(packet_path)
9728 .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
9729 let parsed: ArtifactPacket =
9730 serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
9731 let packet = parsed
9732 .validate()
9733 .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));
9734
9735 let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
9737 for artifact in &packet.artifacts {
9738 if let Some(ident) = extract_identifier(&artifact.locator) {
9739 candidates.insert(ident);
9740 }
9741 }
9742 for claim in &packet.candidate_claims {
9743 for source_ref in &claim.source_refs {
9744 if let Some(ident) = extract_identifier(source_ref) {
9745 candidates.insert(ident);
9746 }
9747 }
9748 }
9749
9750 let client = reqwest::Client::builder()
9751 .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
9752 .timeout(std::time::Duration::from_secs(15))
9753 .build()
9754 .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));
9755
9756 let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
9757 let mut resolved = 0usize;
9758 let mut unresolved = 0usize;
9759 let mut skipped = 0usize;
9760 for candidate in &candidates {
9761 let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
9762 verify_doi(&client, doi).await
9763 } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
9764 verify_pmid(&client, pmid).await
9765 } else if let Some(s2_id) = candidate.strip_prefix("s2:") {
9766 verify_s2(&client, s2_id).await
9767 } else if let Some(arxiv_id) = candidate.strip_prefix("arxiv:") {
9768 verify_arxiv(&client, arxiv_id).await
9769 } else {
9770 ProvenanceVerificationEntry {
9771 identifier: candidate.clone(),
9772 kind: "unknown".to_string(),
9773 status: "skipped".to_string(),
9774 note: Some("no recognized identifier prefix".to_string()),
9775 }
9776 };
9777 match entry.status.as_str() {
9778 "resolved" => resolved += 1,
9779 "unresolved" => unresolved += 1,
9780 _ => skipped += 1,
9781 }
9782 entries.push(entry);
9783 }
9784
9785 ProvenanceVerificationReport {
9786 command: "bridge-kit.verify-provenance".to_string(),
9787 packet: packet_path.display().to_string(),
9788 identifiers: entries,
9789 resolved_count: resolved,
9790 unresolved_count: unresolved,
9791 skipped_count: skipped,
9792 }
9793}
9794
/// Normalize a locator/source-ref string into a canonical `kind:value`
/// identifier (`doi:`, `pmid:`, `s2:`, `arxiv:`), or `None` when no
/// recognized identifier can be extracted.
///
/// Fixes over the previous version: query strings are now stripped
/// BEFORE trailing `/` and `.pdf` suffixes (so
/// `…/1234.5678.pdf?download=1` yields `arxiv:1234.5678`, not
/// `arxiv:1234.5678.pdf?download=1`), Semantic Scholar URLs with a
/// trailing slash no longer produce an empty id, PubMed URLs drop any
/// query string, and `http://dx.doi.org/` is recognized.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Already in canonical form: pass through unchanged.
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("s2:")
        || trimmed.starts_with("arxiv:")
    {
        return Some(trimmed.to_string());
    }
    // DOI resolver URLs: everything after the host is the DOI.
    for prefix in [
        "https://doi.org/",
        "http://doi.org/",
        "https://dx.doi.org/",
        "http://dx.doi.org/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    // PubMed URLs: drop any query string, then trailing slashes.
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let pmid = rest.split('?').next().unwrap_or(rest).trim_end_matches('/');
            if !pmid.is_empty() {
                return Some(format!("pmid:{pmid}"));
            }
        }
    }
    // Semantic Scholar URLs: the paper id is the last path segment
    // (human-readable pages put a title slug before it).
    for prefix in [
        "https://www.semanticscholar.org/paper/",
        "http://www.semanticscholar.org/paper/",
        "https://api.semanticscholar.org/graph/v1/paper/",
        "https://api.semanticscholar.org/v1/paper/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let path = rest.split('?').next().unwrap_or(rest).trim_end_matches('/');
            if let Some(s2_id) = path.split('/').next_back().filter(|seg| !seg.is_empty()) {
                return Some(format!("s2:{s2_id}"));
            }
        }
    }
    // arXiv URLs: strip the query string first, then trailing `/` and a
    // `.pdf` suffix from /pdf/ links.
    for prefix in [
        "https://arxiv.org/abs/",
        "http://arxiv.org/abs/",
        "https://arxiv.org/pdf/",
        "http://arxiv.org/pdf/",
        "https://www.arxiv.org/abs/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let arxiv_id = rest
                .split('?')
                .next()
                .unwrap_or(rest)
                .trim_end_matches('/')
                .trim_end_matches(".pdf");
            if !arxiv_id.is_empty() {
                return Some(format!("arxiv:{arxiv_id}"));
            }
        }
    }
    // Bare DOIs: "10.<registrant>/<suffix>" with no whitespace.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
9880
9881async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
9882 let url = format!("https://api.crossref.org/works/{doi}");
9883 match client.get(&url).send().await {
9884 Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
9885 identifier: format!("doi:{doi}"),
9886 kind: "doi".to_string(),
9887 status: "resolved".to_string(),
9888 note: None,
9889 },
9890 Ok(resp) => ProvenanceVerificationEntry {
9891 identifier: format!("doi:{doi}"),
9892 kind: "doi".to_string(),
9893 status: "unresolved".to_string(),
9894 note: Some(format!("crossref returned {}", resp.status())),
9895 },
9896 Err(e) => ProvenanceVerificationEntry {
9897 identifier: format!("doi:{doi}"),
9898 kind: "doi".to_string(),
9899 status: "skipped".to_string(),
9900 note: Some(format!("crossref unreachable: {e}")),
9901 },
9902 }
9903}
9904
9905async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
9906 let url = format!(
9907 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
9908 );
9909 match client.get(&url).send().await {
9910 Ok(resp) if resp.status().is_success() => {
9911 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9914 let result = body.get("result");
9915 let uids = result
9916 .and_then(|r| r.get("uids"))
9917 .and_then(|u| u.as_array());
9918 let resolved = uids.is_some_and(|a| !a.is_empty());
9919 if resolved {
9920 ProvenanceVerificationEntry {
9921 identifier: format!("pmid:{pmid}"),
9922 kind: "pmid".to_string(),
9923 status: "resolved".to_string(),
9924 note: None,
9925 }
9926 } else {
9927 ProvenanceVerificationEntry {
9928 identifier: format!("pmid:{pmid}"),
9929 kind: "pmid".to_string(),
9930 status: "unresolved".to_string(),
9931 note: Some("eutils returned empty uids".to_string()),
9932 }
9933 }
9934 }
9935 Ok(resp) => ProvenanceVerificationEntry {
9936 identifier: format!("pmid:{pmid}"),
9937 kind: "pmid".to_string(),
9938 status: "unresolved".to_string(),
9939 note: Some(format!("eutils returned {}", resp.status())),
9940 },
9941 Err(e) => ProvenanceVerificationEntry {
9942 identifier: format!("pmid:{pmid}"),
9943 kind: "pmid".to_string(),
9944 status: "skipped".to_string(),
9945 note: Some(format!("eutils unreachable: {e}")),
9946 },
9947 }
9948}
9949
9950async fn verify_s2(client: &reqwest::Client, s2_id: &str) -> ProvenanceVerificationEntry {
9957 let url = format!("https://api.semanticscholar.org/graph/v1/paper/{s2_id}");
9958 match client.get(&url).send().await {
9959 Ok(resp) if resp.status().is_success() => {
9960 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9961 let has_paper_id = body
9962 .get("paperId")
9963 .and_then(serde_json::Value::as_str)
9964 .is_some_and(|v| !v.is_empty());
9965 if has_paper_id {
9966 ProvenanceVerificationEntry {
9967 identifier: format!("s2:{s2_id}"),
9968 kind: "s2".to_string(),
9969 status: "resolved".to_string(),
9970 note: None,
9971 }
9972 } else {
9973 ProvenanceVerificationEntry {
9974 identifier: format!("s2:{s2_id}"),
9975 kind: "s2".to_string(),
9976 status: "unresolved".to_string(),
9977 note: Some("semantic scholar returned 200 with no paperId".to_string()),
9978 }
9979 }
9980 }
9981 Ok(resp) => ProvenanceVerificationEntry {
9982 identifier: format!("s2:{s2_id}"),
9983 kind: "s2".to_string(),
9984 status: "unresolved".to_string(),
9985 note: Some(format!("semantic scholar returned {}", resp.status())),
9986 },
9987 Err(e) => ProvenanceVerificationEntry {
9988 identifier: format!("s2:{s2_id}"),
9989 kind: "s2".to_string(),
9990 status: "skipped".to_string(),
9991 note: Some(format!("semantic scholar unreachable: {e}")),
9992 },
9993 }
9994}
9995
9996async fn verify_arxiv(client: &reqwest::Client, arxiv_id: &str) -> ProvenanceVerificationEntry {
10005 let url = format!("https://export.arxiv.org/api/query?id_list={arxiv_id}&max_results=1");
10006 match client.get(&url).send().await {
10007 Ok(resp) if resp.status().is_success() => {
10008 let body = resp.text().await.unwrap_or_default();
10009 let has_entry = body.contains("<entry>") || body.contains("<entry ");
10013 let has_id_url = body.contains("http://arxiv.org/abs/");
10017 if has_entry && has_id_url {
10018 ProvenanceVerificationEntry {
10019 identifier: format!("arxiv:{arxiv_id}"),
10020 kind: "arxiv".to_string(),
10021 status: "resolved".to_string(),
10022 note: None,
10023 }
10024 } else {
10025 ProvenanceVerificationEntry {
10026 identifier: format!("arxiv:{arxiv_id}"),
10027 kind: "arxiv".to_string(),
10028 status: "unresolved".to_string(),
10029 note: Some("arxiv returned 200 with no matching entry".to_string()),
10030 }
10031 }
10032 }
10033 Ok(resp) => ProvenanceVerificationEntry {
10034 identifier: format!("arxiv:{arxiv_id}"),
10035 kind: "arxiv".to_string(),
10036 status: "unresolved".to_string(),
10037 note: Some(format!("arxiv returned {}", resp.status())),
10038 },
10039 Err(e) => ProvenanceVerificationEntry {
10040 identifier: format!("arxiv:{arxiv_id}"),
10041 kind: "arxiv".to_string(),
10042 status: "skipped".to_string(),
10043 note: Some(format!("arxiv unreachable: {e}")),
10044 },
10045 }
10046}
10047
10048async fn cmd_source_adapter(action: SourceAdapterAction) {
10049 match action {
10050 SourceAdapterAction::Run {
10051 frontier,
10052 adapter,
10053 actor,
10054 entries,
10055 priority,
10056 include_excluded,
10057 allow_partial,
10058 dry_run,
10059 input_dir,
10060 apply_artifacts,
10061 json,
10062 } => {
10063 let report = crate::source_adapters::run(
10064 &frontier,
10065 crate::source_adapters::SourceAdapterRunOptions {
10066 adapter,
10067 actor,
10068 entries,
10069 priority,
10070 include_excluded,
10071 allow_partial,
10072 dry_run,
10073 input_dir,
10074 apply_artifacts,
10075 },
10076 )
10077 .await
10078 .unwrap_or_else(|e| fail_return(&e));
10079 if json {
10080 println!(
10081 "{}",
10082 serde_json::to_string_pretty(&report)
10083 .expect("failed to serialize source adapter report")
10084 );
10085 } else {
10086 println!("vela source-adapter run");
10087 println!(" adapter: {}", report.adapter);
10088 println!(" run: {}", report.run_id);
10089 println!(" frontier: {}", report.frontier);
10090 println!(" selected entries: {}", report.selected_entries);
10091 println!(" fetched records: {}", report.fetched_records);
10092 println!(" changed records: {}", report.changed_records);
10093 println!(" unchanged records: {}", report.unchanged_records);
10094 println!(" failed records: {}", report.failed_records.len());
10095 if let Some(packet_id) = report.packet_id {
10096 println!(" packet: {packet_id}");
10097 }
10098 println!(" artifact proposals: {}", report.artifact_proposals);
10099 println!(" review note proposals: {}", report.review_note_proposals);
10100 println!(" applied events: {}", report.applied_event_ids.len());
10101 }
10102 }
10103 }
10104}
10105
10106fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
10107 match action {
10108 RuntimeAdapterAction::Run {
10109 frontier,
10110 adapter,
10111 input,
10112 actor,
10113 dry_run,
10114 apply_artifacts,
10115 json,
10116 } => {
10117 let report = crate::runtime_adapters::run(
10118 &frontier,
10119 crate::runtime_adapters::RuntimeAdapterRunOptions {
10120 adapter,
10121 input,
10122 actor,
10123 dry_run,
10124 apply_artifacts,
10125 },
10126 )
10127 .unwrap_or_else(|e| fail_return(&e));
10128 if json {
10129 println!(
10130 "{}",
10131 serde_json::to_string_pretty(&report)
10132 .expect("failed to serialize runtime adapter report")
10133 );
10134 } else {
10135 println!("vela runtime-adapter run");
10136 println!(" adapter: {}", report.adapter);
10137 println!(" run: {}", report.run_id);
10138 println!(" frontier: {}", report.frontier);
10139 if let Some(packet_id) = report.packet_id {
10140 println!(" packet: {packet_id}");
10141 }
10142 println!(" artifact proposals: {}", report.artifact_proposals);
10143 println!(" finding proposals: {}", report.finding_proposals);
10144 println!(" gap proposals: {}", report.gap_proposals);
10145 println!(" review note proposals: {}", report.review_note_proposals);
10146 println!(
10147 " applied artifact events: {}",
10148 report.applied_artifact_events
10149 );
10150 println!(
10151 " pending truth proposals: {}",
10152 report.pending_truth_proposals
10153 );
10154 }
10155 }
10156 }
10157}
10158
/// Handler for `vela sign …`: keypair generation, frontier signing,
/// signature verification, and per-finding signature thresholds.
fn cmd_sign(action: SignAction) {
    match action {
        // Generate an Ed25519 keypair on disk and report the public key.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!("  public key: {public_key}");
            }
        }
        // Sign all findings in the frontier with the given private key;
        // `count` is the number of findings signed.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures across the frontier. `public_key` is optional
        // (passed through as Option<&str> to the verifier).
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    "  {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!("  {}", style::tick_row(60));
                println!("  total findings: {}", report.total_findings);
                println!("  signed: {}", report.signed);
                println!("  unsigned: {}", report.unsigned);
                println!("  valid: {}", report.valid);
                println!("  invalid: {}", report.invalid);
                // Threshold stats are only meaningful when at least one
                // finding declares a signature threshold.
                if report.findings_with_threshold > 0 {
                    println!("  with threshold: {}", report.findings_with_threshold);
                    println!("  jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set the required-signature threshold on a single finding, then
        // recompute joint-acceptance flags frontier-wide.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // Reject a zero threshold up front; valid thresholds start at 1.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Refresh acceptance state so the new threshold takes effect
            // before we read back `jointly_accepted` and persist.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
10287
10288fn cmd_actor(action: ActorAction) {
10289 match action {
10290 ActorAction::Add {
10291 frontier,
10292 id,
10293 pubkey,
10294 tier,
10295 orcid,
10296 clearance,
10297 json,
10298 } => {
10299 let trimmed = pubkey.trim();
10301 if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
10302 fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
10303 }
10304 let orcid_normalized = orcid
10306 .as_deref()
10307 .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
10308 let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
10311 crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
10312 });
10313
10314 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10315 if project.actors.iter().any(|actor| actor.id == id) {
10316 fail(&format!(
10317 "Actor '{id}' already registered in this frontier."
10318 ));
10319 }
10320 project.actors.push(sign::ActorRecord {
10321 id: id.clone(),
10322 public_key: trimmed.to_string(),
10323 algorithm: "ed25519".to_string(),
10324 created_at: chrono::Utc::now().to_rfc3339(),
10325 tier: tier.clone(),
10326 orcid: orcid_normalized.clone(),
10327 access_clearance: clearance,
10328 });
10329 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10330 let payload = json!({
10331 "ok": true,
10332 "command": "actor.add",
10333 "frontier": frontier.display().to_string(),
10334 "actor_id": id,
10335 "public_key": trimmed,
10336 "tier": tier,
10337 "orcid": orcid_normalized,
10338 "registered_count": project.actors.len(),
10339 });
10340 if json {
10341 println!(
10342 "{}",
10343 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
10344 );
10345 } else {
10346 let tier_suffix = tier
10347 .as_deref()
10348 .map_or_else(String::new, |t| format!(" tier={t}"));
10349 println!(
10350 "{} actor {} (pubkey {}{tier_suffix})",
10351 style::ok("registered"),
10352 id,
10353 &trimmed[..16]
10354 );
10355 }
10356 }
10357 ActorAction::List { frontier, json } => {
10358 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10359 if json {
10360 let payload = json!({
10361 "ok": true,
10362 "command": "actor.list",
10363 "frontier": frontier.display().to_string(),
10364 "actors": project.actors,
10365 });
10366 println!(
10367 "{}",
10368 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
10369 );
10370 } else {
10371 println!();
10372 println!(
10373 " {}",
10374 format!("VELA · ACTOR · LIST · {}", frontier.display())
10375 .to_uppercase()
10376 .dimmed()
10377 );
10378 println!(" {}", style::tick_row(60));
10379 if project.actors.is_empty() {
10380 println!(" (no actors registered)");
10381 } else {
10382 for actor in &project.actors {
10383 println!(
10384 " {:<28} {}… registered {}",
10385 actor.id,
10386 &actor.public_key[..16],
10387 actor.created_at
10388 );
10389 }
10390 }
10391 }
10392 }
10393 }
10394}
10395
/// Handler for `vela causal …`: identifiability audit, source→target effect
/// identification, causal-graph inspection, and counterfactual queries.
/// Every arm loads the frontier from disk, emits pretty JSON when `--json`
/// is set, and otherwise renders a styled terminal report.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let mut entries = causal_reasoning::audit_frontier(&project);
            // --problems-only keeps just entries the audit flags for reviewer
            // attention.
            if problems_only {
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // The summary is computed after filtering, so with --problems-only
            // its counts describe the filtered view, not the whole frontier.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!(
                "  total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!("  (no entries to report)");
                return;
            }
            for e in &entries {
                // Map each identifiability verdict to a colored status chip.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Optional claim/grade enums rendered as lowercase Debug
                // names, "none" when absent.
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!("  {chip} {} ({}/{})", e.finding_id, claim, grade);
                // One-line preview: truncate the assertion to 78 chars.
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!("    {assertion_short}");
                println!("    {} {}", style::ok("why:"), e.rationale);
                // Show the remediation hint for anything needing attention,
                // explicitly including Underdetermined verdicts.
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!("    {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        CausalAction::Effect {
            frontier,
            source,
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            match verdict {
                // Back-door identification: an empty adjustment set means no
                // adjustment is needed at all.
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    if adjustment_set.is_empty() {
                        println!(
                            "  {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!("  {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!("    · {z}");
                        }
                    }
                    println!(
                        "  back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        "  {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!("  mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!("    · {m}");
                    }
                    println!(
                        "  applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!("  {} no causal path: {reason}", style::warn("no_path"));
                }
                // No valid adjustment set: show up to 5 open back-door paths
                // so the reviewer can see why identification failed.
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        "  {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    println!("  open back-door paths:");
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!("    · {}", path.join(" — "));
                    }
                    println!(
                        "  remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // Either a single requested node (validated against the graph) or
            // every finding id in the frontier.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                // One {node, parents, children} object per selected node.
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!(
                "  {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // When listing the whole frontier, skip isolated nodes; a
                // single explicitly-requested node is always shown.
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue;
                }
                println!("  {n}");
                if !parents.is_empty() {
                    println!("    parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!("    children: {}", children.join(", "));
                }
            }
        }
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            // Wider rule than the other subcommands (72 vs 60) for the longer
            // header line.
            println!("  {}", style::tick_row(72));
            match verdict {
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        "  {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        "  twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    for p in paths_used.iter().take(5) {
                        println!("    · {}", p.join(" → "));
                    }
                    println!(
                        "  reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
                         instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                // A causal path exists but some edges carry no mechanism
                // annotation; show up to 8 of them.
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        "  {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!("    · {parent} → {child}");
                    }
                    println!(
                        "  remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                CounterfactualVerdict::NoCausalPath { factual } => {
                    println!(
                        "  {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
10750
/// Handler for `vela bridges …`: derive, list, show, confirm, and refute
/// cross-frontier bridges. Bridges are persisted as one JSON file each under
/// `<frontier>/.vela/bridges/`; the nested helpers below are the only code
/// paths here that touch that directory.
fn cmd_bridges(action: BridgesAction) {
    use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
    use std::collections::HashMap;

    // Directory holding one JSON file per bridge.
    fn bridges_dir(frontier: &Path) -> PathBuf {
        frontier.join(".vela/bridges")
    }

    // Load a single bridge by id; errors when the file is missing or
    // unparsable.
    fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
        let path = bridges_dir(frontier).join(format!("{id}.json"));
        if !path.is_file() {
            return Err(format!("bridge not found: {id}"));
        }
        let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
        serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
    }

    // Persist a bridge as pretty JSON (trailing newline), creating the
    // bridges directory on first use.
    fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
        let dir = bridges_dir(frontier);
        std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
        let path = dir.join(format!("{}.json", b.id));
        let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
        std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
    }

    // Reviewer identity for audit events; overridable via VELA_REVIEWER_ID.
    fn default_reviewer_id() -> String {
        std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
    }

    // Record a confirm/refute decision as an event file under `.vela/events`,
    // after validating the payload against the bridge ids currently on disk.
    fn emit_bridge_reviewed_event(
        frontier: &Path,
        bridge_id: &str,
        status: &str,
        reviewer_id: &str,
        note: Option<&str>,
    ) -> Result<(), String> {
        let mut payload = serde_json::json!({
            "bridge_id": bridge_id,
            "status": status,
        });
        // Only attach a note when it is non-empty after trimming.
        if let Some(n) = note
            && !n.trim().is_empty()
        {
            payload["note"] = serde_json::Value::String(n.to_string());
        }
        // A listing failure degrades to an empty id set rather than aborting.
        let known_ids: Vec<String> = list_bridges(frontier)
            .unwrap_or_default()
            .into_iter()
            .map(|b| b.id)
            .collect();
        crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
        let event = crate::events::new_bridge_reviewed_event(
            bridge_id,
            reviewer_id,
            "human",
            &format!("Bridge {status} by {reviewer_id}"),
            payload,
            Vec::new(),
        );
        let events_dir = frontier.join(".vela/events");
        std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
        let event_path = events_dir.join(format!("{}.json", event.id));
        let data =
            serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
        std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
    }

    // Read every `*.json` bridge file, sorted by descending finding-ref count
    // then entity name, so the most-connected bridges list first.
    fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
        let dir = bridges_dir(frontier);
        if !dir.is_dir() {
            return Ok(Vec::new());
        }
        let mut out = Vec::new();
        for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
            let entry = entry.map_err(|e| format!("read entry: {e}"))?;
            let path = entry.path();
            // Ignore anything that is not a .json file.
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
            let b: Bridge =
                serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
            out.push(b);
        }
        out.sort_by(|a, b| {
            b.finding_refs
                .len()
                .cmp(&a.finding_refs.len())
                .then(a.entity_name.cmp(&b.entity_name))
        });
        Ok(out)
    }

    match action {
        // Derive bridges between two frontiers and materialize them under
        // frontier A. NOTE(review): results are written only to frontier A;
        // stale bridge files from earlier derivations are not pruned.
        BridgesAction::Derive {
            frontier_a,
            label_a,
            frontier_b,
            label_b,
            json,
        } => {
            let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
            let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
            let now = chrono::Utc::now().to_rfc3339();
            let new_bridges =
                derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);

            let existing = list_bridges(&frontier_a).unwrap_or_default();
            let existing_by_id: HashMap<String, Bridge> =
                existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
            let mut written = 0;
            let mut preserved = 0;
            let mut new_ids = Vec::new();
            for mut bridge in new_bridges {
                // Re-deriving must not clobber human review: if the same
                // bridge id exists and a reviewer moved it off Derived, keep
                // the reviewed status and the original derivation timestamp.
                if let Some(prev) = existing_by_id.get(&bridge.id)
                    && prev.status != BridgeStatus::Derived
                {
                    bridge.status = prev.status;
                    bridge.derived_at = prev.derived_at.clone();
                    preserved += 1;
                }
                save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
                new_ids.push(bridge.id.clone());
                written += 1;
            }

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.derive",
                        "frontier_a": frontier_a.display().to_string(),
                        "frontier_b": frontier_b.display().to_string(),
                        "bridges_written": written,
                        "reviewer_judgments_preserved": preserved,
                        "ids": new_ids,
                    }))
                    .expect("serialize bridges.derive")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!("  {} {} bridge(s) materialized", style::ok("ok"), written);
            if preserved > 0 {
                println!(
                    "  {} {} reviewer judgment(s) preserved",
                    style::ok("kept"),
                    preserved
                );
            }
            // Cap the id listing at 10 entries.
            for id in new_ids.iter().take(10) {
                println!("    · {id}");
            }
            if new_ids.len() > 10 {
                println!("    … and {} more", new_ids.len() - 10);
            }
            println!();
        }
        BridgesAction::List {
            frontier,
            status,
            json,
        } => {
            let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Optional case-insensitive status filter.
            if let Some(s) = status.as_deref() {
                let want = match s.to_lowercase().as_str() {
                    "derived" => BridgeStatus::Derived,
                    "confirmed" => BridgeStatus::Confirmed,
                    "refuted" => BridgeStatus::Refuted,
                    other => fail_return(&format!(
                        "unknown bridge status '{other}' (try derived|confirmed|refuted)"
                    )),
                };
                bridges.retain(|b| b.status == want);
            }
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.list",
                        "frontier": frontier.display().to_string(),
                        "count": bridges.len(),
                        "bridges": bridges,
                    }))
                    .expect("serialize bridges.list")
                );
                return;
            }
            println!();
            println!(
                "  {}",
                format!("VELA · BRIDGES · LIST · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!("  {} bridge(s)", bridges.len());
            for b in &bridges {
                // Colored chip per review status.
                let chip = match b.status {
                    BridgeStatus::Derived => style::warn("derived"),
                    BridgeStatus::Confirmed => style::ok("confirmed"),
                    BridgeStatus::Refuted => style::lost("refuted"),
                };
                println!();
                println!(
                    "  {chip} {} {} ↔ findings:{}",
                    b.id,
                    b.entity_name,
                    b.finding_refs.len()
                );
                println!("    frontiers: {}", b.frontiers.join(", "));
                if let Some(t) = &b.tension {
                    println!("    tension: {t}");
                }
            }
            println!();
        }
        BridgesAction::Show {
            frontier,
            bridge_id,
            json,
        } => {
            let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(
                "  {}",
                format!("VELA · BRIDGES · SHOW · {}", b.id)
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!("  entity: {}", b.entity_name);
            println!("  status: {:?}", b.status);
            println!("  frontiers: {}", b.frontiers.join(", "));
            if !b.frontier_ids.is_empty() {
                println!("  frontier_ids: {}", b.frontier_ids.join(", "));
            }
            if let Some(t) = &b.tension {
                println!("  tension: {t}");
            }
            println!("  derived_at: {}", b.derived_at);
            println!("  finding refs ({}):", b.finding_refs.len());
            for r in &b.finding_refs {
                // Direction is optional; render "—" when absent.
                let dir = r.direction.as_deref().unwrap_or("—");
                // 72-char preview of the referenced assertion.
                let truncated: String = r.assertion_text.chars().take(72).collect();
                println!(
                    "    · [{}] {} (conf={:.2}, dir={})",
                    r.frontier, r.finding_id, r.confidence, dir
                );
                println!("      {truncated}");
            }
            println!();
        }
        BridgesAction::Confirm {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Confirmed;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // Event emission is best-effort: the status change is already
            // saved and an emission failure is silently discarded.
            // NOTE(review): confirm this should not at least warn on failure.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "confirmed",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!("  {} {} now confirmed", style::ok("confirmed"), b.id);
            println!();
        }
        BridgesAction::Refute {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Refuted;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // Best-effort audit event, same as Confirm above.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "refuted",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!("  {} {} now refuted", style::lost("refuted"), b.id);
            println!();
        }
    }
}
11094
/// Dispatch for `vela federation …` subcommands: manage the frontier's
/// peer-hub registry (add / list / remove), synchronize state with a peer,
/// or push a signed conflict resolution.
///
/// Every failure path goes through `fail` / `fail_return`, which terminate
/// the process, so code after a check may assume the check passed.
fn cmd_federation(action: FederationAction) {
    use crate::federation::PeerHub;

    match action {
        // Register a new peer hub in the frontier's peer registry.
        FederationAction::PeerAdd {
            frontier,
            id,
            url,
            pubkey,
            note,
            json,
        } => {
            let peer = PeerHub {
                id: id.clone(),
                url: url.clone(),
                public_key: pubkey.trim().to_string(),
                added_at: chrono::Utc::now().to_rfc3339(),
                note: note.clone(),
            };
            // Structural validation of the peer record before touching disk.
            peer.validate().unwrap_or_else(|e| fail_return(&e));

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Peer ids are unique per frontier; refuse duplicates.
            if project.peers.iter().any(|p| p.id == id) {
                fail(&format!("peer '{id}' already in registry"));
            }
            project.peers.push(peer.clone());
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-add",
                        "frontier": frontier.display().to_string(),
                        "peer": peer,
                        "registered_count": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-add")
                );
            } else {
                // NOTE(review): `&peer.public_key[..16]` assumes validate()
                // guarantees at least 16 chars — confirm, else this slice panics.
                println!(
                    "{} peer {} (pubkey {}…) at {}",
                    style::ok("registered"),
                    id,
                    &peer.public_key[..16],
                    peer.url
                );
            }
        }
        // List registered peers, as JSON or a dimmed human-readable table.
        FederationAction::PeerList { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-list",
                        "frontier": frontier.display().to_string(),
                        "peers": project.peers,
                    }))
                    .expect("serialize federation.peer-list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FEDERATION · PEERS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.peers.is_empty() {
                    println!(" (no peers registered)");
                } else {
                    for p in &project.peers {
                        let note_suffix = if p.note.is_empty() {
                            String::new()
                        } else {
                            format!(" · {}", p.note)
                        };
                        println!(
                            " {:<24} {} {}…{note_suffix}",
                            p.id,
                            p.url,
                            &p.public_key[..16]
                        );
                    }
                }
            }
        }
        // Synchronize local state with a registered peer, either via the
        // peer's hub discovery endpoint (--via-hub) or by direct fetch.
        FederationAction::Sync {
            frontier,
            peer_id,
            url,
            via_hub,
            vfr_id,
            allow_cross_vfr,
            dry_run,
            json,
        } => {
            use crate::federation::{self, DiscoveryResult};

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
                fail(&format!(
                    "peer '{peer_id}' not in registry; run `vela federation peer add` first"
                ));
            };
            let local_frontier_id = project.frontier_id();

            // Guard: syncing against a different vfr is destructive-ish (every
            // peer finding becomes a missing_locally conflict), so require an
            // explicit opt-in via --allow-cross-vfr.
            if via_hub
                && let Some(target) = vfr_id.as_deref()
                && target != local_frontier_id
                && !allow_cross_vfr
            {
                fail(&format!(
                    "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
                    Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
                    missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
                ));
            }

            // Normalized discovery result; variants carry (vfr_id, …context)
            // so each failure mode can be reported/recorded uniformly below.
            #[derive(Debug)]
            enum SyncOutcome {
                Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
            }

            let outcome = if via_hub {
                // Hub path: resolve the peer's registry entry (verifying its
                // signature against the registered pubkey) then fetch state.
                let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
                match federation::discover_peer_frontier(
                    &peer.url,
                    &target_vfr,
                    Some(&peer.public_key),
                ) {
                    DiscoveryResult::Resolved(p) => {
                        let src =
                            format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
                        SyncOutcome::Resolved(p, src)
                    }
                    DiscoveryResult::BrokenLocator {
                        vfr_id,
                        locator,
                        status,
                    } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
                    DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
                        SyncOutcome::UnverifiedEntry(vfr_id, reason)
                    }
                    DiscoveryResult::EntryNotFound { vfr_id, status } => {
                        SyncOutcome::EntryNotFound(vfr_id, status)
                    }
                    DiscoveryResult::Unreachable { url, error } => {
                        fail(&format!("peer hub unreachable ({url}): {error}"));
                    }
                }
            } else {
                // Direct path: fetch the manifest straight from the peer URL
                // (explicit --url wins; otherwise derive from the peer base).
                let resolved_url = url.unwrap_or_else(|| {
                    let base = peer.url.trim_end_matches('/');
                    format!("{base}/manifest/{local_frontier_id}.json")
                });
                match federation::fetch_peer_frontier(&resolved_url) {
                    Ok(p) => SyncOutcome::Resolved(p, resolved_url),
                    Err(e) => fail(&format!("direct fetch failed: {e}")),
                }
            };

            // Non-Resolved outcomes report (and, unless --dry-run, record a
            // conflict into the frontier) and return early; only Resolved
            // falls through with the fetched peer state.
            let peer_source: String;
            let peer_state = match outcome {
                SyncOutcome::Resolved(p, src) => {
                    if !json {
                        println!(" · resolved via {src}");
                    }
                    peer_source = src;
                    p
                }
                SyncOutcome::BrokenLocator(vfr, locator, status) => {
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "broken_locator",
                                    "vfr_id": vfr,
                                    "locator": locator,
                                    "http_status": status,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry resolved but locator dead",
                                style::warn("broken_locator")
                            );
                            println!(" vfr_id: {vfr}");
                            println!(" locator: {locator} (HTTP {status})");
                        }
                        return;
                    }
                    // Persist the broken-locator conflict before reporting it.
                    let report = federation::record_locator_failure(
                        &mut project,
                        &peer_id,
                        &vfr,
                        &locator,
                        status,
                    );
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "broken_locator",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync recorded broken-locator conflict against {peer_id}",
                            style::warn("broken_locator")
                        );
                        println!(" vfr_id: {vfr}");
                        println!(" locator: {locator} (HTTP {status})");
                        println!(" events appended: {}", report.events_appended);
                    }
                    return;
                }
                SyncOutcome::UnverifiedEntry(vfr, reason) => {
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "unverified_peer_entry",
                                    "vfr_id": vfr,
                                    "reason": reason,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry signature did not verify",
                                style::lost("unverified_peer_entry")
                            );
                            println!(" vfr_id: {vfr}");
                            println!(" reason: {reason}");
                        }
                        return;
                    }
                    // Signature failure halts the sync; record the event.
                    let report =
                        federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "unverified_peer_entry",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync halted; peer's registry entry signature did not verify",
                            style::lost("unverified_peer_entry")
                        );
                        println!(" vfr_id: {vfr}");
                        println!(" reason: {reason}");
                    }
                    return;
                }
                // Not-found is reported but never recorded as a conflict.
                SyncOutcome::EntryNotFound(vfr, status) => {
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": false,
                                "command": "federation.sync",
                                "outcome": "entry_not_found",
                                "vfr_id": vfr,
                                "http_status": status,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
                            style::warn("entry_not_found")
                        );
                    }
                    return;
                }
            };

            // Dry-run: diff only, never mutate or save the local frontier.
            if dry_run {
                let conflicts = federation::diff_frontiers(&project, &peer_state);
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "federation.sync",
                            "dry_run": true,
                            "peer_id": peer_id,
                            "peer_source": peer_source,
                            "conflicts": conflicts,
                        }))
                        .expect("serialize federation.sync (dry-run)")
                    );
                } else {
                    println!(
                        "{} dry-run vs {peer_id} ({}): {} conflict(s)",
                        style::ok("ok"),
                        peer_source,
                        conflicts.len()
                    );
                    for c in &conflicts {
                        println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                    }
                }
                return;
            }

            // Real sync: merge peer state into the local project and persist.
            let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.sync",
                        "peer_id": peer_id,
                        "peer_source": peer_source,
                        "report": report,
                    }))
                    .expect("serialize federation.sync")
                );
            } else {
                println!(
                    "{} synced with {} ({})",
                    style::ok("ok"),
                    peer_id,
                    peer_source
                );
                // Snapshot hashes truncated defensively (may be < 16 chars).
                println!(
                    " our: {}",
                    &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
                );
                println!(
                    " peer: {}",
                    &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
                );
                println!(
                    " conflicts: {} events appended: {}",
                    report.conflicts.len(),
                    report.events_appended
                );
                for c in &report.conflicts {
                    println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                }
            }
        }
        // Delegates to the dedicated helper below.
        FederationAction::PushResolution {
            frontier,
            conflict_event_id,
            to,
            key,
            vfr_id,
            json,
        } => {
            cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
        }
        // Remove a peer by id; fails when the id is unknown.
        FederationAction::PeerRemove { frontier, id, json } => {
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let before = project.peers.len();
            project.peers.retain(|p| p.id != id);
            // retain() removed nothing => the id was never registered.
            if project.peers.len() == before {
                fail(&format!("peer '{id}' not found in registry"));
            }
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-remove",
                        "frontier": frontier.display().to_string(),
                        "removed": id,
                        "remaining": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-remove")
                );
            } else {
                println!(
                    "{} peer {} ({} remaining)",
                    style::ok("removed"),
                    id,
                    project.peers.len()
                );
            }
        }
    }
}
11522
/// Push a locally recorded `frontier.conflict_resolved` event to a peer hub,
/// signed by the resolution's original actor.
///
/// Flow: find the peer and the resolution event paired with
/// `conflict_event_id`, resolve the signing key (`--key` if given, else
/// `~/.config/vela/keys/<actor>.key`, else `…/private.key`), verify the key
/// matches the actor's registered public key, sign the event, and POST it to
/// `<peer>/entries/<vfr>/events`. Exits non-zero when the hub refuses.
fn cmd_federation_push_resolution(
    frontier: PathBuf,
    conflict_event_id: String,
    to: String,
    key: Option<PathBuf>,
    vfr_id: Option<String>,
    json: bool,
) {
    use crate::canonical;
    use crate::sign;

    let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

    // The destination must already be a registered peer.
    let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
        fail(&format!(
            "peer '{to}' not in registry; run `vela federation peer-add` first"
        ));
    };

    // Locate the resolution event whose payload references the conflict.
    let Some(resolution) = project
        .events
        .iter()
        .find(|e| {
            e.kind == "frontier.conflict_resolved"
                && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
                    == Some(conflict_event_id.as_str())
        })
        .cloned()
    else {
        fail(&format!(
            "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
            frontier.display()
        ));
    };

    // The push is signed as the actor who recorded the resolution, so that
    // actor must exist in this frontier's registry.
    let actor_id = resolution.actor.id.clone();
    let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
        fail(&format!(
            "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
            register the reviewer with `vela actor add` before pushing"
        ));
    };

    // Key lookup: prefer a per-actor key file (actor id sanitized for the
    // filesystem), falling back to the shared private.key.
    let key_path = key.unwrap_or_else(|| {
        let home = std::env::var("HOME").unwrap_or_default();
        let base = PathBuf::from(home)
            .join(".config")
            .join("vela")
            .join("keys");
        let safe_id = actor.id.replace([':', '/'], "_");
        let by_actor = base.join(format!("{safe_id}.key"));
        if by_actor.exists() {
            by_actor
        } else {
            base.join("private.key")
        }
    });

    let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
        fail_return(&format!(
            "load private key from {}: {e}",
            key_path.display()
        ))
    });
    // Refuse to sign with a key that does not match the actor's registered
    // public key; otherwise the hub would reject (or worse, accept) a
    // mis-attributed event.
    let pubkey_hex = sign::pubkey_hex(&signing_key);
    if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
        fail(&format!(
            "private key at {} does not match actor {}'s registered public key. \
            Loaded pubkey {}, expected {}.",
            key_path.display(),
            actor.id,
            &pubkey_hex[..16],
            &actor.public_key[..16]
        ));
    }

    let signature_hex = sign::sign_event(&resolution, &signing_key)
        .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));

    // The POSTed body is the event minus its embedded signature (the
    // signature travels in the X-Vela-Signature header instead).
    let mut body = resolution.clone();
    body.signature = None;
    let body_value =
        serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
    // NOTE(review): result discarded — appears to be a pre-flight check that
    // the body canonicalizes (so the hub can verify the signature); confirm.
    let _canonical_check = canonical::to_canonical_bytes(&body_value)
        .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));

    let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
    let url = format!(
        "{}/entries/{}/events",
        peer.url.trim_end_matches('/'),
        target_vfr
    );

    // Run the blocking reqwest client on a fresh thread — presumably to keep
    // it off any async runtime driving this process (blocking::Client panics
    // inside a tokio runtime); TODO confirm against the callers.
    let url_owned = url.clone();
    let pubkey_owned = pubkey_hex.clone();
    let signature_owned = signature_hex.clone();
    let body_owned = body_value.clone();
    let response: Result<(u16, String), String> = std::thread::spawn(move || {
        let client = reqwest::blocking::Client::new();
        let resp = client
            .post(&url_owned)
            .header("X-Vela-Signer-Pubkey", &pubkey_owned)
            .header("X-Vela-Signature", &signature_owned)
            .json(&body_owned)
            .send()
            .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
        let status = resp.status().as_u16();
        let text = resp.text().unwrap_or_default();
        Ok((status, text))
    })
    .join()
    .map_err(|_| "push thread panicked".to_string())
    .unwrap_or_else(|e| fail_return(&e));

    let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
    // Non-JSON hub replies are wrapped as {"raw": …} so output stays valid.
    let parsed: serde_json::Value =
        serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));

    let accepted = matches!(status, 200..=202);
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": accepted,
                "command": "federation.push-resolution",
                "frontier": frontier.display().to_string(),
                "peer_id": to,
                "url": url,
                "conflict_event_id": conflict_event_id,
                "event_id": resolution.id,
                "actor_id": actor.id,
                "http_status": status,
                "response": parsed,
            }))
            .expect("serialize federation.push-resolution")
        );
    } else if accepted {
        println!(
            "{} resolution {} pushed to {} (HTTP {})",
            style::ok("ok"),
            &resolution.id[..16.min(resolution.id.len())],
            to,
            status
        );
        println!(" url: {url}");
        println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
    } else {
        // Hub refused the event: report and exit with a failure status.
        println!("{} push refused (HTTP {})", style::lost("rejected"), status);
        println!(" url: {url}");
        println!(" response: {text}");
        std::process::exit(1);
    }
}
11699
/// Dispatch for `vela queue …`: list queued actions, clear the queue, or
/// batch-sign queued actions with a local private key.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        // Show queued actions; an omitted --queue-file uses the default path.
        QueueAction::List { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        // Drop every queued action and report how many were removed.
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        // Sign-and-apply each queued action. Declined (interactive prompt)
        // and failed actions remain in the queue; successes are removed.
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // Key file holds the hex-encoded private key on one line.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                // Per-action confirmation unless --yes-to-all was passed.
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Keep failed actions queued so they can be retried.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Rewrite the queue with only the unprocessed/failed actions.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11841
11842fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11843 let bytes = hex::decode(hex_str)
11844 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11845 let key_bytes: [u8; 32] = bytes
11846 .try_into()
11847 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11848 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11849}
11850
11851fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11852 use std::io::{self, BufRead, Write};
11853 let mut stdout = io::stdout().lock();
11854 let _ = writeln!(
11855 stdout,
11856 " sign {} on {}? [y/N] ",
11857 action.kind,
11858 action.frontier.display()
11859 );
11860 let _ = stdout.flush();
11861 drop(stdout);
11862 let stdin = io::stdin();
11863 let mut line = String::new();
11864 if stdin.lock().read_line(&mut line).is_err() {
11865 return false;
11866 }
11867 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11868}
11869
/// Sign one queued action with `signing_key` and apply it to the action's
/// frontier file.
///
/// Supported kinds are the four `propose_*` variants (which build and apply
/// a proposal) and `accept_proposal` / `reject_proposal` (which review an
/// existing proposal). Returns a short human-readable receipt string on
/// success, or an error message when a required arg is missing, the kind is
/// unknown, or the underlying apply fails.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queue's action kind onto the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Each kind carries a different payload shape; retract is empty.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Preserve the queued timestamp if present, else stamp now.
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Id is content-derived, so it must be computed after created_at
            // is finalized.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature is computed but discarded — it is
            // never attached to the proposal before create_or_apply. Looks
            // like only the key material is being exercised; confirm intent.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision for signing.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): this signature is also discarded (see above) —
            // accept/reject_at_path are called without it; confirm intent.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11991
11992fn cmd_entity(action: EntityAction) {
12004 use crate::entity_resolve;
12005 match action {
12006 EntityAction::Resolve {
12007 frontier,
12008 force,
12009 json,
12010 } => {
12011 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12012 let report = entity_resolve::resolve_frontier(&mut p, force);
12013 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12014 if json {
12015 println!(
12016 "{}",
12017 serde_json::to_string_pretty(&serde_json::json!({
12018 "ok": true,
12019 "command": "entity.resolve",
12020 "frontier_path": frontier.display().to_string(),
12021 "report": report,
12022 }))
12023 .expect("serialize")
12024 );
12025 } else {
12026 println!(
12027 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
12028 style::ok("entity"),
12029 report.resolved,
12030 report.total_entities,
12031 report.already_resolved,
12032 report.unresolved_count,
12033 report.findings_touched,
12034 );
12035 let unresolved_summary: std::collections::BTreeSet<&str> = report
12036 .per_finding
12037 .iter()
12038 .flat_map(|f| f.unresolved.iter().map(String::as_str))
12039 .collect();
12040 if !unresolved_summary.is_empty() {
12041 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
12042 println!(
12043 " unresolved (first {}): {}",
12044 take.len(),
12045 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
12046 );
12047 }
12048 }
12049 }
12050 EntityAction::List { json } => {
12051 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
12052 .map(|(name, etype, source, id)| {
12053 serde_json::json!({
12054 "canonical_name": name,
12055 "entity_type": etype,
12056 "source": source,
12057 "id": id,
12058 })
12059 })
12060 .collect();
12061 if json {
12062 println!(
12063 "{}",
12064 serde_json::to_string_pretty(&serde_json::json!({
12065 "ok": true,
12066 "command": "entity.list",
12067 "count": entries.len(),
12068 "entries": entries,
12069 }))
12070 .expect("serialize")
12071 );
12072 } else {
12073 println!("{} {} bundled entries", style::ok("entity"), entries.len());
12074 for e in &entries {
12075 println!(
12076 " {:32} {:18} {} {}",
12077 e["canonical_name"].as_str().unwrap_or("?"),
12078 e["entity_type"].as_str().unwrap_or("?"),
12079 e["source"].as_str().unwrap_or("?"),
12080 e["id"].as_str().unwrap_or("?"),
12081 );
12082 }
12083 }
12084 }
12085 }
12086}
12087
/// Dispatch for `vela link …`: add a typed link from one finding to another,
/// locally (`vf_<hex>`) or cross-frontier (`vf_<hex>@vfr_<hex>`).
///
/// Validates the link type and provenance, checks the target exists (or that
/// a matching dep is declared for cross-frontier targets), optionally probes
/// the dep's locator over HTTP for stale/missing targets, then appends the
/// link, recomputes stats, and saves.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Up-front argument validation (fails the process on bad input).
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // The source finding must exist; remember its index for the push.
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // Local targets must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // Cross-frontier targets require a declared dep for their vfr.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check of the cross-frontier target: fetch the
            // dep's project from its HTTP(S) locator and warn (never fail) if
            // the target is superseded or missing. Any network/parse error is
            // silently skipped; --no-check-target disables the probe entirely.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Append the link, refresh derived stats, and persist.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Splice the optional warning into the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
12233
/// Dispatch `vela frontier <action>`: scaffold a new frontier file,
/// materialize its repo layout, manage cross-frontier dependencies
/// (add / list / remove / refresh against a hub), and show a diff window.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new` — write a fresh, empty frontier file at `path`.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force was given.
            // NOTE(review): `fail` is assumed to terminate the process; the
            // scaffold below would otherwise run anyway — confirm.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // Skeleton project: current schema/compiler versions, zeroed
            // stats, and every collection empty.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            // JSON payload mirrors the human-readable "next steps" below.
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                println!("  next steps:");
                println!(
                    "    1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!("    2. vela sign generate-keypair --out keys");
                println!(
                    "    3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    "    4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize` — delegate entirely to frontier_repo.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep` — declare a pinned cross-frontier dependency.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // At most one declaration per vfr_id; duplicates are an error.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            // Display name defaults to the vfr_id when --name is omitted.
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!("  locator: {locator}");
                println!("  snapshot: {snapshot}");
            }
        }
        // `frontier list-deps` — print every declared dependency, flagging
        // which are cross-frontier vs compile-time.
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    "  {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!("  {}", style::tick_row(60));
                if deps.is_empty() {
                    println!("  (no dependencies declared)");
                } else {
                    for d in &deps {
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!("  · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!("      vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!("      locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!("      snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep` — drop a cross-frontier dependency, but only
        // if no finding still links into it.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Referential-integrity guard: any finding with a cross-frontier
            // link targeting this vfr_id blocks removal.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            // `retain` count delta tells us whether anything matched.
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps` — re-pin every declared cross-frontier
        // dependency to the hub's latest snapshot hash.
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Only deps with a vfr_id are cross-frontier; others are skipped.
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to refresh: report an empty summary and bail early.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal view of a hub entry: only the latest snapshot hash.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            // Classify each dep: 404 → missing, other HTTP/parse failures →
            // unreachable, matching pin → unchanged, otherwise refreshed.
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-find the dep by vfr_id to mutate its pin.
                            // NOTE(review): the None arm looks defensive —
                            // deps are not removed during this loop.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Dry runs count as refreshed but do
                                        // not mutate the in-memory project.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Persist only when something actually changed.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                // Per-dep lines; snapshot hashes abbreviated to 16 chars.
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        "refreshed" => println!(
                            "  {vfr}  refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!("  {vfr}  unchanged"),
                        "missing" => println!("  {vfr}  missing on hub"),
                        _ => println!("  {vfr}  unreachable"),
                    }
                }
            }
        }
        // `frontier diff` — delegated to the dedicated helper below.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
12669
12670fn cmd_repo(action: RepoAction) {
12671 match action {
12672 RepoAction::Status { frontier, json } => {
12673 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
12674 if json {
12675 println!(
12676 "{}",
12677 serde_json::to_string_pretty(&payload)
12678 .expect("failed to serialize repo status")
12679 );
12680 } else {
12681 let summary = payload.get("summary").unwrap_or(&Value::Null);
12682 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
12683 println!("vela repo status");
12684 println!(" frontier: {}", frontier.display());
12685 println!(
12686 " events: {}",
12687 summary
12688 .get("accepted_events")
12689 .and_then(Value::as_u64)
12690 .unwrap_or_default()
12691 );
12692 println!(
12693 " open proposals: {}",
12694 summary
12695 .get("open_proposals")
12696 .and_then(Value::as_u64)
12697 .unwrap_or_default()
12698 );
12699 println!(
12700 " state: {}",
12701 freshness
12702 .get("materialized_state")
12703 .and_then(Value::as_str)
12704 .unwrap_or("unknown")
12705 );
12706 println!(
12707 " proof: {}",
12708 freshness
12709 .get("proof")
12710 .and_then(Value::as_str)
12711 .unwrap_or("unknown")
12712 );
12713 }
12714 }
12715 RepoAction::Doctor { frontier, json } => {
12716 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
12717 if json {
12718 println!(
12719 "{}",
12720 serde_json::to_string_pretty(&payload)
12721 .expect("failed to serialize repo doctor")
12722 );
12723 } else {
12724 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12725 let issues = payload
12726 .get("issues")
12727 .and_then(Value::as_array)
12728 .map_or(0, Vec::len);
12729 println!("vela repo doctor");
12730 println!(" frontier: {}", frontier.display());
12731 println!(" status: {}", if ok { "ok" } else { "needs attention" });
12732 println!(" issues: {issues}");
12733 }
12734 }
12735 }
12736}
12737
12738fn cmd_proof_verify(frontier: &Path, json_output: bool) {
12739 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
12740 if json_output {
12741 println!(
12742 "{}",
12743 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
12744 );
12745 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12746 std::process::exit(1);
12747 }
12748 } else {
12749 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12750 println!("vela proof verify");
12751 println!(" frontier: {}", frontier.display());
12752 println!(" status: {}", if ok { "ok" } else { "failed" });
12753 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
12754 for issue in issues {
12755 if let Some(message) = issue.get("message").and_then(Value::as_str) {
12756 println!(" issue: {message}");
12757 }
12758 }
12759 }
12760 if !ok {
12761 std::process::exit(1);
12762 }
12763 }
12764}
12765
12766fn cmd_proof_explain(frontier: &Path) {
12767 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
12768 print!("{text}");
12769}
12770
/// Summarize frontier activity inside a time window.
///
/// The window is either `[--since, now)` when `since` is given, or the ISO
/// week `[Monday 00:00, next Monday 00:00)` named by `--week` (defaulting to
/// the current week). Findings are classified as added (created in window),
/// updated (updated in window but created earlier), or new contradictions
/// (added AND contested / of assertion_type "tension").
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the half-open window [window_start, window_end) plus an
    // optional human label for the ISO-week form.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    // Running total of findings created any time before the window's end.
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are silently ignored (treated as absent).
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        // "added" wins over "updated": a finding created in-window is never
        // also counted as updated (note the `continue`).
        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact per-finding JSON summary shared by all three lists.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable report: header, totals, then per-finding lines.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        "  {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!(
        "  range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!("  added: {}", added.len());
    println!("  updated: {}", updated.len());
    println!("  contradictions: {}", new_contradictions.len());
    println!("  cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!("  (quiet window — no findings added or updated)");
    } else {
        println!();
        println!("  added:");
        for f in &added {
            println!(
                "    · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!("  updated:");
            for f in &updated {
                println!(
                    "    · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12934
/// Truncate `s` to at most `n` characters (counting `char`s, not bytes),
/// replacing the tail with a single `…` when anything was cut.
///
/// Counting by `char` means multi-byte text is never split mid-codepoint.
/// Edge case: `n == 0` now yields an empty string; previously a non-empty
/// input returned a lone "…" — one char longer than the requested maximum.
fn truncate(s: &str, n: usize) -> String {
    if s.chars().count() <= n {
        return s.to_string();
    }
    if n == 0 {
        return String::new();
    }
    // Keep n-1 chars and spend the final slot on the ellipsis, so the
    // result is exactly n chars long.
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
12944
12945fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12947 use chrono::Datelike;
12948 let iso = d.iso_week();
12949 format!("{:04}-W{:02}", iso.year(), iso.week())
12950}
12951
12952fn iso_week_bounds(
12955 key: &str,
12956) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12957 let (year_str, week_str) = key
12958 .split_once("-W")
12959 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12960 let year: i32 = year_str
12961 .parse()
12962 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12963 let week: u32 = week_str
12964 .parse()
12965 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12966 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12967 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12968 let next_monday = monday + chrono::Duration::days(7);
12969 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12970 let end = next_monday
12971 .and_hms_opt(0, 0, 0)
12972 .expect("00:00 valid")
12973 .and_utc();
12974 Ok((start, end))
12975}
12976
/// Handle `vela registry <action>`: query reverse dependencies on a hub,
/// mirror an entry between hubs, list a registry, publish a signed entry,
/// and pull (optionally transitively) + verify a frontier.
fn cmd_registry(action: RegistryAction) {
    use crate::registry;
    // Default local registry file: $HOME/.vela/registry/entries.json,
    // falling back to the current directory when HOME is unset.
    let default_registry = || -> PathBuf {
        let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
        PathBuf::from(home)
            .join(".vela")
            .join("registry")
            .join("entries.json")
    };
    match action {
        // `registry depends-on` — ask a hub which frontiers depend on vfr_id.
        RegistryAction::DependsOn { vfr_id, from, json } => {
            let base = from.trim_end_matches('/');
            let url = format!("{base}/entries/{vfr_id}/depends-on");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
            let resp = client
                .get(&url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
            if !resp.status().is_success() {
                fail(&format!("GET {url}: HTTP {}", resp.status()));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&body).expect("serialize")
                );
            } else {
                // Missing/invalid "dependents" degrades to an empty list.
                let dependents = body
                    .get("dependents")
                    .and_then(|v| v.as_array())
                    .cloned()
                    .unwrap_or_default();
                let count = dependents.len();
                println!(
                    "{} {count} {} on {vfr_id}",
                    style::ok("registry"),
                    if count == 1 {
                        "frontier depends"
                    } else {
                        "frontiers depend"
                    },
                );
                for e in &dependents {
                    let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
                    let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
                    let o = e
                        .get("owner_actor_id")
                        .and_then(|v| v.as_str())
                        .unwrap_or("?");
                    println!("    {v}  {n}  ({o})");
                }
            }
        }
        // `registry mirror` — copy one entry from a source hub to a
        // destination hub, verbatim.
        RegistryAction::Mirror {
            vfr_id,
            from,
            to,
            json,
        } => {
            let src_base = from.trim_end_matches('/');
            let dst_base = to.trim_end_matches('/');
            let src_url = format!("{src_base}/entries/{vfr_id}");
            let dst_url = format!("{dst_base}/entries");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            // Fetch the entry as raw JSON so no fields are dropped in
            // transit by a narrower local schema.
            let entry: serde_json::Value = client
                .get(&src_url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .error_for_status()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));

            let resp = client
                .post(&dst_url)
                .header("content-type", "application/json")
                .body(
                    serde_json::to_vec(&entry)
                        .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
                )
                .send()
                .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
            let status = resp.status();
            if !status.is_success() {
                // Error bodies are clipped to 300 chars in the message.
                let body = resp.text().unwrap_or_default();
                fail(&format!(
                    "POST {dst_url}: HTTP {status}: {}",
                    body.chars().take(300).collect::<String>()
                ));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
            // The destination reports whether it already knew this signature.
            let duplicate = body
                .get("duplicate")
                .and_then(serde_json::Value::as_bool)
                .unwrap_or(false);
            let payload = json!({
                "ok": true,
                "command": "registry.mirror",
                "vfr_id": vfr_id,
                "from": src_base,
                "to": dst_base,
                "duplicate_on_destination": duplicate,
                "destination_response": body,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize")
                );
            } else {
                println!(
                    "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
                    style::ok("registry"),
                    if duplicate {
                        " (duplicate; signature already known)"
                    } else {
                        " (fresh insert)"
                    }
                );
            }
        }
        // `registry list` — print every entry of a local or remote registry.
        RegistryAction::List { from, json } => {
            // `from` may be an http(s) URL, a local locator, or absent
            // (default registry path).
            let (label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let r = registry_data;
            let path_label = label;
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.list",
                    "registry": path_label,
                    "entry_count": r.entries.len(),
                    "entries": r.entries,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.list")
                );
            } else {
                println!();
                println!(
                    "  {}",
                    format!("VELA · REGISTRY · LIST · {}", path_label)
                        .to_uppercase()
                        .dimmed()
                );
                println!("  {}", style::tick_row(60));
                if r.entries.is_empty() {
                    println!("  (registry is empty)");
                } else {
                    for entry in &r.entries {
                        println!(
                            "    {}  {}  ({})  by {}  published {}",
                            entry.vfr_id,
                            entry.name,
                            entry.network_locator,
                            entry.owner_actor_id,
                            entry.signed_publish_at
                        );
                    }
                }
            }
        }
        // `registry publish` — sign the frontier's current snapshot/event-log
        // hashes with the owner's key and push the entry to a hub or a
        // local registry file.
        RegistryAction::Publish {
            frontier,
            owner,
            key,
            locator,
            to,
            json,
        } => {
            // The key file holds a hex-encoded private key; derive the
            // matching public key for the ownership check below.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let derived = hex::encode(signing_key.verifying_key().to_bytes());

            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            // Unknown owner: auto-register an actor with the derived pubkey
            // and persist the frontier before continuing.
            // NOTE(review): this write happens before the key-match check —
            // confirm that is intended (it only runs when the actor is new,
            // so derived == pubkey in that branch by construction).
            let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
                Some(actor) => actor.public_key.clone(),
                None => {
                    eprintln!(
                        "  vela registry publish · auto-registering actor {owner} (derived pubkey {})",
                        &derived[..16]
                    );
                    frontier_data.actors.push(sign::ActorRecord {
                        id: owner.clone(),
                        public_key: derived.clone(),
                        algorithm: "ed25519".to_string(),
                        created_at: chrono::Utc::now().to_rfc3339(),
                        tier: None,
                        orcid: None,
                        access_clearance: None,
                    });
                    repo::save_to_path(&frontier, &frontier_data)
                        .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
                    derived.clone()
                }
            };

            // Hashes are computed after the possible actor insertion above,
            // so the published snapshot includes the new actor record.
            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            if derived != pubkey {
                fail(&format!(
                    "private key does not match registered pubkey for owner '{owner}'"
                ));
            }

            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            // Locator: explicit --locator wins; otherwise synthesize the
            // hub's snapshot URL for remote targets, and require one locally.
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                             --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            // Build the entry with an empty signature, then sign it.
            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: owner.clone(),
                owner_pubkey: pubkey,
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature =
                registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));

            // Remote publishes report duplicates; local ones always report
            // false.
            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                (registry_path.display().to_string(), false)
            };

            let payload = json!({
                "ok": true,
                "command": "registry.publish",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "name": name,
                "owner": owner,
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "signed_publish_at": entry.signed_publish_at,
                "signature": entry.signature,
                "duplicate": duplicate,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.publish")
                );
            } else {
                let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
                println!(
                    "{} published {vfr_id} → {}{}",
                    style::ok("registry"),
                    registry_label,
                    dup_suffix
                );
                println!("  snapshot: {}", entry.latest_snapshot_hash);
                println!("  event_log: {}", entry.latest_event_log_hash);
                // NOTE(review): assumes signatures are always ≥ 16 chars
                // (hex ed25519 signatures are 128) — confirm.
                println!("  signature: {}…", &entry.signature[..16]);
            }
        }
        // `registry pull` — fetch a frontier by vfr_id and verify it against
        // the registry's hashes; optionally pull its dependency closure.
        RegistryAction::Pull {
            vfr_id,
            from,
            out,
            transitive,
            depth,
            json,
        } => {
            // Same three-way source resolution as `registry list`.
            let (registry_label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let entry = registry::find_latest(&registry_data, &vfr_id)
                .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));

            // Transitive mode: pull the whole dependency closure (bounded by
            // `depth`) into `out` as a directory, then report and return.
            if transitive {
                let result = registry::pull_transitive(&registry_data, &vfr_id, &out, depth)
                    .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));

                let dep_paths_json: serde_json::Value = serde_json::Value::Object(
                    result
                        .deps
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
                        .collect(),
                );
                let payload = json!({
                    "ok": true,
                    "command": "registry.pull",
                    "registry": registry_label,
                    "vfr_id": vfr_id,
                    "transitive": true,
                    "depth": depth,
                    "out_dir": out.display().to_string(),
                    "primary": result.primary_path.display().to_string(),
                    "verified": result.verified,
                    "deps": dep_paths_json,
                });
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("failed to serialize registry.pull")
                    );
                } else {
                    println!(
                        "{} pulled {vfr_id} (transitive) → {}",
                        style::ok("registry"),
                        out.display()
                    );
                    println!("  verified {} frontier(s):", result.verified.len());
                    for v in &result.verified {
                        println!("    · {v}");
                    }
                    println!("  every cross-frontier dependency's pinned snapshot hash matched");
                }
                return;
            }

            // Single-file mode: fetch, then verify; a failed verification
            // removes the partially-written file before exiting.
            registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
                .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
            registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
                let _ = std::fs::remove_file(&out);
                fail_return(&format!("pull verification failed: {e}"))
            });

            let payload = json!({
                "ok": true,
                "command": "registry.pull",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "out": out.display().to_string(),
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "verified": true,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.pull")
                );
            } else {
                println!(
                    "{} pulled {vfr_id} → {}",
                    style::ok("registry"),
                    out.display()
                );
                println!("  verified snapshot+event_log hashes match registry; signature ok");
            }
        }
    }
}
13450
13451fn print_stats_json(path: &Path) {
13452 let frontier = load_frontier_or_fail(path);
13453 let source_hash = hash_path_or_fail(path);
13454 let payload = json!({
13455 "ok": true,
13456 "command": "stats",
13457 "schema_version": project::VELA_SCHEMA_VERSION,
13458 "frontier": {
13459 "name": &frontier.project.name,
13460 "description": &frontier.project.description,
13461 "source": path.display().to_string(),
13462 "hash": format!("sha256:{source_hash}"),
13463 "compiled_at": &frontier.project.compiled_at,
13464 "compiler": &frontier.project.compiler,
13465 "papers_processed": frontier.project.papers_processed,
13466 "errors": frontier.project.errors,
13467 },
13468 "stats": frontier.stats,
13469 "proposals": proposals::summary(&frontier),
13470 "proof_state": frontier.proof_state,
13471 });
13472 println!(
13473 "{}",
13474 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
13475 );
13476}
13477
13478fn cmd_search(
13479 source: Option<&Path>,
13480 query: &str,
13481 entity: Option<&str>,
13482 assertion_type: Option<&str>,
13483 all: Option<&Path>,
13484 limit: usize,
13485 json_output: bool,
13486) {
13487 if let Some(dir) = all {
13488 search::run_all(dir, query, entity, assertion_type, limit);
13489 return;
13490 }
13491 let Some(src) = source else {
13492 fail("Provide --source <frontier> or --all <directory>.");
13493 };
13494 if json_output {
13495 let results = search::search(src, query, entity, assertion_type, limit);
13496 let loaded = load_frontier_or_fail(src);
13497 let source_hash = hash_path_or_fail(src);
13498 let payload = json!({
13499 "ok": true,
13500 "command": "search",
13501 "schema_version": project::VELA_SCHEMA_VERSION,
13502 "query": query,
13503 "frontier": {
13504 "name": &loaded.project.name,
13505 "source": src.display().to_string(),
13506 "hash": format!("sha256:{source_hash}"),
13507 },
13508 "filters": {
13509 "entity": entity,
13510 "assertion_type": assertion_type,
13511 "limit": limit,
13512 },
13513 "count": results.len(),
13514 "results": results.iter().map(|result| json!({
13515 "id": &result.id,
13516 "score": result.score,
13517 "assertion": &result.assertion,
13518 "assertion_type": &result.assertion_type,
13519 "confidence": result.confidence,
13520 "entities": &result.entities,
13521 "doi": &result.doi,
13522 })).collect::<Vec<_>>()
13523 });
13524 println!(
13525 "{}",
13526 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
13527 );
13528 } else {
13529 search::run(src, query, entity, assertion_type, limit);
13530 }
13531}
13532
13533fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
13534 let frontier = load_frontier_or_fail(source);
13535 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
13536 if json_output {
13537 let source_hash = hash_path_or_fail(source);
13538 let payload = json!({
13539 "ok": true,
13540 "command": "tensions",
13541 "schema_version": project::VELA_SCHEMA_VERSION,
13542 "frontier": {
13543 "name": &frontier.project.name,
13544 "source": source.display().to_string(),
13545 "hash": format!("sha256:{source_hash}"),
13546 },
13547 "filters": {
13548 "both_high": both_high,
13549 "cross_domain": cross_domain,
13550 "top": top,
13551 },
13552 "count": result.len(),
13553 "tensions": result.iter().map(|t| json!({
13554 "score": t.score,
13555 "resolved": t.resolved,
13556 "superseding_id": &t.superseding_id,
13557 "finding_a": {
13558 "id": &t.finding_a.id,
13559 "assertion": &t.finding_a.assertion,
13560 "confidence": t.finding_a.confidence,
13561 "assertion_type": &t.finding_a.assertion_type,
13562 "citation_count": t.finding_a.citation_count,
13563 "contradicts_count": t.finding_a.contradicts_count,
13564 },
13565 "finding_b": {
13566 "id": &t.finding_b.id,
13567 "assertion": &t.finding_b.assertion,
13568 "confidence": t.finding_b.confidence,
13569 "assertion_type": &t.finding_b.assertion_type,
13570 "citation_count": t.finding_b.citation_count,
13571 "contradicts_count": t.finding_b.contradicts_count,
13572 }
13573 })).collect::<Vec<_>>()
13574 });
13575 println!(
13576 "{}",
13577 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
13578 );
13579 } else {
13580 tensions::print_tensions(&result);
13581 }
13582}
13583
13584fn cmd_gaps(action: GapsAction) {
13585 match action {
13586 GapsAction::Rank {
13587 frontier,
13588 top,
13589 domain,
13590 json,
13591 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
13592 }
13593}
13594
13595fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
13596 let frontier = load_frontier_or_fail(frontier_path);
13597 let mut ranked = frontier
13598 .findings
13599 .iter()
13600 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
13601 .filter(|finding| {
13602 domain.is_none_or(|domain| {
13603 finding
13604 .assertion
13605 .text
13606 .to_lowercase()
13607 .contains(&domain.to_lowercase())
13608 || finding
13609 .assertion
13610 .entities
13611 .iter()
13612 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
13613 })
13614 })
13615 .map(|finding| {
13616 let dependency_count = frontier
13617 .findings
13618 .iter()
13619 .flat_map(|candidate| candidate.links.iter())
13620 .filter(|link| link.target == finding.id)
13621 .count();
13622 let score = dependency_count as f64 + finding.confidence.score;
13623 json!({
13624 "id": &finding.id,
13625 "kind": "candidate_gap_review_lead",
13626 "assertion": &finding.assertion.text,
13627 "score": score,
13628 "dependency_count": dependency_count,
13629 "confidence": finding.confidence.score,
13630 "evidence_type": &finding.evidence.evidence_type,
13631 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
13632 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
13633 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
13634 })
13635 })
13636 .collect::<Vec<_>>();
13637 ranked.sort_by(|a, b| {
13638 b.get("score")
13639 .and_then(Value::as_f64)
13640 .partial_cmp(&a.get("score").and_then(Value::as_f64))
13641 .unwrap_or(std::cmp::Ordering::Equal)
13642 });
13643 ranked.truncate(top);
13644 if json_output {
13645 let source_hash = hash_path_or_fail(frontier_path);
13646 let payload = json!({
13647 "ok": true,
13648 "command": "gaps rank",
13649 "schema_version": project::VELA_SCHEMA_VERSION,
13650 "frontier": {
13651 "name": &frontier.project.name,
13652 "source": frontier_path.display().to_string(),
13653 "hash": format!("sha256:{source_hash}"),
13654 },
13655 "filters": {
13656 "top": top,
13657 "domain": domain,
13658 },
13659 "count": ranked.len(),
13660 "ranking_label": "candidate gap review leads",
13661 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
13662 "review_leads": ranked.clone(),
13663 "gaps": ranked,
13664 });
13665 println!(
13666 "{}",
13667 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
13668 );
13669 } else {
13670 println!();
13671 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
13672 println!(" {}", style::tick_row(60));
13673 println!(" review source scope; these are not guaranteed experiment targets.");
13674 println!();
13675 for (idx, gap) in ranked.iter().enumerate() {
13676 println!(
13677 " {}. [{}] score={} {}",
13678 idx + 1,
13679 gap["id"].as_str().unwrap_or("?"),
13680 gap["score"].as_f64().unwrap_or(0.0),
13681 gap["assertion"].as_str().unwrap_or("")
13682 );
13683 }
13684 }
13685}
13686
13687async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
13688 if inputs.len() < 2 {
13689 fail("need at least 2 frontier files for bridge detection.");
13690 }
13691 println!();
13692 println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
13693 println!(" {}", style::tick_row(60));
13694 println!(" loading {} frontiers...", inputs.len());
13695 let mut named_projects = Vec::<(String, project::Project)>::new();
13696 let mut total_findings = 0;
13697 for path in inputs {
13698 let frontier = load_frontier_or_fail(path);
13699 let name = path
13700 .file_stem()
13701 .unwrap_or_default()
13702 .to_string_lossy()
13703 .to_string();
13704 println!(" {} · {} findings", name, frontier.stats.findings);
13705 total_findings += frontier.stats.findings;
13706 named_projects.push((name, frontier));
13707 }
13708 let refs = named_projects
13709 .iter()
13710 .map(|(name, frontier)| (name.as_str(), frontier))
13711 .collect::<Vec<_>>();
13712 let mut bridges = bridge::detect_bridges(&refs);
13713 if check_novelty && !bridges.is_empty() {
13714 let client = Client::new();
13715 let check_count = bridges.len().min(top_n);
13716 println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
13717 for bridge_item in bridges.iter_mut().take(check_count) {
13718 let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
13719 match bridge::check_novelty(&client, &query).await {
13720 Ok(count) => bridge_item.pubmed_count = Some(count),
13721 Err(e) => eprintln!(
13722 " {} prior-art check failed for {}: {e}",
13723 style::err_prefix(),
13724 bridge_item.entity_name
13725 ),
13726 }
13727 tokio::time::sleep(std::time::Duration::from_millis(350)).await;
13728 }
13729 }
13730 print!("{}", bridge::format_report(&bridges, total_findings));
13731}
13732
/// Parsed CLI arguments for the `bench` command; consumed by `cmd_bench`.
struct BenchArgs {
    // Frontier file to benchmark; cmd_bench defaults this to
    // "frontiers/bbb-alzheimer.json" when absent.
    frontier: Option<PathBuf>,
    // Gold annotations for the finding-level benchmark.
    gold: Option<PathBuf>,
    // Gold annotations for the entity-level benchmark.
    entity_gold: Option<PathBuf>,
    // Gold annotations for the link-level benchmark.
    link_gold: Option<PathBuf>,
    // Suite definition; when set, takes precedence over the gold-file modes.
    // Defaults to "benchmarks/suites/bbb-core.json" in --suite-ready mode.
    suite: Option<PathBuf>,
    // When set, only emit the suite readiness report and exit.
    suite_ready: bool,
    // Pass/fail thresholds; min_f1 defaults to 0.05 unless no_thresholds.
    min_f1: Option<f64>,
    min_precision: Option<f64>,
    min_recall: Option<f64>,
    // Disable all pass/fail thresholds.
    no_thresholds: bool,
    // Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
13746
13747fn cmd_agent_bench(
13752 gold: &Path,
13753 candidate: &Path,
13754 sources: Option<&Path>,
13755 threshold: Option<f64>,
13756 report_path: Option<&Path>,
13757 json_out: bool,
13758) {
13759 let input = crate::agent_bench::BenchInput {
13760 gold_path: gold.to_path_buf(),
13761 candidate_path: candidate.to_path_buf(),
13762 sources: sources.map(Path::to_path_buf),
13763 threshold: threshold.unwrap_or(0.0),
13764 };
13765 let report = match crate::agent_bench::run(input) {
13766 Ok(r) => r,
13767 Err(e) => {
13768 eprintln!("{} bench failed: {e}", style::err_prefix());
13769 std::process::exit(1);
13770 }
13771 };
13772
13773 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
13774 if let Some(path) = report_path
13775 && let Err(e) = std::fs::write(path, &json)
13776 {
13777 eprintln!(
13778 "{} failed to write report to {}: {e}",
13779 style::err_prefix(),
13780 path.display()
13781 );
13782 }
13783
13784 if json_out {
13785 println!("{json}");
13786 } else {
13787 println!();
13788 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
13789 println!(" {}", style::tick_row(60));
13790 print!("{}", crate::agent_bench::render_pretty(&report));
13791 println!();
13792 }
13793
13794 if !report.pass {
13795 std::process::exit(1);
13796 }
13797}
13798
13799fn cmd_bench(args: BenchArgs) {
13800 if args.suite_ready {
13801 let suite_path = args
13802 .suite
13803 .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
13804 let payload =
13805 benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
13806 println!(
13807 "{}",
13808 serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
13809 );
13810 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13811 std::process::exit(1);
13812 }
13813 return;
13814 }
13815 if let Some(suite_path) = args.suite {
13816 let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
13817 if args.json {
13818 println!(
13819 "{}",
13820 serde_json::to_string_pretty(&payload)
13821 .expect("failed to serialize benchmark suite")
13822 );
13823 } else {
13824 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
13825 let metrics = payload.get("metrics").unwrap_or(&Value::Null);
13826 println!();
13827 println!(" {}", "VELA · BENCH · SUITE".dimmed());
13828 println!(" {}", style::tick_row(60));
13829 println!(" suite: {}", suite_path.display());
13830 println!(
13831 " status: {}",
13832 if ok {
13833 style::ok("pass")
13834 } else {
13835 style::lost("fail")
13836 }
13837 );
13838 println!(
13839 " tasks: {}/{} passed",
13840 metrics
13841 .get("tasks_passed")
13842 .and_then(Value::as_u64)
13843 .unwrap_or(0),
13844 metrics
13845 .get("tasks_total")
13846 .and_then(Value::as_u64)
13847 .unwrap_or(0)
13848 );
13849 }
13850 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13851 std::process::exit(1);
13852 }
13853 return;
13854 }
13855
13856 let frontier = args
13857 .frontier
13858 .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
13859 let thresholds = benchmark::BenchmarkThresholds {
13860 min_f1: if args.no_thresholds {
13861 None
13862 } else {
13863 args.min_f1.or(Some(0.05))
13864 },
13865 min_precision: if args.no_thresholds {
13866 None
13867 } else {
13868 args.min_precision
13869 },
13870 min_recall: if args.no_thresholds {
13871 None
13872 } else {
13873 args.min_recall
13874 },
13875 ..Default::default()
13876 };
13877 if let Some(path) = args.link_gold {
13878 print_benchmark_or_exit(benchmark::task_envelope(
13879 &frontier,
13880 None,
13881 benchmark::BenchmarkMode::Link,
13882 Some(&path),
13883 &thresholds,
13884 None,
13885 ));
13886 } else if let Some(path) = args.entity_gold {
13887 print_benchmark_or_exit(benchmark::task_envelope(
13888 &frontier,
13889 None,
13890 benchmark::BenchmarkMode::Entity,
13891 Some(&path),
13892 &thresholds,
13893 None,
13894 ));
13895 } else if let Some(path) = args.gold {
13896 if args.json {
13897 print_benchmark_or_exit(benchmark::task_envelope(
13898 &frontier,
13899 None,
13900 benchmark::BenchmarkMode::Finding,
13901 Some(&path),
13902 &thresholds,
13903 None,
13904 ));
13905 } else {
13906 benchmark::run(&frontier, &path, false);
13907 }
13908 } else {
13909 fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
13910 }
13911}
13912
13913fn print_benchmark_or_exit(result: Result<Value, String>) {
13914 let payload = result.unwrap_or_else(|e| fail_return(&e));
13915 println!(
13916 "{}",
13917 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13918 );
13919 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13920 std::process::exit(1);
13921 }
13922}
13923
13924fn cmd_packet(action: PacketAction) {
13925 let (result, json_output) = match action {
13926 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13927 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13928 };
13929 match result {
13930 Ok(output) if json_output => {
13931 println!(
13932 "{}",
13933 serde_json::to_string_pretty(&json!({
13934 "ok": true,
13935 "command": "packet",
13936 "result": output,
13937 }))
13938 .expect("failed to serialize packet response")
13939 );
13940 }
13941 Ok(output) => println!("{output}"),
13942 Err(e) => fail(&e),
13943 }
13944}
13945
13946fn cmd_verify(path: &Path, json_output: bool) {
13951 let result = packet::validate(path);
13952 match result {
13953 Ok(output) if json_output => {
13954 println!(
13955 "{}",
13956 serde_json::to_string_pretty(&json!({
13957 "ok": true,
13958 "command": "verify",
13959 "result": output,
13960 }))
13961 .expect("failed to serialize verify response")
13962 );
13963 }
13964 Ok(output) => {
13965 println!("{output}");
13966 println!(
13967 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13968 );
13969 }
13970 Err(e) => fail(&e),
13971 }
13972}
13973
13974fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13975 if path.join(".vela").exists() {
13976 fail(&format!(
13977 "already initialized: {} exists",
13978 path.join(".vela").display()
13979 ));
13980 }
13981 let payload = frontier_repo::initialize(
13982 path,
13983 frontier_repo::InitOptions {
13984 name,
13985 template,
13986 initialize_git,
13987 },
13988 )
13989 .unwrap_or_else(|e| fail_return(&e));
13990 if json_output {
13991 println!(
13992 "{}",
13993 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13994 );
13995 } else {
13996 println!(
13997 "{} initialized frontier repository in {}",
13998 style::ok("ok"),
13999 path.display()
14000 );
14001 }
14002}
14003
/// Handle `quickstart`: scaffold a ready-to-use frontier in one shot.
///
/// Re-invokes the current executable as a subprocess for each step, in
/// order: `init` (no git), `sign generate-keypair`, `actor add` (with the
/// fresh public key), and `finding add --apply` (with a placeholder or
/// user-supplied assertion). Any failed step aborts the whole command with
/// that step's stderr. Prints a JSON summary or a pretty next-steps block.
fn cmd_quickstart(
    path: &Path,
    name: &str,
    reviewer: &str,
    assertion: Option<&str>,
    keys_out: Option<&Path>,
    json_output: bool,
) {
    use std::process::Command;

    // Same guard as `cmd_init`: never touch an already-initialized repo.
    if path.join(".vela").exists() {
        fail(&format!(
            "already initialized: {} exists",
            path.join(".vela").display()
        ));
    }

    let exe = std::env::current_exe()
        .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
    // Keypair files default to <path>/keys unless --keys-out overrides.
    let keys_dir = keys_out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("keys"));
    let assertion_text =
        assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");

    // Runs one CLI step as a child process; aborts the quickstart (printing
    // the child's stderr) if the step exits non-zero.
    let run_step = |label: &str, args: &[&str]| -> std::process::Output {
        let out = Command::new(&exe)
            .args(args)
            .output()
            .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
        if !out.status.success() {
            let stderr = String::from_utf8_lossy(&out.stderr);
            fail(&format!("{label} failed:\n{stderr}"));
        }
        out
    };

    // Step 1: initialize the repository (git disabled; quickstart keeps it simple).
    run_step(
        "init",
        &[
            "init",
            path.to_string_lossy().as_ref(),
            "--name",
            name,
            "--no-git",
            "--json",
        ],
    );

    // Step 2: generate a signing keypair and capture its public key from
    // the step's JSON output.
    let keys_out_str = keys_dir.to_string_lossy().into_owned();
    let keypair_out = run_step(
        "sign.generate-keypair",
        &[
            "sign",
            "generate-keypair",
            "--out",
            keys_out_str.as_ref(),
            "--json",
        ],
    );
    let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
    let public_key = keypair_json
        .get("public_key")
        .and_then(|v| v.as_str())
        .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
        .to_string();

    // Step 3: register the reviewer as an actor bound to the new public key.
    run_step(
        "actor.add",
        &[
            "actor",
            "add",
            path.to_string_lossy().as_ref(),
            reviewer,
            "--pubkey",
            public_key.as_str(),
            "--json",
        ],
    );

    // Step 4: add (and immediately apply) a first finding so the repo is
    // non-empty; its id is optional in the step's JSON output.
    let finding_out = run_step(
        "finding.add",
        &[
            "finding",
            "add",
            path.to_string_lossy().as_ref(),
            "--assertion",
            assertion_text,
            "--author",
            reviewer,
            "--apply",
            "--json",
        ],
    );
    let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
    let finding_id = finding_json
        .get("finding_id")
        .and_then(|v| v.as_str())
        .map(str::to_string);

    if json_output {
        let payload = json!({
            "ok": true,
            "command": "quickstart",
            "frontier": path.display().to_string(),
            "name": name,
            "reviewer": reviewer,
            "public_key": public_key,
            "keys_dir": keys_dir.display().to_string(),
            "finding_id": finding_id,
            "next_steps": [
                format!("vela serve {}", path.display()),
                format!(
                    "vela ingest <paper.pdf|doi:...> --frontier {}",
                    path.display()
                ),
                format!("vela log {}", path.display()),
            ],
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · QUICKSTART · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" frontier: {}", path.display());
    println!(" name: {name}");
    println!(" reviewer: {reviewer}");
    println!(" keys: {}", keys_dir.display());
    // NOTE(review): this slice panics if the key is shorter than 16 bytes —
    // presumably keys are always long hex/base64 strings; confirm upstream.
    println!(" pubkey: {}…", &public_key[..16]);
    if let Some(id) = finding_id.as_deref() {
        println!(" finding: {id}");
    }
    println!();
    println!(" {}", style::ok("done"));
    println!(" next:");
    println!(" vela serve {}", path.display());
    println!(
        " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
        path.display()
    );
    println!(" vela log {}", path.display());
    println!();
}
14169
14170fn cmd_lock(path: &Path, check: bool, json_output: bool) {
14176 if check {
14177 cmd_lock_check(path, json_output);
14178 return;
14179 }
14180 let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
14181 if json_output {
14182 println!(
14183 "{}",
14184 serde_json::to_string_pretty(&json!({
14185 "ok": true,
14186 "command": "lock",
14187 "path": path.display().to_string(),
14188 "snapshot_hash": payload.get("snapshot_hash"),
14189 "event_log_hash": payload.get("event_log_hash"),
14190 "proposal_state_hash": payload.get("proposal_state_hash"),
14191 }))
14192 .expect("failed to serialize lock report")
14193 );
14194 return;
14195 }
14196 println!();
14197 println!(
14198 " {}",
14199 format!("VELA · LOCK · {}", path.display())
14200 .to_uppercase()
14201 .dimmed()
14202 );
14203 println!(" {}", style::tick_row(60));
14204 println!(
14205 " snapshot_hash: {}",
14206 payload
14207 .get("snapshot_hash")
14208 .and_then(|v| v.as_str())
14209 .unwrap_or("?")
14210 );
14211 println!(
14212 " event_log_hash: {}",
14213 payload
14214 .get("event_log_hash")
14215 .and_then(|v| v.as_str())
14216 .unwrap_or("?")
14217 );
14218 println!(
14219 " proposal_state_hash: {}",
14220 payload
14221 .get("proposal_state_hash")
14222 .and_then(|v| v.as_str())
14223 .unwrap_or("?")
14224 );
14225 println!();
14226 println!(" {}", style::ok("locked"));
14227}
14228
14229fn cmd_lock_check(path: &Path, json_output: bool) {
14230 use crate::frontier_repo::read_lock;
14231 let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
14232 let Some(lock) = lock else {
14233 fail("lock --check: no vela.lock found at path");
14234 };
14235 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
14236 let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
14237 let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
14238 let mut drift: Vec<String> = Vec::new();
14239 if lock.snapshot_hash != current_snapshot {
14240 drift.push(format!(
14241 "snapshot_hash: lock={} current={}",
14242 lock.snapshot_hash, current_snapshot
14243 ));
14244 }
14245 if lock.event_log_hash != current_event_log {
14246 drift.push(format!(
14247 "event_log_hash: lock={} current={}",
14248 lock.event_log_hash, current_event_log
14249 ));
14250 }
14251 let ok = drift.is_empty();
14252 if json_output {
14253 println!(
14254 "{}",
14255 serde_json::to_string_pretty(&json!({
14256 "ok": ok,
14257 "command": "lock.check",
14258 "path": path.display().to_string(),
14259 "drift": drift,
14260 "lock_snapshot_hash": lock.snapshot_hash,
14261 "current_snapshot_hash": current_snapshot,
14262 "lock_event_log_hash": lock.event_log_hash,
14263 "current_event_log_hash": current_event_log,
14264 "dependency_count": lock.dependencies.len(),
14265 }))
14266 .expect("failed to serialize lock check report")
14267 );
14268 } else {
14269 println!();
14270 println!(
14271 " {}",
14272 format!("VELA · LOCK · CHECK · {}", path.display())
14273 .to_uppercase()
14274 .dimmed()
14275 );
14276 println!(" {}", style::tick_row(60));
14277 if ok {
14278 println!(" snapshot_hash: {}", lock.snapshot_hash);
14279 println!(" event_log_hash: {}", lock.event_log_hash);
14280 println!(" dependencies pinned: {}", lock.dependencies.len());
14281 println!();
14282 println!(" {} on-disk state matches vela.lock", style::ok("ok"));
14283 } else {
14284 println!(" {} drift detected:", style::err_prefix());
14285 for d in &drift {
14286 println!(" - {d}");
14287 }
14288 }
14289 }
14290 if !ok {
14291 std::process::exit(1);
14292 }
14293}
14294
14295fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
14300 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
14301 let out_dir = out
14302 .map(Path::to_path_buf)
14303 .unwrap_or_else(|| path.join("doc"));
14304 let report =
14305 crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
14306 if json_output {
14307 println!(
14308 "{}",
14309 serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
14310 );
14311 return;
14312 }
14313 println!();
14314 println!(
14315 " {}",
14316 format!("VELA · DOC · {}", path.display())
14317 .to_uppercase()
14318 .dimmed()
14319 );
14320 println!(" {}", style::tick_row(60));
14321 println!(" frontier_id: {}", report.frontier_id);
14322 println!(" out: {}", report.out);
14323 println!(" files written: {}", report.files_written);
14324 println!(" findings: {}", report.findings_documented);
14325 println!(" events: {}", report.events_documented);
14326 println!();
14327 println!(
14328 " {} open {}/index.html in a browser",
14329 style::ok("ok"),
14330 report.out
14331 );
14332}
14333
14334fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
14335 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
14336 let target = into
14337 .map(Path::to_path_buf)
14338 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
14339 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
14340 println!(
14341 "{} {} findings · {}",
14342 style::ok("imported"),
14343 frontier.findings.len(),
14344 target.display()
14345 );
14346}
14347
14348fn cmd_locator_repair(
14349 path: &Path,
14350 atom_id: &str,
14351 locator_override: Option<&str>,
14352 reviewer: &str,
14353 reason: &str,
14354 apply: bool,
14355 json_output: bool,
14356) {
14357 let report = state::repair_evidence_atom_locator(
14358 path,
14359 atom_id,
14360 locator_override,
14361 reviewer,
14362 reason,
14363 apply,
14364 )
14365 .unwrap_or_else(|e| fail_return(&e));
14366 print_state_report(&report, json_output);
14367}
14368
14369async fn cmd_source_fetch(
14374 identifier: &str,
14375 cache_root: Option<&Path>,
14376 out_path: Option<&Path>,
14377 refresh: bool,
14378 _json_output: bool,
14379) {
14380 use sha2::{Digest, Sha256};
14381
14382 let normalized = normalize_source_identifier(identifier);
14383 let cache_path = cache_root.map(|root| {
14384 let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
14385 root.join("sources")
14386 .join("cache")
14387 .join(format!("{hash}.json"))
14388 });
14389
14390 if !refresh
14391 && let Some(p) = cache_path.as_ref()
14392 && p.is_file()
14393 {
14394 let body = std::fs::read_to_string(p)
14395 .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
14396 emit_source_fetch_result(&body, out_path);
14397 return;
14398 }
14399
14400 let result = fetch_source_metadata(&normalized).await;
14401 let json = match result {
14402 Ok(value) => serde_json::to_string_pretty(&value)
14403 .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
14404 Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
14405 };
14406
14407 if let Some(p) = cache_path.as_ref() {
14408 if let Some(parent) = p.parent() {
14409 std::fs::create_dir_all(parent)
14410 .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
14411 }
14412 std::fs::write(p, &json)
14413 .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
14414 }
14415 emit_source_fetch_result(&json, out_path);
14416}
14417
14418fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
14419 if let Some(p) = out_path {
14420 if let Some(parent) = p.parent() {
14421 let _ = std::fs::create_dir_all(parent);
14422 }
14423 std::fs::write(p, body)
14424 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
14425 } else {
14426 println!("{body}");
14427 }
14428}
14429
/// Canonicalize a raw source identifier into a prefixed form.
///
/// Already-prefixed input (`doi:`, `pmid:`, `nct:`, `pmc:`) is returned
/// trimmed but otherwise untouched. Bare DOIs (`10.…`) get a `doi:` prefix,
/// bare `NCT…`/`nct…` trial ids become `nct:<digits>`, and non-empty
/// all-digit input is treated as a PubMed id. Anything else passes through
/// trimmed.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if ["doi:", "pmid:", "nct:", "pmc:"]
        .iter()
        .any(|prefix| trimmed.starts_with(prefix))
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous version appended `.split_at(0).0` — always
        // the empty string — so every bare NCT id normalized to just "nct:",
        // silently dropping the trial number.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    // `all()` is vacuously true on "", so guard against empty → "pmid:".
    if !trimmed.is_empty() && trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
14458
/// Fetch bibliographic metadata for a normalized identifier (`doi:`, `pmid:`,
/// or `nct:` prefix) from the matching public API.
///
/// For DOIs, Crossref is the primary source; when Crossref returns no
/// abstract, the DOI is resolved to a PMID and the PubMed abstract is patched
/// into the record, with the origin recorded under `abstract_source`.
/// Returns an error string for unsupported prefixes or transport failures.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        // Crossref abstracts are frequently missing; detect that case first.
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Best-effort PubMed fallback: any failure here silently keeps the
        // Crossref record unchanged.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                // Record that the abstract came from PubMed, not Crossref.
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
14508
14509async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
14513 let url = format!(
14514 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
14515 urlencoding::encode(doi)
14516 );
14517 let resp = client.get(&url).send().await.ok()?;
14518 if !resp.status().is_success() {
14519 return None;
14520 }
14521 let body: Value = resp.json().await.ok()?;
14522 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
14523 if id_list.len() != 1 {
14524 return None;
14527 }
14528 id_list.first()?.as_str().map(|s| s.to_string())
14529}
14530
14531async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
14532 let url = format!("https://api.crossref.org/works/{doi}");
14533 let resp = client
14534 .get(&url)
14535 .send()
14536 .await
14537 .map_err(|e| format!("crossref get: {e}"))?;
14538 if !resp.status().is_success() {
14539 return Err(format!("crossref returned {}", resp.status()));
14540 }
14541 let body: Value = resp
14542 .json()
14543 .await
14544 .map_err(|e| format!("crossref json: {e}"))?;
14545 let work = body.get("message").cloned().unwrap_or(Value::Null);
14546 let title = work
14547 .get("title")
14548 .and_then(|v| v.as_array())
14549 .and_then(|a| a.first())
14550 .and_then(|v| v.as_str())
14551 .unwrap_or("")
14552 .to_string();
14553 let abstract_html = work
14554 .get("abstract")
14555 .and_then(|v| v.as_str())
14556 .unwrap_or("")
14557 .to_string();
14558 let abstract_text = strip_jats_tags(&abstract_html);
14559 let year = work
14560 .get("issued")
14561 .and_then(|v| v.get("date-parts"))
14562 .and_then(|v| v.as_array())
14563 .and_then(|a| a.first())
14564 .and_then(|v| v.as_array())
14565 .and_then(|a| a.first())
14566 .and_then(|v| v.as_i64());
14567 let journal = work
14568 .get("container-title")
14569 .and_then(|v| v.as_array())
14570 .and_then(|a| a.first())
14571 .and_then(|v| v.as_str())
14572 .unwrap_or("")
14573 .to_string();
14574 let authors = work
14575 .get("author")
14576 .and_then(|v| v.as_array())
14577 .map(|arr| {
14578 arr.iter()
14579 .filter_map(|a| {
14580 let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
14581 let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
14582 let combined = format!("{given} {family}").trim().to_string();
14583 if combined.is_empty() {
14584 None
14585 } else {
14586 Some(combined)
14587 }
14588 })
14589 .collect::<Vec<_>>()
14590 })
14591 .unwrap_or_default();
14592 Ok(json!({
14593 "schema": "vela.source_fetch.v0.1",
14594 "identifier": format!("doi:{doi}"),
14595 "source": "crossref",
14596 "title": title,
14597 "abstract": abstract_text,
14598 "year": year,
14599 "journal": journal,
14600 "authors": authors,
14601 "retrieved_at": chrono::Utc::now().to_rfc3339(),
14602 }))
14603}
14604
14605async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
14606 let url = format!(
14607 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
14608 );
14609 let resp = client
14610 .get(&url)
14611 .send()
14612 .await
14613 .map_err(|e| format!("pubmed get: {e}"))?;
14614 if !resp.status().is_success() {
14615 return Err(format!("pubmed returned {}", resp.status()));
14616 }
14617 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
14618 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
14619 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
14620 let year = extract_xml_text(&xml, "<Year>", "</Year>")
14621 .parse::<i64>()
14622 .ok();
14623 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
14624 Ok(json!({
14625 "schema": "vela.source_fetch.v0.1",
14626 "identifier": format!("pmid:{pmid}"),
14627 "source": "pubmed",
14628 "title": title,
14629 "abstract": abstract_text,
14630 "year": year,
14631 "journal": journal,
14632 "authors": Vec::<String>::new(),
14633 "retrieved_at": chrono::Utc::now().to_rfc3339(),
14634 }))
14635}
14636
14637async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
14638 let nct_clean = nct.trim();
14639 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
14640 nct_clean.to_uppercase()
14641 } else {
14642 format!("NCT{nct_clean}")
14643 };
14644 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
14645 let resp = client
14646 .get(&url)
14647 .send()
14648 .await
14649 .map_err(|e| format!("ctgov get: {e}"))?;
14650 if !resp.status().is_success() {
14651 return Err(format!("ctgov returned {}", resp.status()));
14652 }
14653 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
14654 let title = body
14655 .pointer("/protocolSection/identificationModule/briefTitle")
14656 .and_then(|v| v.as_str())
14657 .unwrap_or("")
14658 .to_string();
14659 let abstract_text = body
14660 .pointer("/protocolSection/descriptionModule/briefSummary")
14661 .and_then(|v| v.as_str())
14662 .unwrap_or("")
14663 .to_string();
14664 let phase = body
14665 .pointer("/protocolSection/designModule/phases")
14666 .and_then(|v| v.as_array())
14667 .and_then(|a| a.first())
14668 .and_then(|v| v.as_str())
14669 .unwrap_or("")
14670 .to_string();
14671 Ok(json!({
14672 "schema": "vela.source_fetch.v0.1",
14673 "identifier": format!("nct:{nct_id}"),
14674 "source": "clinicaltrials.gov",
14675 "title": title,
14676 "abstract": abstract_text,
14677 "year": Value::Null,
14678 "journal": phase,
14679 "authors": Vec::<String>::new(),
14680 "retrieved_at": chrono::Utc::now().to_rfc3339(),
14681 }))
14682}
14683
/// Return the trimmed text between the first occurrence of `open` and the
/// next `close`, or an empty string when either marker is absent.
/// (Substring scan only — tags carrying attributes will not match.)
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.find(open)
        .map(|start| &xml[start + open.len()..])
        .and_then(|rest| rest.find(close).map(|end| rest[..end].trim().to_string()))
        .unwrap_or_default()
}
14693
/// Drop everything inside `<...>` markup and collapse whitespace runs into
/// single spaces, leaving plain abstract text.
fn strip_jats_tags(html: &str) -> String {
    let mut inside_tag = false;
    let plain: String = html
        .chars()
        .filter(|&c| match c {
            '<' => {
                inside_tag = true;
                false
            }
            '>' => {
                inside_tag = false;
                false
            }
            _ => !inside_tag,
        })
        .collect();
    plain.split_whitespace().collect::<Vec<_>>().join(" ")
}
14707
14708fn cmd_span_repair(
14709 path: &Path,
14710 finding_id: &str,
14711 section: &str,
14712 text: &str,
14713 reviewer: &str,
14714 reason: &str,
14715 apply: bool,
14716 json_output: bool,
14717) {
14718 let report =
14719 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
14720 .unwrap_or_else(|e| fail_return(&e));
14721 print_state_report(&report, json_output);
14722}
14723
14724#[allow(clippy::too_many_arguments)]
14725fn cmd_entity_resolve(
14726 path: &Path,
14727 finding_id: &str,
14728 entity_name: &str,
14729 source: &str,
14730 id: &str,
14731 confidence: f64,
14732 matched_name: Option<&str>,
14733 resolution_method: &str,
14734 reviewer: &str,
14735 reason: &str,
14736 apply: bool,
14737 json_output: bool,
14738) {
14739 let report = state::resolve_finding_entity(
14740 path,
14741 finding_id,
14742 entity_name,
14743 source,
14744 id,
14745 confidence,
14746 matched_name,
14747 resolution_method,
14748 reviewer,
14749 reason,
14750 apply,
14751 )
14752 .unwrap_or_else(|e| fail_return(&e));
14753 print_state_report(&report, json_output);
14754}
14755
14756fn cmd_propagate(
14757 path: &Path,
14758 retract: Option<String>,
14759 reduce_confidence: Option<String>,
14760 to: Option<f64>,
14761 output: Option<&Path>,
14762) {
14763 let mut frontier = load_frontier_or_fail(path);
14764 let (finding_id, action, label) = if let Some(id) = retract {
14765 (id, propagate::PropagationAction::Retracted, "retraction")
14766 } else if let Some(id) = reduce_confidence {
14767 let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
14768 if !(0.0..=1.0).contains(&score) {
14769 fail("--to must be between 0.0 and 1.0");
14770 }
14771 (
14772 id,
14773 propagate::PropagationAction::ConfidenceReduced { new_score: score },
14774 "confidence reduction",
14775 )
14776 } else {
14777 fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
14778 };
14779 if !frontier.findings.iter().any(|f| f.id == finding_id) {
14780 fail(&format!("finding not found: {finding_id}"));
14781 }
14782 let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
14783 frontier.review_events.extend(result.events.clone());
14788 project::recompute_stats(&mut frontier);
14789 propagate::print_result(&result, label, &finding_id);
14790 let out = output.unwrap_or(path);
14791 repo::save_to_path(out, &frontier).expect("Failed to save frontier");
14792 println!(" output: {}", out.display());
14793}
14794
/// Print a ready-to-paste MCP server configuration for the `vela serve`
/// command. An explicit frontier file wins over a frontiers directory;
/// `frontier.json` is the default.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    let (args, source_desc) = match (source, frontiers) {
        (Some(path), _) => (
            format!(r#""serve", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, Some(path)) => (
            format!(r#""serve", "--frontiers", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, None) => (
            r#""serve", "frontier.json""#.to_string(),
            "frontier.json".to_string(),
        ),
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
14822
14823fn parse_entities(input: &str) -> Vec<(String, String)> {
14824 if input.trim().is_empty() {
14825 return Vec::new();
14826 }
14827 input
14828 .split(',')
14829 .filter_map(|pair| {
14830 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
14831 if parts.len() == 2 {
14832 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
14833 } else {
14834 eprintln!(
14835 "{} skipping malformed entity '{}'",
14836 style::warn("warn"),
14837 pair.trim()
14838 );
14839 None
14840 }
14841 })
14842 .collect()
14843}
14844
14845fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
14846 inputs
14847 .iter()
14848 .filter_map(|input| {
14849 let trimmed = input.trim();
14850 if trimmed.is_empty() {
14851 return None;
14852 }
14853 if trimmed.starts_with('{') {
14854 match serde_json::from_str::<Value>(trimmed) {
14855 Ok(value @ Value::Object(_)) => return Some(value),
14856 Ok(_) | Err(_) => {
14857 eprintln!(
14858 "{} evidence span JSON should be an object; storing as text",
14859 style::warn("warn")
14860 );
14861 }
14862 }
14863 }
14864 Some(json!({
14865 "section": "curator_source",
14866 "text": trimmed,
14867 }))
14868 })
14869 .collect()
14870}
14871
14872fn hash_path(path: &Path) -> Result<String, String> {
14873 let mut hasher = Sha256::new();
14874 if path.is_file() {
14875 let bytes = std::fs::read(path)
14876 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
14877 hasher.update(&bytes);
14878 } else if path.is_dir() {
14879 let mut files = Vec::new();
14880 collect_hash_files(path, path, &mut files)?;
14881 files.sort();
14882 for rel in files {
14883 hasher.update(rel.to_string_lossy().as_bytes());
14884 let bytes = std::fs::read(path.join(&rel))
14885 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
14886 hasher.update(bytes);
14887 }
14888 } else {
14889 return Err(format!("Cannot hash missing path {}", path.display()));
14890 }
14891 Ok(format!("{:x}", hasher.finalize()))
14892}
14893
14894fn load_frontier_or_fail(path: &Path) -> project::Project {
14895 repo::load_from_path(path).unwrap_or_else(|e| {
14896 fail_return(&format!(
14897 "Failed to load frontier '{}': {e}",
14898 path.display()
14899 ))
14900 })
14901}
14902
14903fn hash_path_or_fail(path: &Path) -> String {
14904 hash_path(path).unwrap_or_else(|e| {
14905 fail_return(&format!(
14906 "Failed to hash frontier '{}': {e}",
14907 path.display()
14908 ))
14909 })
14910}
14911
/// Recursively gather every regular file under `dir`, pushing each path
/// relative to `root` onto `files`. Non-file, non-dir entries (e.g.
/// symlinks to nowhere) are skipped.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let relative = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(relative.to_path_buf());
        }
    }
    Ok(())
}
14930
14931fn schema_error_suggestion(error: &str) -> &'static str {
14932 if schema_error_action(error).is_some() {
14933 "Run `vela normalize` to repair deterministic frontier state."
14934 } else {
14935 "Inspect and correct the referenced frontier field."
14936 }
14937}
14938
14939fn schema_error_fix(error: &str) -> bool {
14940 schema_error_action(error).is_some()
14941}
14942
/// Map a schema validation error message to the normalize action that can
/// repair it deterministically, or `None` when manual intervention is needed.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Any of these substrings indicates stale metadata/statistics that the
    // normalizer recomputes wholesale.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
14957
14958fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14959 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14960 for diagnostic in diagnostics {
14961 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14962 *actions.entry(action.to_string()).or_default() += 1;
14963 }
14964 }
14965 actions
14966 .into_iter()
14967 .map(|(action, count)| {
14968 let command = if action == "rewrite_ids" {
14969 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14970 } else {
14971 "vela normalize <frontier> --write"
14972 };
14973 json!({
14974 "action": action,
14975 "count": count,
14976 "command": command,
14977 })
14978 })
14979 .collect()
14980}
14981
14982fn cmd_integrity(frontier: &Path, json: bool) {
14983 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14984 if json {
14985 println!(
14986 "{}",
14987 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14988 );
14989 } else {
14990 println!("vela integrity");
14991 println!(" frontier: {}", frontier.display());
14992 println!(" status: {}", report.status);
14993 println!(" proof freshness: {}", report.proof_freshness);
14994 println!(" structural errors: {}", report.structural_errors.len());
14995 for error in report.structural_errors.iter().take(8) {
14996 println!(" - {}: {}", error.rule_id, error.message);
14997 }
14998 }
14999}
15000
15001fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
15002 let report =
15003 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
15004 if json {
15005 println!(
15006 "{}",
15007 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
15008 );
15009 } else {
15010 println!("vela impact");
15011 println!(" finding: {}", report.target.id);
15012 println!(" frontier: {}", report.frontier.vfr_id);
15013 println!(" direct dependents: {}", report.summary.direct_dependents);
15014 println!(" downstream: {}", report.summary.total_downstream);
15015 println!(" open proposals: {}", report.summary.open_proposals);
15016 println!(" accepted events: {}", report.summary.accepted_events);
15017 println!(" proof: {}", report.summary.proof_status);
15018 }
15019}
15020
/// Report discord (conflicting-evidence) assignments across a frontier:
/// per-finding discord kinds, a per-kind histogram, and summary counts.
/// With `kind_filter`, the per-finding listing is restricted to findings
/// carrying that kind (the histogram stays unfiltered).
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    let support = assignment.frontier_support();

    // One row per finding with any discord, optionally filtered by kind.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Count findings per discord kind over the full (unfiltered) assignment;
    // BTreeMap keeps the histogram ordering deterministic. Zero-count kinds
    // are omitted.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!("  frontier: {frontier_id}");
    println!("  total findings: {total_findings}");
    println!(
        "  frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!("  filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!("  no discord detected.");
    } else {
        println!("  discord histogram:");
        for (k, n) in &histogram {
            println!("    {n:>4} {k}");
        }
    }
    // Cap the per-finding listing at 50 rows so large frontiers stay readable.
    if !rows.is_empty() {
        println!();
        println!("  findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!("    {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!("    ... and {} more", rows.len() - 50);
        }
    }
}
15118
15119fn empty_signal_report() -> signals::SignalReport {
15120 signals::SignalReport {
15121 schema: "vela.signals.v0".to_string(),
15122 frontier: "unavailable".to_string(),
15123 signals: Vec::new(),
15124 review_queue: Vec::new(),
15125 proof_readiness: signals::ProofReadiness {
15126 status: "unavailable".to_string(),
15127 blockers: 0,
15128 warnings: 0,
15129 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
15130 },
15131 }
15132}
15133
15134fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
15135 println!();
15136 println!(" {}", "SIGNALS".dimmed());
15137 println!(" {}", style::tick_row(60));
15138 println!(" total signals: {}", report.signals.len());
15139 println!(" proof readiness: {}", report.proof_readiness.status);
15140 if !report.review_queue.is_empty() {
15141 println!(" review queue: {} items", report.review_queue.len());
15142 }
15143 if strict && report.proof_readiness.status != "ready" {
15144 println!(
15145 " {} proof readiness has blocking signals.",
15146 style::lost("strict check failed")
15147 );
15148 }
15149}
15150
/// Write `value` as pretty JSON at `relative_path` inside a proof packet
/// directory and update both packet manifests so their file lists and
/// hashes stay consistent.
///
/// Steps, in order:
/// 1. Write the new JSON file (creating parent directories as needed).
/// 2. Record its path/sha256/size in `manifest.json` (`included_files`)
///    and `packet.lock.json` (`files`), replacing any stale entry.
/// 3. Re-hash the now-rewritten `packet.lock.json` and refresh its own
///    entry in `manifest.json` — this must happen last, after the lock
///    file's final bytes are on disk.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Manifest entry describing the file just written.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Both manifests track files, but under different array keys.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any previous entry for this path before appending the new one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // The loop above rewrote packet.lock.json, so the hash recorded for it in
    // manifest.json is now stale; recompute from the final bytes and replace it.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
15231
15232fn print_tool_check_report(report: &Value) {
15233 let summary = report.get("summary").unwrap_or(&Value::Null);
15234 let frontier = report.get("frontier").unwrap_or(&Value::Null);
15235 println!();
15236 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
15237 println!(" {}", style::tick_row(60));
15238 println!(
15239 "frontier: {}",
15240 frontier
15241 .get("name")
15242 .and_then(Value::as_str)
15243 .unwrap_or("unknown")
15244 );
15245 println!(
15246 "findings: {}",
15247 frontier
15248 .get("findings")
15249 .and_then(Value::as_u64)
15250 .unwrap_or_default()
15251 );
15252 println!(
15253 "checks: {} passed, {} failed",
15254 summary
15255 .get("passed")
15256 .and_then(Value::as_u64)
15257 .unwrap_or_default(),
15258 summary
15259 .get("failed")
15260 .and_then(Value::as_u64)
15261 .unwrap_or_default()
15262 );
15263 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
15264 let names = tools
15265 .iter()
15266 .filter_map(Value::as_str)
15267 .collect::<Vec<_>>()
15268 .join(", ");
15269 println!("tools: {names}");
15270 }
15271 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
15272 for check in checks {
15273 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
15274 style::ok("ok")
15275 } else {
15276 style::lost("lost")
15277 };
15278 println!(
15279 " {} {}",
15280 status,
15281 check
15282 .get("tool")
15283 .and_then(Value::as_str)
15284 .unwrap_or("unknown")
15285 );
15286 }
15287 }
15288}
15289
15290fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
15291 if json_output {
15292 println!(
15293 "{}",
15294 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
15295 );
15296 } else {
15297 println!("{}", report.message);
15298 println!(" frontier: {}", report.frontier);
15299 println!(" finding: {}", report.finding_id);
15300 println!(" proposal: {}", report.proposal_id);
15301 println!(" status: {}", report.proposal_status);
15302 if let Some(event_id) = &report.applied_event_id {
15303 println!(" event: {}", event_id);
15304 }
15305 println!(" wrote: {}", report.wrote_to);
15306 }
15307}
15308
15309fn print_history(payload: &Value) {
15310 let finding = payload.get("finding").unwrap_or(&Value::Null);
15311 println!("vela history");
15312 println!(
15313 " finding: {}",
15314 finding
15315 .get("id")
15316 .and_then(Value::as_str)
15317 .unwrap_or("unknown")
15318 );
15319 println!(
15320 " assertion: {}",
15321 finding
15322 .get("assertion")
15323 .and_then(Value::as_str)
15324 .unwrap_or("")
15325 );
15326 println!(
15327 " confidence: {:.3}",
15328 finding
15329 .get("confidence")
15330 .and_then(Value::as_f64)
15331 .unwrap_or_default()
15332 );
15333 let reviews = payload
15334 .get("review_events")
15335 .and_then(Value::as_array)
15336 .map_or(0, Vec::len);
15337 let updates = payload
15338 .get("confidence_updates")
15339 .and_then(Value::as_array)
15340 .map_or(0, Vec::len);
15341 let annotations = finding
15342 .get("annotations")
15343 .and_then(Value::as_array)
15344 .map_or(0, Vec::len);
15345 let sources = payload
15346 .get("sources")
15347 .and_then(Value::as_array)
15348 .map_or(0, Vec::len);
15349 let atoms = payload
15350 .get("evidence_atoms")
15351 .and_then(Value::as_array)
15352 .map_or(0, Vec::len);
15353 let conditions = payload
15354 .get("condition_records")
15355 .and_then(Value::as_array)
15356 .map_or(0, Vec::len);
15357 let proposals = payload
15358 .get("proposals")
15359 .and_then(Value::as_array)
15360 .map_or(0, Vec::len);
15361 let events = payload
15362 .get("events")
15363 .and_then(Value::as_array)
15364 .map_or(0, Vec::len);
15365 println!(" review events: {reviews}");
15366 println!(" confidence updates: {updates}");
15367 println!(" annotations: {annotations}");
15368 println!(" sources: {sources}");
15369 println!(" evidence atoms: {atoms}");
15370 println!(" condition records: {conditions}");
15371 println!(" proposals: {proposals}");
15372 println!(" canonical events: {events}");
15373 if let Some(status) = payload
15374 .get("proof_state")
15375 .and_then(|value| value.get("latest_packet"))
15376 .and_then(|value| value.get("status"))
15377 .and_then(Value::as_str)
15378 {
15379 println!(" proof state: {status}");
15380 }
15381 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
15382 for event in events.iter().take(8) {
15383 println!(
15384 " - {} {} {}",
15385 event
15386 .get("reviewed_at")
15387 .and_then(Value::as_str)
15388 .unwrap_or(""),
15389 event.get("id").and_then(Value::as_str).unwrap_or(""),
15390 event.get("reason").and_then(Value::as_str).unwrap_or("")
15391 );
15392 }
15393 }
15394}
15395
/// Serialized record of a proof run: what was checked, from which source,
/// and with what outcome. Written as JSON (see `trace_path`).
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag for this trace format.
    pub trace_version: String,
    /// CLI invocation that produced the trace, argv-style.
    pub command: Vec<String>,
    /// Source path or identifier the proof was run against.
    pub source: String,
    /// Content hash of the source at proof time.
    pub source_hash: String,
    /// Schema version of the frontier being proved.
    pub schema_version: String,
    /// Artifacts that were checked during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results attached to the run (JSON blob).
    pub benchmark: Option<Value>,
    /// Packet manifest reference — presumably a path or hash; confirm at use site.
    pub packet_manifest: String,
    /// Packet validation outcome description.
    pub packet_validation: String,
    /// Caveats accumulated while proving.
    pub caveats: Vec<String>,
    /// Overall status of the proof run.
    pub status: String,
    /// Where this trace itself was written.
    pub trace_path: String,
}
15411
/// Names of all science-mode subcommands recognized by the CLI dispatcher.
/// Checked via `is_science_subcommand`; order is not significant, but keep
/// entries grouped as-is for readability when adding new commands.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "doc",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "proof-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
15549
15550pub fn is_science_subcommand(name: &str) -> bool {
15551 SCIENCE_SUBCOMMANDS.contains(&name)
15552}
15553
/// Print the full "strict" help surface: every release subcommand grouped
/// by workflow, followed by quick-start, substrate-health, monolithic-file,
/// and publishing recipes. Shown via `vela help advanced`.
fn print_strict_help() {
    // The entire help text is one raw string so the layout is controlled
    // verbatim; only the version number is interpolated.
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
 vela <COMMAND>

Core flow (v0.74):
 init Initialize a split frontier repo
 ingest Ingest a paper, dataset, or Carina packet (dispatches by file type)
 propose Create a finding.review proposal
 diff Preview a `vpr_*` proposal, or compare two frontier files
 accept Apply a proposal under reviewer authority
 attest Sign findings under your private key
 log Recent canonical state events
 lineage State-transition replay for one finding
 serve Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
 check Validate a frontier, repo, or proof packet
 integrity Check accepted frontier state integrity
 impact Report downstream finding impact
 normalize Apply deterministic frontier-state repairs
 proof Export and validate a proof packet
 repo Inspect split frontier repository status and shape
 stats Show frontier statistics
 search Search findings
 tensions List candidate contradictions and tensions
 gaps Inspect and rank candidate gap review leads
 bridge Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
 scout Run Literature Scout against a folder of PDFs (writes proposals)
 compile-notes Run Notes Compiler against a Markdown vault (writes proposals)
 compile-code Run Code & Notebook Analyst against a research repo (writes proposals)
 compile-data Run Datasets agent against a folder of CSV/TSV data (writes proposals)
 review-pending Run Reviewer Agent: score every pending proposal (writes notes)
 find-tensions Run Contradiction Finder: surface real contradictions among findings
 plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
 export Export frontier artifacts
 packet Inspect or validate proof packets
 bench Run deterministic benchmark gates
 conformance Run protocol conformance vectors
 sign Optional signing and signature verification
 runtime-adapter
 Normalize external runtime exports into reviewable proposals
 version Show version information
 import Import frontier.json into a .vela repo
 proposals Inspect, validate, export, import, accept, or reject write proposals
 artifact-to-state
 Import a Carina artifact packet as reviewable proposals
 bridge-kit
 Validate Carina artifact packets before importing runtime output
 source-adapter
 Run reviewed source adapters into artifact-to-state proposals
 finding Add or manage finding bundles as frontier state
 link Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
 entity Resolve unresolved entities against a bundled common-entity table (v0.19)
 frontier Scaffold (`new`), materialize, and manage frontier metadata + deps
 actor Register Ed25519 publisher identities in a frontier
 registry Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
 review Create a review proposal or review interactively
 note Add a lightweight note to a finding
 caveat Create an explicit caveat proposal
 revise Create a confidence revision proposal
 reject Create a rejection proposal
 history Show state-transition history for one finding (v0.74 alias: `lineage`)
 import-events Import review/state events from a packet or JSON file
 retract Create a retraction proposal
 propagate Simulate impact over declared dependency links
 artifact-add Register a content-addressed artifact
 artifacts List content-addressed artifacts
 artifact-audit Audit artifact locators, hashes, references, and profiles
 decision-brief Show the validated decision brief projection
 trial-summary Show the validated trial outcome projection
 source-verification Show the validated source verification projection
 source-ingest-plan Show the validated source ingest plan
 clinical-trial-import Import a ClinicalTrials.gov record as an artifact
 locator-repair Mechanically repair an evidence atom's missing source locator
 span-repair Mechanically repair a finding's missing evidence span
 entity-resolve Resolve a finding entity to a canonical id
 source-fetch Fetch metadata + abstract for a doi:/pmid:/nct: source
 atlas Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
 constellation Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
 vela init demo --name "Your bounded question"
 vela ingest paper.pdf --frontier demo
 vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
 vela diff <vpr_id> --frontier demo
 vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
 vela serve --path demo

Substrate health:
 vela frontier materialize my-frontier --json
 vela repo status my-frontier --json
 vela proof verify my-frontier --json
 vela check my-frontier --strict --json

Monolithic frontier file:
 vela frontier new frontier.json --name "Your bounded question"
 vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
 vela check frontier.json --json
 FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
 vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
 vela frontier new ./frontier.json --name "Your bounded question"
 vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
 vela sign generate-keypair --out keys
 vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
 vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
 --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
15672
/// Async entry-point signature for the Literature Scout agent; the
/// parameters mirror the `Scout` CLI subcommand's arguments. Returns a
/// boxed future so the type stays nameable as a plain function pointer.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered scout handler.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Register the scout handler. The first registration wins; the
/// `OnceLock::set` error on any later call is deliberately discarded.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
15697
/// Async entry-point signature for the atlas-init flow (create an Atlas
/// from a set of frontiers under `atlases_root`).
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered atlas-init handler.
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Register the atlas-init handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}
15715
/// Async entry-point signature for atlas materialization.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered atlas-materialize handler.
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Register the atlas-materialize handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}
15725
/// Async entry-point signature for serving an Atlas over HTTP on `port`,
/// optionally opening the browser.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered atlas-serve handler.
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Register the atlas-serve handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}
15742
/// Async entry-point signature for updating an Atlas: add member frontiers
/// and/or remove members by their `vfr` id.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered atlas-update handler.
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Register the atlas-update handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
15760
/// Async entry-point signature for constellation-init (compose a
/// Constellation from a set of Atlases under `constellations_root`).
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered constellation-init handler.
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Register the constellation-init handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}
15777
/// Async entry-point signature for constellation materialization.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered constellation-materialize handler.
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Register the constellation-materialize handler (first registration wins;
/// repeat registrations are silently ignored).
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}
15790
/// Async entry-point signature for serving a Constellation over HTTP on
/// `port`, optionally opening the browser.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered constellation-serve handler.
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Register the constellation-serve handler (first registration wins;
/// repeat registrations are silently ignored).
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
15803
/// Async entry-point signature for the Notes Compiler agent; the
/// parameters mirror the `CompileNotes` CLI subcommand's arguments.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered notes handler.
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Register the notes handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}
15823
/// Async entry-point signature for the Code & Notebook Analyst agent; the
/// parameters mirror the `CompileCode` CLI subcommand's arguments.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered code handler.
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Register the code handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}
15840
/// Async entry-point signature for the Datasets agent; `sample_rows`
/// bounds how many rows are sampled per file — TODO confirm against the
/// registered implementation.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered datasets handler.
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Register the datasets handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}
15857
/// Async entry-point signature for the Reviewer agent; the parameters
/// mirror the `ReviewPending` CLI subcommand's arguments.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered reviewer handler.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Register the reviewer handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}
15874
/// Async entry-point signature for the Contradiction Finder agent
/// (`find-tensions` subcommand).
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered tensions handler.
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Register the tensions handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}
15890
/// Async entry-point signature for the Experiment Planner agent
/// (`plan-experiments` subcommand).
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once slot holding the registered experiments handler.
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Register the experiments handler (first registration wins; repeat
/// registrations are silently ignored).
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
15906
/// Search upward from the current working directory for the nearest
/// directory containing a `.vela/` subdirectory.
///
/// Returns the first matching ancestor (the CWD itself counts), or `None`
/// when no ancestor up to the filesystem root is a Vela repo, or when the
/// CWD cannot be determined.
fn find_vela_repo() -> Option<PathBuf> {
    let cwd = std::env::current_dir().ok()?;
    cwd.ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
15933
/// Print the short, session-oriented help summary shown by plain
/// `vela help` (the full strict surface lives in `print_strict_help`,
/// reachable as `vela help advanced`).
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
15980
/// Render the interactive-session dashboard for a loaded frontier repo:
/// header line, finding/event/proposal counts, then conditional rollups
/// (pending inbox, causal-audit problems, bridges, replications) and the
/// quick-verb legend.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Truncated frontier id for the one-line header.
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Count pending proposals, bucketed by proposal kind for the inbox line.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Tally bridge records stored as JSON files under .vela/bridges.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            // Every .json file counts toward the total, even if it later
            // fails to read or parse.
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replication rollup: distinct findings with at least one successful
    // replication, plus the number of failed attempts.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Header shows the compiler version without its "vela/" prefix.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        " {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " vfr_id {}… repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        " findings {:>4} events {} proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // Conditional rollup lines: only shown when there is something to report.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!(" {} · {}", style::warn("inbox"), parts.join(" "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            " {} · {} underidentified · {} conditional",
            // Underidentified findings escalate the label styling.
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            " {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            " {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    // Quick-verb legend matching the dispatch in `run_session_verb`.
    println!();
    println!(" type a verb or ask anything:");
    println!(" a audit problems i inbox (pending) b bridges");
    println!(" g causal graph l log (recent) c counterfactuals");
    println!(" s refresh status h help (more verbs) q quit");
    println!();
}
16100
/// Execute one session verb (single letter or word) against the repo at
/// `repo_path`.
///
/// Returns `true` when the input was recognized and handled (even if the
/// underlying operation failed), `false` when the caller should treat the
/// input as a free-text question instead.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        "a" | "audit" => {
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        "i" | "inbox" => {
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        "l" | "log" => {
            cmd_log(repo_path, 10, None, false);
            true
        }
        "c" | "counterfactual" | "counterfactuals" => {
            // List mechanism-annotated depends/supports edges — the pairs
            // eligible for `vela causal counterfactual`.
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    eprintln!("{} {e}", style::err_prefix());
                    return true; // verb was recognized even though loading failed
                }
            };
            println!();
            println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!(" {}", style::tick_row(60));
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Strip an optional "<prefix>:" from the link target id.
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    // Print at most the first 10 pairs but keep counting all.
                    if pairs <= 10 {
                        println!(" · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!(" no mechanism-annotated edges found.");
                println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!(" {pairs} live pair(s). Run with:");
                println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        "s" | "status" | "refresh" => {
            // Reload from disk so the dashboard reflects on-disk changes.
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        _ => false,
    }
}
16205
16206fn run_session() {
16207 let repo_path = match find_vela_repo() {
16208 Some(p) => p,
16209 None => {
16210 println!();
16211 println!(
16212 " {}",
16213 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
16214 );
16215 println!(" {}", style::tick_row(60));
16216 println!(" Run `vela init` here to create a frontier, or cd into one.");
16217 println!(" Or run `vela help` for the command list.");
16218 println!();
16219 return;
16220 }
16221 };
16222
16223 let project = match repo::load_from_path(&repo_path) {
16224 Ok(p) => p,
16225 Err(e) => {
16226 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
16227 std::process::exit(1);
16228 }
16229 };
16230
16231 print_session_dashboard(&project, &repo_path);
16232
16233 use std::io::{BufRead, Write};
16234 let stdin = std::io::stdin();
16235 let mut stdout = std::io::stdout();
16236 loop {
16237 print!(" > ");
16238 stdout.flush().ok();
16239 let mut line = String::new();
16240 if stdin.lock().read_line(&mut line).is_err() {
16241 break;
16242 }
16243 let input = line.trim();
16244 if input.is_empty() {
16245 continue;
16246 }
16247 if matches!(input, "q" | "quit" | "exit") {
16248 break;
16249 }
16250 if run_session_verb(input, &repo_path) {
16251 continue;
16252 }
16253 let project = match repo::load_from_path(&repo_path) {
16255 Ok(p) => p,
16256 Err(e) => {
16257 eprintln!("{} {e}", style::err_prefix());
16258 continue;
16259 }
16260 };
16261 answer(&project, input, false);
16262 }
16263}
16264
/// Top-level argv dispatcher, run before clap parsing.
///
/// Handles the fast paths directly (bare `vela` opens a session; help,
/// version, and `proof verify`/`proof explain` are hand-parsed), rejects
/// anything outside the release allow-list with exit code 2, and hands
/// every remaining recognized subcommand to the async clap-based
/// `run_command` on a fresh tokio runtime.
pub fn run_from_args() {
    style::init();
    let args = std::env::args().collect::<Vec<_>>();
    match args.get(1).map(String::as_str) {
        None => {
            // Bare `vela`: open the interactive session.
            run_session();
            return;
        }
        Some("-h" | "--help" | "help") => {
            // `help advanced` shows the full strict surface; plain help
            // shows the shorter session-oriented summary.
            if args.get(2).map(String::as_str) == Some("advanced") {
                print_strict_help();
            } else {
                print_session_help();
            }
            return;
        }
        Some("-V" | "--version" | "version") => {
            println!("vela {}", env!("CARGO_PKG_VERSION"));
            return;
        }
        Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
            // Hand-rolled parse: the first non-flag token after
            // `proof verify` is the frontier path; `--json` may appear
            // anywhere in the argument list.
            let json = args.iter().any(|arg| arg == "--json");
            let frontier = args
                .iter()
                .skip(3)
                .find(|arg| !arg.starts_with('-'))
                .map(PathBuf::from)
                .unwrap_or_else(|| {
                    eprintln!(
                        "{} proof verify requires a frontier repo",
                        style::err_prefix()
                    );
                    std::process::exit(2);
                });
            cmd_proof_verify(&frontier, json);
            return;
        }
        Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
            let frontier = args
                .iter()
                .skip(3)
                .find(|arg| !arg.starts_with('-'))
                .map(PathBuf::from)
                .unwrap_or_else(|| {
                    eprintln!(
                        "{} proof explain requires a frontier repo",
                        style::err_prefix()
                    );
                    std::process::exit(2);
                });
            cmd_proof_explain(&frontier);
            return;
        }
        Some(cmd) if !is_science_subcommand(cmd) => {
            // Anything outside the allow-list is rejected before clap runs.
            eprintln!(
                "{} unknown or non-release command: {cmd}",
                style::err_prefix()
            );
            eprintln!("run `vela --help` for the strict v0 command surface.");
            std::process::exit(2);
        }
        Some(_) => {}
    }
    // Recognized subcommand: dispatch through the async clap parser.
    let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
    runtime.block_on(run_command());
}
16336
16337fn fail(message: &str) -> ! {
16338 eprintln!("{} {message}", style::err_prefix());
16339 std::process::exit(1);
16340}
16341
16342fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
16347 if !valid.contains(&value) {
16348 fail(&format!(
16349 "invalid {flag} '{value}'. Valid: {}",
16350 valid.join(", ")
16351 ));
16352 }
16353}
16354
/// Expression-position variant of [`fail`]: typed as returning any `T` so
/// it can be used where a value is expected (e.g. inside `unwrap_or_else`),
/// but it never actually returns — `fail` exits the process.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}