1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Root CLI parser for the `vela` binary.
// clap derives argument parsing from this struct; the single `command` field
// dispatches to one of the `Commands` subcommand variants.
// NOTE: `///` doc comments are deliberately avoided here — clap turns them
// into user-visible help text, so these notes use plain `//` comments.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    // The selected subcommand (required; clap errors out if absent).
    #[command(subcommand)]
    command: Commands,
}
29
// Top-level subcommands of the `vela` CLI. Each variant's fields are its
// flags/positionals as parsed by clap. Recurring conventions visible below:
// `frontier: PathBuf` points at a frontier file/dir, `--json` switches to
// machine-readable output, `--dry-run` previews without writing, and
// `--apply` commits a change instead of only proposing it.
// `//` comments are used (not `///`) so clap help text is unchanged.
#[derive(Subcommand)]
enum Commands {
    // Scan a folder and feed results into a frontier (optionally via a backend).
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a notes vault into frontier state, with optional caps on files
    // and on items emitted per category.
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a code tree into frontier state.
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Review pending proposals in batches (default batch size 1).
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Surface tension findings in a frontier.
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Plan experiments from frontier findings (same knob set as FindTensions).
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a data tree into frontier state; `--sample-rows` limits sampling.
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Validate a frontier: schema, stats, conformance suite, optional --fix.
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    // Run integrity checks on a frontier (see `state_integrity` module).
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Impact analysis for a finding, to an optional traversal depth.
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    // Report discord in a frontier, optionally filtered by kind.
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    // Normalize a frontier file; can rewrite IDs (emitting an id map) and
    // resync provenance. `--write` edits in place, `--out` writes elsewhere.
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    // Build a proof packet from a frontier using a named template
    // (default template "bbb-alzheimer", default output dir "proof-packet").
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    // Repository maintenance subcommands (status/doctor).
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    // Serve one frontier (positional) or many (--frontiers); the positional is
    // only optional when --frontiers or --setup is given.
    Serve {
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    // Summarize frontier status.
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Show the event log (default 20 entries, optional kind filter).
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List inbox items (default 30, optional kind filter).
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Ask a free-form question; trailing args are joined into the question.
    Ask {
        frontier: PathBuf,
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    // Frontier statistics.
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Search one frontier (--source) or all frontiers under a root (--all),
    // with entity/type filters. `r#type` escapes the `type` keyword.
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Rank tensions; filters for both-high-confidence and cross-domain pairs.
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    // Gap-related subcommands (currently: rank).
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    // Bridge multiple inputs. `--novelty` is an explicit-value bool
    // (`--novelty true|false`, default true) via ArgAction::Set.
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    // Export a frontier (default format "csv") to stdout or a file.
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // Packet inspection/validation subcommands.
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    // Verify a path (frontier or packet — semantics live in the handler).
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Benchmark against gold data; all inputs optional, with threshold gates
    // (--min-f1/--min-precision/--min-recall) unless --no-thresholds.
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    // Run the conformance suite from a directory (default tests/conformance).
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    // Print version info (no fields; beyond clap's built-in --version).
    Version,
    // Signing subcommands (keypairs, apply, verify, thresholds).
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    // Actor registry subcommands.
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    // Federation/peer subcommands.
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    // Causal-graph subcommands.
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    // Frontier lifecycle subcommands (new, deps, diff, ...).
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    // Signing-queue subcommands.
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    // Registry (hub) subcommands.
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    // Initialize a frontier in `path` (default "."), optionally skipping git.
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    // Guided demo setup; defaults create a "demo" dir owned by "reviewer:you".
    Quickstart {
        #[arg(default_value = "demo")]
        path: PathBuf,
        #[arg(long, default_value = "Quickstart frontier")]
        name: String,
        #[arg(long, default_value = "reviewer:you")]
        reviewer: String,
        #[arg(long)]
        assertion: Option<String>,
        #[arg(long)]
        keys_out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Write or verify (--check) a lock for a path.
    Lock {
        path: PathBuf,
        #[arg(long)]
        check: bool,
        #[arg(long)]
        json: bool,
    },
    // Generate documentation for a path, optionally to --out.
    Doc {
        path: PathBuf,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Import a frontier, optionally into another location.
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    // Diff a target against a second frontier or the --frontier baseline;
    // preview attribution defaults to "reviewer:preview".
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    // Proposal subcommands (ProposalAction is declared elsewhere in the file).
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    // Fold a packet's artifacts into frontier state as the given actor.
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    // Bridge-kit subcommands (action enum declared elsewhere).
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    // Source-adapter subcommands (action enum declared elsewhere).
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    // Runtime-adapter subcommands (action enum declared elsewhere).
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    // Finding subcommands (add, supersede, ...).
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    // Link subcommands (currently: add).
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    // Launch the workbench UI on --port (default 3850); --no-open suppresses
    // opening a browser.
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    // Bridge management subcommands (derive/list/show/confirm/refute).
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    // Entity subcommands (resolve/list).
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // Review a finding: set status/reason, attributed to --reviewer;
    // --apply commits the change.
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a free-text note to a finding.
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a caveat to a finding (same shape as Note).
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Revise a finding's confidence value, with a required reason/reviewer.
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Reject a finding with a required reason/reviewer.
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Show a finding's history, optionally as of an RFC3339 timestamp.
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    // Import events from `source` into the --into frontier.
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Retract a finding. NOTE(review): positional is named `source` here
    // while sibling commands use `frontier` — confirm intentional.
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a formal proof script to a finding; tool defaults to lean4
    // 4.29.1. Kebab-case flag names are set explicitly (e.g. --target-finding).
    ProofAdd {
        frontier: PathBuf,
        #[arg(long = "target-finding")]
        target_finding: String,
        #[arg(long, default_value = "lean4")]
        tool: String,
        #[arg(long = "tool-version", default_value = "4.29.1")]
        tool_version: String,
        #[arg(long = "script-path")]
        script_path: PathBuf,
        #[arg(long, default_value = "Proof script")]
        name: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Add an entity (name + type) to a finding.
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Record an entity resolution (external source + id + confidence);
    // resolution method defaults to "manual".
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Fetch a source by identifier, with optional cache dir, output path,
    // and forced refresh.
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    // Repair an evidence span (section + text) on a finding.
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Repair an atom's locator.
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Propagate a retraction or confidence reduction (--reduce-confidence
    // with --to) through the frontier.
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // Record a replication attempt against a target finding, with source
    // citation fields; --no-cascade suppresses downstream updates.
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    // List replication attempts, optionally for one target.
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Register a dataset (name, content hash, citation metadata).
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    // List registered datasets.
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Register a code artifact (language, repo/commit, path, hash, line span).
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List registered code artifacts.
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Deposit a generic artifact; repeatable --target/--metadata; defaults:
    // access tier "public", depositor "reviewer:manual".
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // List artifacts, optionally for one target.
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Audit artifacts.
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Produce a decision brief.
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Produce a trial summary.
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Report source-verification status.
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Produce a source-ingest plan.
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Import a ClinicalTrials.gov record by NCT id, or from --input-json.
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    // Record a negative result; the many optional fields cover trial-style
    // (endpoint/power/CI) and bench-style (reagent/observation/attempts) data.
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    // List negative results, optionally for one target.
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Create a trajectory record (repeatable --target).
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    // Append a step to an existing trajectory (repeatable --reference).
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    // List trajectories, optionally for one target.
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Set the access tier for an object (type + id).
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Register a prediction (claim + resolution criterion + confidence);
    // expected outcome defaults to "affirmed".
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    // Resolve a prediction with an observed outcome; --matched records
    // whether the outcome matched. Confidence defaults to 1.0.
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List predictions; --open restricts to unresolved ones.
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    // Calibration report, optionally per actor.
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Expire overdue predictions; --now overrides the clock for testing.
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compute consensus on a target; weighting defaults to "composite",
    // with optional causal-claim / minimum causal-grade filters.
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // Ingest a path (String, not PathBuf — may allow non-path identifiers;
    // confirm against the handler) into a frontier as an optional actor.
    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    // Propose a status change for a finding (required status, unlike Review).
    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    // Accept a proposal by id.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    // Attest to an event/proof: either an inline --signature or a --key file.
    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    // Show a finding's lineage, optionally as of an RFC3339 timestamp.
    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // Carina primitive subcommands (validate/list/schema).
    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    // Atlas subcommands (init/materialize/serve/update).
    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    // Constellation subcommands (init/materialize/serve).
    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1829
// Subcommands of `vela atlas`. Atlases aggregate multiple frontiers under a
// root directory (default "atlases").
#[derive(Subcommand)]
enum AtlasAction {
    // Create an atlas from one or more frontiers (comma-separated or repeated).
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Materialize an existing atlas by name.
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Serve an atlas over HTTP (default port 3848); --no-open skips the browser.
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    // Update membership: add frontiers and/or remove entries by VFR id.
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1906
// Subcommands of `vela constellation`. Constellations aggregate atlases under
// a root directory (default "constellations"); mirrors AtlasAction minus Update.
#[derive(Subcommand)]
enum ConstellationAction {
    // Create a constellation from one or more atlases.
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Materialize an existing constellation by name.
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Serve a constellation over HTTP (default port 3849).
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1951
// Subcommands of `vela carina` (see the `carina_validate` module import).
#[derive(Subcommand)]
enum CarinaAction {
    // Validate a path, optionally against a single named primitive.
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List known primitives.
    List {
        #[arg(long)]
        json: bool,
    },
    // Print the schema for one primitive.
    Schema { primitive: String },
}
1981
// Subcommands of `vela packet`: read-only inspection and validation.
#[derive(Subcommand)]
enum PacketAction {
    // Show a packet's contents.
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a packet.
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1997
// Subcommands of `vela sign`: key generation, signing, verification, and
// per-finding signature thresholds.
#[derive(Subcommand)]
enum SignAction {
    // Generate a keypair into --out (default ".vela/keys").
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Sign a frontier with a private key file.
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Verify signatures; public key file is optional (handler decides fallback).
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Set the required signature count (--to) for a finding.
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
2038
// Subcommands of `vela actor`: manage the actor registry of a frontier.
#[derive(Subcommand)]
enum ActorAction {
    // Register an actor id with a public key and optional tier/ORCID/clearance.
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List registered actors.
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2075
// Subcommands of `vela causal`: audit, effect estimation, graph display,
// and counterfactual queries over the frontier's causal graph.
#[derive(Subcommand)]
enum CausalAction {
    // Audit causal claims; --problems-only filters clean ones out.
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    // Effect of `source` on the --on target.
    Effect {
        frontier: PathBuf,
        source: String,
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    // Show the causal graph, optionally restricted to one node.
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Counterfactual: set `intervene_on` to --set-to and query --target.
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2142
// Subcommands of `vela bridges`: derive bridges between two frontiers and
// manage their review lifecycle (list/show/confirm/refute).
#[derive(Subcommand)]
enum BridgesAction {
    // Derive bridges between frontier A and B; labels default to "a"/"b".
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    // List bridges, optionally filtered by status.
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Show one bridge by id.
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    // Confirm a bridge, with optional reviewer attribution and note.
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Refute a bridge (same shape as Confirm).
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2210
// Subcommands of `vela federation`: peer management, sync, and pushing
// conflict resolutions to peers.
#[derive(Subcommand)]
enum FederationAction {
    // Register a peer (id, URL, public key, free-text note).
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    // List peers.
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Remove a peer by id.
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    // Sync with a peer; --url overrides the stored address, --via-hub routes
    // through the hub, --vfr-id scopes the sync, and --allow-cross-vfr
    // permits syncing across VFR boundaries.
    Sync {
        frontier: PathBuf,
        peer_id: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Push a conflict resolution (by event id) to the --to destination,
    // signed with an optional --key file.
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2327
// Subcommands of `vela frontier`: create frontiers, manage dependencies on
// other frontiers (by VFR id), and diff over time.
#[derive(Subcommand)]
enum FrontierAction {
    // Create a new frontier at `path`; --force overwrites.
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    // Materialize a frontier.
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Add a dependency pinned to a locator + snapshot, with optional name.
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List dependencies.
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Remove a dependency by VFR id.
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    // Refresh dependencies from a hub (default https://vela-hub.fly.dev).
    RefreshDeps {
        frontier: PathBuf,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Diff the frontier since a timestamp (--since) or for a week (--week).
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2438
// Subcommands of `vela repo`: health reporting for a frontier repository.
#[derive(Subcommand)]
enum RepoAction {
    // Report repository status.
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Run diagnostics ("doctor") on the repository.
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2458
// Subcommands of `vela queue`: inspect, sign, and clear the pending signing
// queue. `--queue-file` overrides the default queue location everywhere.
#[derive(Subcommand)]
enum QueueAction {
    // List queued items.
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Sign queued items as --actor with --key; --yes-to-all (alias --all)
    // skips per-item confirmation.
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    // Clear the queue.
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2498
// Subcommands of `vela registry`: interact with a remote registry/hub
// (default hub URL https://vela-hub.fly.dev where a default is given).
#[derive(Subcommand)]
enum RegistryAction {
    // List entries, optionally from a specific registry URL.
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Publish a frontier as --owner, signed with --key; optional locator and
    // destination registry.
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Reverse-dependency query for a VFR id.
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    // Mirror a VFR from one registry to another (both required).
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    // Pull a VFR into --out; --transitive follows dependencies up to
    // --depth levels (default 4).
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2591
// Subcommands of `vela gaps`.
#[derive(Subcommand)]
enum GapsAction {
    // Rank gaps (default top 10), optionally restricted to a domain.
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2605
// Subcommands of `vela link`.
#[derive(Subcommand)]
enum LinkAction {
    // Add a typed link --from one id --to another (type defaults to
    // "supports"; `r#type` escapes the keyword). --no-check-target skips
    // target existence validation; attribution defaults to "reviewer".
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2644
// Subcommands for `vela entity` (dispatched by `cmd_entity` in `run_command`).
// Plain `//` comments keep clap's generated help text unchanged.
#[derive(Subcommand)]
enum EntityAction {
    // Resolve entities in a frontier.
    Resolve {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Presumably re-runs resolution even when results already exist — TODO confirm in handler.
        #[arg(long)]
        force: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // List known entities.
    List {
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2667
// Subcommands for `vela finding` (matched inline in `run_command`, which forwards
// to `state::add_finding`, `state::supersede_finding`, and `state::set_causal`).
// Plain `//` comments keep clap's generated help text unchanged.
#[derive(Subcommand)]
enum FindingCommands {
    // Add a new finding to a frontier.
    Add {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // The finding's assertion text.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against `bundle::VALID_ASSERTION_TYPES` in the handler.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Source description for the finding.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against `bundle::VALID_PROVENANCE_SOURCE_TYPES`.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Author of the finding.
        #[arg(long)]
        author: String,
        // Confidence score for the finding.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against `bundle::VALID_EVIDENCE_TYPES` in the handler.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entity list, parsed by `parse_entities` in the handler; entity types are
        // checked against `bundle::VALID_ENTITY_TYPES`. Exact string format: see parser.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Evidence spans, parsed by `parse_evidence_spans` in the handler; repeatable.
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space (name-based; confirm in `state::add_finding`).
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; split on ';' in the handler.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split on ';' in the handler.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // Apply the change; when absent, presumably a dry run — confirm in `state::add_finding`.
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new one, recording a reason.
    Supersede {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Id of the finding being superseded (positional argument).
        old_id: String,
        // The replacement finding's assertion text.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against `bundle::VALID_ASSERTION_TYPES` in the handler.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Source description for the replacement finding.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against `bundle::VALID_PROVENANCE_SOURCE_TYPES`.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Author of the replacement finding.
        #[arg(long)]
        author: String,
        // Reason the old finding is superseded.
        #[arg(long)]
        reason: String,
        // Confidence score; note the default differs from `Add` (0.5 vs 0.3).
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        // Evidence type; note the default differs from `Add` ("experimental" vs "theoretical").
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        // Entity list, parsed by `parse_entities` in the handler.
        #[arg(long, default_value = "")]
        entities: String,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; split on ';' in the handler.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split on ';' in the handler.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // Apply the change; when absent, presumably a dry run — confirm in `state::supersede_finding`.
        #[arg(long)]
        apply: bool,
    },
    // Set the causal claim (and optional evidence grade) on a finding.
    CausalSet {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Id of the finding to annotate (positional argument).
        finding_id: String,
        // Causal claim; validated against `bundle::VALID_CAUSAL_CLAIMS` in the handler.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; validated against `bundle::VALID_CAUSAL_EVIDENCE_GRADES`.
        #[arg(long)]
        grade: Option<String>,
        // Identity making the change.
        #[arg(long)]
        actor: String,
        // Reason for the change.
        #[arg(long)]
        reason: String,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2858
// Subcommands for `vela proposals` (dispatched by `cmd_proposals` in `run_command`).
// Plain `//` comments keep clap's generated help text unchanged.
#[derive(Subcommand)]
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Filter by proposal status.
        #[arg(long)]
        status: Option<String>,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal.
    Show {
        frontier: PathBuf,
        // Proposal id (positional argument).
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of applying a proposal without committing it.
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file into a frontier.
    Import {
        frontier: PathBuf,
        // Path to the file containing proposals to import (positional argument).
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposals file without importing it.
    Validate {
        // Path to the file to validate (positional argument).
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals from a frontier to a file, optionally filtered by status.
    Export {
        frontier: PathBuf,
        // Destination file path (positional argument).
        output: PathBuf,
        // Filter by proposal status.
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal, recording reviewer and reason.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity making the decision.
        #[arg(long)]
        reviewer: String,
        // Reason for the decision.
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal, recording reviewer and reason.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity making the decision.
        #[arg(long)]
        reviewer: String,
        // Reason for the decision.
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2930
// Subcommands for `vela source-adapter` (dispatched by the async
// `cmd_source_adapter` in `run_command`).
// Plain `//` comments keep clap's generated help text unchanged.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // Run a named source adapter against a frontier.
    Run {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Name of the adapter to run (positional argument).
        adapter: String,
        // Identity running the adapter.
        #[arg(long)]
        actor: String,
        // Repeatable `--entry` flag; each occurrence adds one entry to process.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority hint for the run — TODO confirm semantics in handler.
        #[arg(long)]
        priority: Option<String>,
        // Include entries that would normally be excluded.
        #[arg(long)]
        include_excluded: bool,
        // Allow the run to succeed even if some entries fail (name-based; confirm in handler).
        #[arg(long)]
        allow_partial: bool,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched input files.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Also apply artifacts produced by the adapter (name-based; confirm in handler).
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2968
// Subcommands for `vela runtime-adapter` (dispatched by `cmd_runtime_adapter`
// in `run_command`).
// Plain `//` comments keep clap's generated help text unchanged.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // Run a named runtime adapter over an input file against a frontier.
    Run {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Name of the adapter to run (positional argument).
        adapter: String,
        // Input file for the adapter.
        #[arg(long)]
        input: PathBuf,
        // Identity running the adapter.
        #[arg(long)]
        actor: String,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Also apply artifacts produced by the adapter (name-based; confirm in handler).
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2994
// Subcommands for `vela bridge-kit` (dispatched by the async `cmd_bridge_kit`
// in `run_command`).
// Plain `//` comments keep clap's generated help text unchanged.
#[derive(Subcommand)]
enum BridgeKitAction {
    // Validate a bridge-kit source file.
    Validate {
        // Path to the file to validate (positional argument).
        source: PathBuf,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // Verify the provenance of a packet file.
    VerifyProvenance {
        // Path to the packet to verify (positional argument).
        packet: PathBuf,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
3019
3020pub async fn run_command() {
3021 dotenvy::dotenv().ok();
3022
3023 match Cli::parse().command {
3024 Commands::Scout {
3025 folder,
3026 frontier,
3027 backend,
3028 dry_run,
3029 json,
3030 } => {
3031 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
3032 }
3033 Commands::CompileNotes {
3034 vault,
3035 frontier,
3036 backend,
3037 max_files,
3038 max_items_per_category,
3039 dry_run,
3040 json,
3041 } => {
3042 cmd_compile_notes(
3043 &vault,
3044 &frontier,
3045 backend.as_deref(),
3046 max_files,
3047 max_items_per_category,
3048 dry_run,
3049 json,
3050 )
3051 .await;
3052 }
3053 Commands::CompileCode {
3054 root,
3055 frontier,
3056 backend,
3057 max_files,
3058 dry_run,
3059 json,
3060 } => {
3061 cmd_compile_code(
3062 &root,
3063 &frontier,
3064 backend.as_deref(),
3065 max_files,
3066 dry_run,
3067 json,
3068 )
3069 .await;
3070 }
3071 Commands::CompileData {
3072 root,
3073 frontier,
3074 backend,
3075 sample_rows,
3076 dry_run,
3077 json,
3078 } => {
3079 cmd_compile_data(
3080 &root,
3081 &frontier,
3082 backend.as_deref(),
3083 sample_rows,
3084 dry_run,
3085 json,
3086 )
3087 .await;
3088 }
3089 Commands::ReviewPending {
3090 frontier,
3091 backend,
3092 max_proposals,
3093 batch_size,
3094 dry_run,
3095 json,
3096 } => {
3097 cmd_review_pending(
3098 &frontier,
3099 backend.as_deref(),
3100 max_proposals,
3101 batch_size,
3102 dry_run,
3103 json,
3104 )
3105 .await;
3106 }
3107 Commands::FindTensions {
3108 frontier,
3109 backend,
3110 max_findings,
3111 dry_run,
3112 json,
3113 } => {
3114 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3115 }
3116 Commands::PlanExperiments {
3117 frontier,
3118 backend,
3119 max_findings,
3120 dry_run,
3121 json,
3122 } => {
3123 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3124 }
3125 Commands::Check {
3126 source,
3127 schema,
3128 stats,
3129 conformance,
3130 conformance_dir,
3131 all,
3132 schema_only,
3133 strict,
3134 fix,
3135 json,
3136 } => cmd_check(
3137 source.as_deref(),
3138 schema,
3139 stats,
3140 conformance,
3141 &conformance_dir,
3142 all,
3143 schema_only,
3144 strict,
3145 fix,
3146 json,
3147 ),
3148 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3149 Commands::Impact {
3150 frontier,
3151 finding_id,
3152 depth,
3153 json,
3154 } => cmd_impact(&frontier, &finding_id, depth, json),
3155 Commands::Discord {
3156 frontier,
3157 json,
3158 kind,
3159 } => cmd_discord(&frontier, json, kind.as_deref()),
3160 Commands::Normalize {
3161 source,
3162 out,
3163 write,
3164 dry_run,
3165 rewrite_ids,
3166 id_map,
3167 resync_provenance,
3168 json,
3169 } => cmd_normalize(
3170 &source,
3171 out.as_deref(),
3172 write,
3173 dry_run,
3174 rewrite_ids,
3175 id_map.as_deref(),
3176 resync_provenance,
3177 json,
3178 ),
3179 Commands::Proof {
3180 frontier,
3181 out,
3182 template,
3183 gold,
3184 record_proof_state,
3185 json,
3186 } => cmd_proof(
3187 &frontier,
3188 &out,
3189 &template,
3190 gold.as_deref(),
3191 record_proof_state,
3192 json,
3193 ),
3194 Commands::Repo { action } => cmd_repo(action),
3195 Commands::Serve {
3196 frontier,
3197 frontiers,
3198 backend,
3199 http,
3200 setup,
3201 check_tools,
3202 json,
3203 workbench,
3204 } => {
3205 if setup {
3206 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3207 } else if check_tools {
3208 let source =
3209 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3210 match serve::check_tools(source) {
3211 Ok(report) => {
3212 if json {
3213 println!(
3214 "{}",
3215 serde_json::to_string_pretty(&report)
3216 .expect("failed to serialize tool check report")
3217 );
3218 } else {
3219 print_tool_check_report(&report);
3220 }
3221 }
3222 Err(e) => fail(&format!("Tool check failed: {e}")),
3223 }
3224 } else {
3225 let source =
3226 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3227 let resolved_port = if workbench {
3229 Some(http.unwrap_or(3848))
3230 } else {
3231 http
3232 };
3233 if let Some(port) = resolved_port {
3234 serve::run_http(source, backend.as_deref(), port, workbench).await;
3235 } else {
3236 serve::run(source, backend.as_deref()).await;
3237 }
3238 }
3239 }
3240 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3241 Commands::Log {
3242 frontier,
3243 limit,
3244 kind,
3245 json,
3246 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3247 Commands::Inbox {
3248 frontier,
3249 kind,
3250 limit,
3251 json,
3252 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3253 Commands::Ask {
3254 frontier,
3255 question,
3256 json,
3257 } => cmd_ask(&frontier, &question.join(" "), json),
3258 Commands::Stats { frontier, json } => {
3259 if json {
3260 print_stats_json(&frontier);
3261 } else {
3262 cmd_stats(&frontier);
3263 }
3264 }
3265 Commands::Search {
3266 source,
3267 query,
3268 entity,
3269 r#type,
3270 all,
3271 limit,
3272 json,
3273 } => cmd_search(
3274 source.as_deref(),
3275 &query,
3276 entity.as_deref(),
3277 r#type.as_deref(),
3278 all.as_deref(),
3279 limit,
3280 json,
3281 ),
3282 Commands::Tensions {
3283 source,
3284 both_high,
3285 cross_domain,
3286 top,
3287 json,
3288 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3289 Commands::Gaps { action } => cmd_gaps(action),
3290 Commands::Bridge {
3291 inputs,
3292 novelty,
3293 top,
3294 } => cmd_bridge(&inputs, novelty, top).await,
3295 Commands::Export {
3296 frontier,
3297 format,
3298 output,
3299 } => export::run(&frontier, &format, output.as_deref()),
3300 Commands::Packet { action } => cmd_packet(action),
3301 Commands::Verify { path, json } => cmd_verify(&path, json),
3302 Commands::Bench {
3303 frontier,
3304 gold,
3305 candidate,
3306 sources,
3307 threshold,
3308 report,
3309 entity_gold,
3310 link_gold,
3311 suite,
3312 suite_ready,
3313 min_f1,
3314 min_precision,
3315 min_recall,
3316 no_thresholds,
3317 json,
3318 } => {
3319 if let Some(cand) = candidate.clone() {
3324 let Some(g) = gold.clone() else {
3325 eprintln!(
3326 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3327 style::err_prefix()
3328 );
3329 std::process::exit(2);
3330 };
3331 cmd_agent_bench(
3332 &g,
3333 &cand,
3334 sources.as_deref(),
3335 threshold,
3336 report.as_deref(),
3337 json,
3338 );
3339 } else {
3340 cmd_bench(BenchArgs {
3341 frontier,
3342 gold,
3343 entity_gold,
3344 link_gold,
3345 suite,
3346 suite_ready,
3347 min_f1,
3348 min_precision,
3349 min_recall,
3350 no_thresholds,
3351 json,
3352 });
3353 }
3354 }
3355 Commands::Conformance { dir } => {
3356 let _ = conformance::run(&dir);
3357 }
3358 Commands::Version => println!("vela 0.36.0"),
3359 Commands::Sign { action } => cmd_sign(action),
3360 Commands::Actor { action } => cmd_actor(action),
3361 Commands::Federation { action } => cmd_federation(action),
3362 Commands::Causal { action } => cmd_causal(action),
3363 Commands::Frontier { action } => cmd_frontier(action),
3364 Commands::Queue { action } => cmd_queue(action),
3365 Commands::Registry { action } => cmd_registry(action),
3366 Commands::Init {
3367 path,
3368 name,
3369 template,
3370 no_git,
3371 json,
3372 } => cmd_init(&path, &name, &template, !no_git, json),
3373 Commands::Quickstart {
3374 path,
3375 name,
3376 reviewer,
3377 assertion,
3378 keys_out,
3379 json,
3380 } => cmd_quickstart(
3381 &path,
3382 &name,
3383 &reviewer,
3384 assertion.as_deref(),
3385 keys_out.as_deref(),
3386 json,
3387 ),
3388 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3389 Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
3390 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3391 Commands::Diff {
3392 target,
3393 frontier_b,
3394 frontier,
3395 reviewer,
3396 json,
3397 quiet,
3398 } => {
3399 if target.starts_with("vpr_") {
3404 let frontier_root = frontier
3405 .clone()
3406 .or_else(|| frontier_b.clone())
3407 .unwrap_or_else(|| std::path::PathBuf::from("."));
3408 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3409 .unwrap_or_else(|e| fail_return(&e));
3410 let payload = json!({
3411 "ok": true,
3412 "command": "diff.proposal",
3413 "frontier": frontier_root.display().to_string(),
3414 "proposal_id": target,
3415 "preview": preview,
3416 });
3417 if json {
3418 println!(
3419 "{}",
3420 serde_json::to_string_pretty(&payload)
3421 .expect("failed to serialize diff preview")
3422 );
3423 } else {
3424 println!("vela diff · proposal preview");
3425 println!(" proposal: {}", target);
3426 println!(" kind: {}", preview.kind);
3427 println!(
3428 " findings: {} -> {}",
3429 preview.findings_before, preview.findings_after
3430 );
3431 println!(
3432 " artifacts: {} -> {}",
3433 preview.artifacts_before, preview.artifacts_after
3434 );
3435 println!(
3436 " events: {} -> {}",
3437 preview.events_before, preview.events_after
3438 );
3439 if !preview.changed_findings.is_empty() {
3440 println!(
3441 " findings changed: {}",
3442 preview.changed_findings.join(", ")
3443 );
3444 }
3445 }
3446 } else {
3447 let frontier_a = std::path::PathBuf::from(&target);
3448 let b = frontier_b.unwrap_or_else(|| {
3449 fail_return(
3450 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3451 )
3452 });
3453 diff::run(&frontier_a, &b, json, quiet);
3454 }
3455 }
3456 Commands::Proposals { action } => cmd_proposals(action),
3457 Commands::ArtifactToState {
3458 frontier,
3459 packet,
3460 actor,
3461 apply_artifacts,
3462 json,
3463 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3464 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
3465 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3466 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3467 Commands::Link { action } => cmd_link(action),
3468 Commands::Workbench {
3469 path,
3470 port,
3471 no_open,
3472 } => {
3473 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3474 fail(&e);
3475 }
3476 }
3477 Commands::Bridges { action } => cmd_bridges(action),
3478 Commands::Entity { action } => cmd_entity(action),
3479 Commands::Finding { command } => match command {
3480 FindingCommands::Add {
3481 frontier,
3482 assertion,
3483 r#type,
3484 source,
3485 source_type,
3486 author,
3487 confidence,
3488 evidence_type,
3489 entities,
3490 entities_reviewed,
3491 evidence_span,
3492 gap,
3493 negative_space,
3494 doi,
3495 pmid,
3496 year,
3497 journal,
3498 url,
3499 source_authors,
3500 conditions_text,
3501 species,
3502 in_vivo,
3503 in_vitro,
3504 human_data,
3505 clinical_trial,
3506 json,
3507 apply,
3508 } => {
3509 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3510 validate_enum_arg(
3511 "--evidence-type",
3512 &evidence_type,
3513 bundle::VALID_EVIDENCE_TYPES,
3514 );
3515 validate_enum_arg(
3516 "--source-type",
3517 &source_type,
3518 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3519 );
3520 let parsed_entities = parse_entities(&entities);
3521 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3522 for (name, etype) in &parsed_entities {
3523 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3524 fail(&format!(
3525 "invalid entity type '{}' for '{}'. Valid: {}",
3526 etype,
3527 name,
3528 bundle::VALID_ENTITY_TYPES.join(", "),
3529 ));
3530 }
3531 }
3532 let parsed_source_authors = source_authors
3533 .map(|s| {
3534 s.split(';')
3535 .map(|a| a.trim().to_string())
3536 .filter(|a| !a.is_empty())
3537 .collect()
3538 })
3539 .unwrap_or_default();
3540 let parsed_species = species
3541 .map(|s| {
3542 s.split(';')
3543 .map(|a| a.trim().to_string())
3544 .filter(|a| !a.is_empty())
3545 .collect()
3546 })
3547 .unwrap_or_default();
3548 let report = state::add_finding(
3549 &frontier,
3550 state::FindingDraftOptions {
3551 text: assertion,
3552 assertion_type: r#type,
3553 source,
3554 source_type,
3555 author,
3556 confidence,
3557 evidence_type,
3558 entities: parsed_entities,
3559 doi,
3560 pmid,
3561 year,
3562 journal,
3563 url,
3564 source_authors: parsed_source_authors,
3565 conditions_text,
3566 species: parsed_species,
3567 in_vivo,
3568 in_vitro,
3569 human_data,
3570 clinical_trial,
3571 entities_reviewed,
3572 evidence_spans: parsed_evidence_spans,
3573 gap,
3574 negative_space,
3575 },
3576 apply,
3577 )
3578 .unwrap_or_else(|e| fail_return(&e));
3579 print_state_report(&report, json);
3580 }
3581 FindingCommands::Supersede {
3582 frontier,
3583 old_id,
3584 assertion,
3585 r#type,
3586 source,
3587 source_type,
3588 author,
3589 reason,
3590 confidence,
3591 evidence_type,
3592 entities,
3593 doi,
3594 pmid,
3595 year,
3596 journal,
3597 url,
3598 source_authors,
3599 conditions_text,
3600 species,
3601 in_vivo,
3602 in_vitro,
3603 human_data,
3604 clinical_trial,
3605 json,
3606 apply,
3607 } => {
3608 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3609 validate_enum_arg(
3610 "--evidence-type",
3611 &evidence_type,
3612 bundle::VALID_EVIDENCE_TYPES,
3613 );
3614 validate_enum_arg(
3615 "--source-type",
3616 &source_type,
3617 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3618 );
3619 let parsed_entities = parse_entities(&entities);
3620 for (name, etype) in &parsed_entities {
3621 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3622 fail(&format!(
3623 "invalid entity type '{}' for '{}'. Valid: {}",
3624 etype,
3625 name,
3626 bundle::VALID_ENTITY_TYPES.join(", "),
3627 ));
3628 }
3629 }
3630 let parsed_source_authors = source_authors
3631 .map(|s| {
3632 s.split(';')
3633 .map(|a| a.trim().to_string())
3634 .filter(|a| !a.is_empty())
3635 .collect()
3636 })
3637 .unwrap_or_default();
3638 let parsed_species = species
3639 .map(|s| {
3640 s.split(';')
3641 .map(|a| a.trim().to_string())
3642 .filter(|a| !a.is_empty())
3643 .collect()
3644 })
3645 .unwrap_or_default();
3646 let report = state::supersede_finding(
3647 &frontier,
3648 &old_id,
3649 &reason,
3650 state::FindingDraftOptions {
3651 text: assertion,
3652 assertion_type: r#type,
3653 source,
3654 source_type,
3655 author,
3656 confidence,
3657 evidence_type,
3658 entities: parsed_entities,
3659 doi,
3660 pmid,
3661 year,
3662 journal,
3663 url,
3664 source_authors: parsed_source_authors,
3665 conditions_text,
3666 species: parsed_species,
3667 in_vivo,
3668 in_vitro,
3669 human_data,
3670 clinical_trial,
3671 entities_reviewed: false,
3672 evidence_spans: Vec::new(),
3673 gap: false,
3674 negative_space: false,
3675 },
3676 apply,
3677 )
3678 .unwrap_or_else(|e| fail_return(&e));
3679 print_state_report(&report, json);
3680 }
3681 FindingCommands::CausalSet {
3682 frontier,
3683 finding_id,
3684 claim,
3685 grade,
3686 actor,
3687 reason,
3688 json,
3689 } => {
3690 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3691 fail(&format!(
3692 "invalid --claim '{claim}'; valid: {:?}",
3693 bundle::VALID_CAUSAL_CLAIMS
3694 ));
3695 }
3696 if let Some(g) = grade.as_deref()
3697 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3698 {
3699 fail(&format!(
3700 "invalid --grade '{g}'; valid: {:?}",
3701 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3702 ));
3703 }
3704 let report = state::set_causal(
3705 &frontier,
3706 &finding_id,
3707 &claim,
3708 grade.as_deref(),
3709 &actor,
3710 &reason,
3711 )
3712 .unwrap_or_else(|e| fail_return(&e));
3713 print_state_report(&report, json);
3714 }
3715 },
3716 Commands::Review {
3717 frontier,
3718 finding_id,
3719 status,
3720 reason,
3721 reviewer,
3722 apply,
3723 json,
3724 } => {
3725 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3726 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3727 let report = state::review_finding(
3728 &frontier,
3729 &finding_id,
3730 state::ReviewOptions {
3731 status,
3732 reason,
3733 reviewer,
3734 },
3735 apply,
3736 )
3737 .unwrap_or_else(|e| fail_return(&e));
3738 print_state_report(&report, json);
3739 }
3740 Commands::Note {
3741 frontier,
3742 finding_id,
3743 text,
3744 author,
3745 apply,
3746 json,
3747 } => {
3748 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3749 .unwrap_or_else(|e| fail_return(&e));
3750 print_state_report(&report, json);
3751 }
3752 Commands::Caveat {
3753 frontier,
3754 finding_id,
3755 text,
3756 author,
3757 apply,
3758 json,
3759 } => {
3760 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3761 .unwrap_or_else(|e| fail_return(&e));
3762 print_state_report(&report, json);
3763 }
3764 Commands::Revise {
3765 frontier,
3766 finding_id,
3767 confidence,
3768 reason,
3769 reviewer,
3770 apply,
3771 json,
3772 } => {
3773 let report = state::revise_confidence(
3774 &frontier,
3775 &finding_id,
3776 state::ReviseOptions {
3777 confidence,
3778 reason,
3779 reviewer,
3780 },
3781 apply,
3782 )
3783 .unwrap_or_else(|e| fail_return(&e));
3784 print_state_report(&report, json);
3785 }
3786 Commands::Reject {
3787 frontier,
3788 finding_id,
3789 reason,
3790 reviewer,
3791 apply,
3792 json,
3793 } => {
3794 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3795 .unwrap_or_else(|e| fail_return(&e));
3796 print_state_report(&report, json);
3797 }
3798 Commands::History {
3799 frontier,
3800 finding_id,
3801 json,
3802 as_of,
3803 } => {
3804 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3805 .unwrap_or_else(|e| fail_return(&e));
3806 if json {
3807 println!(
3808 "{}",
3809 serde_json::to_string_pretty(&payload)
3810 .expect("failed to serialize history response")
3811 );
3812 } else {
3813 print_history(&payload);
3814 }
3815 }
3816 Commands::ImportEvents { source, into, json } => {
3817 let report =
3818 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3819 if json {
3820 println!(
3821 "{}",
3822 serde_json::to_string_pretty(&json!({
3823 "ok": true,
3824 "command": "import-events",
3825 "source": report.source,
3826 "target": into.display().to_string(),
3827 "summary": {
3828 "imported": report.imported,
3829 "new": report.new,
3830 "duplicate": report.duplicate,
3831 "canonical_events_imported": report.events_imported,
3832 "canonical_events_new": report.events_new,
3833 "canonical_events_duplicate": report.events_duplicate,
3834 }
3835 }))
3836 .expect("failed to serialize import-events response")
3837 );
3838 } else {
3839 println!("{report}");
3840 }
3841 }
3842 Commands::Retract {
3843 source,
3844 finding_id,
3845 reason,
3846 reviewer,
3847 apply,
3848 json,
3849 } => {
3850 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3851 .unwrap_or_else(|e| fail_return(&e));
3852 print_state_report(&report, json);
3853 }
3854 Commands::LocatorRepair {
3855 frontier,
3856 atom_id,
3857 locator,
3858 reviewer,
3859 reason,
3860 apply,
3861 json,
3862 } => {
3863 cmd_locator_repair(
3864 &frontier,
3865 &atom_id,
3866 locator.as_deref(),
3867 &reviewer,
3868 &reason,
3869 apply,
3870 json,
3871 );
3872 }
3873 Commands::SourceFetch {
3874 identifier,
3875 cache,
3876 out,
3877 refresh,
3878 json,
3879 } => {
3880 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3881 }
3882 Commands::SpanRepair {
3883 frontier,
3884 finding_id,
3885 section,
3886 text,
3887 reviewer,
3888 reason,
3889 apply,
3890 json,
3891 } => {
3892 cmd_span_repair(
3893 &frontier,
3894 &finding_id,
3895 §ion,
3896 &text,
3897 &reviewer,
3898 &reason,
3899 apply,
3900 json,
3901 );
3902 }
3903 Commands::ProofAdd {
3904 frontier,
3905 target_finding,
3906 tool,
3907 tool_version,
3908 script_path,
3909 name,
3910 reviewer,
3911 reason,
3912 json,
3913 } => {
3914 cmd_proof_add(
3915 &frontier,
3916 &target_finding,
3917 &tool,
3918 &tool_version,
3919 &script_path,
3920 &name,
3921 &reviewer,
3922 &reason,
3923 json,
3924 );
3925 }
3926 Commands::EntityAdd {
3927 frontier,
3928 finding_id,
3929 entity,
3930 entity_type,
3931 reviewer,
3932 reason,
3933 apply,
3934 json,
3935 } => {
3936 let report = state::add_finding_entity(
3937 &frontier,
3938 &finding_id,
3939 &entity,
3940 &entity_type,
3941 &reviewer,
3942 &reason,
3943 apply,
3944 )
3945 .unwrap_or_else(|e| fail_return(&e));
3946 print_state_report(&report, json);
3947 }
3948 Commands::EntityResolve {
3949 frontier,
3950 finding_id,
3951 entity,
3952 source,
3953 id,
3954 confidence,
3955 matched_name,
3956 resolution_method,
3957 reviewer,
3958 reason,
3959 apply,
3960 json,
3961 } => {
3962 cmd_entity_resolve(
3963 &frontier,
3964 &finding_id,
3965 &entity,
3966 &source,
3967 &id,
3968 confidence,
3969 matched_name.as_deref(),
3970 &resolution_method,
3971 &reviewer,
3972 &reason,
3973 apply,
3974 json,
3975 );
3976 }
3977 Commands::Propagate {
3978 frontier,
3979 retract,
3980 reduce_confidence,
3981 to,
3982 output,
3983 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3984 Commands::Replicate {
3985 frontier,
3986 target,
3987 outcome,
3988 by,
3989 conditions,
3990 source_title,
3991 doi,
3992 pmid,
3993 sample_size,
3994 note,
3995 previous_attempt,
3996 no_cascade,
3997 json,
3998 } => cmd_replicate(
3999 &frontier,
4000 &target,
4001 &outcome,
4002 &by,
4003 &conditions,
4004 &source_title,
4005 doi.as_deref(),
4006 pmid.as_deref(),
4007 sample_size.as_deref(),
4008 ¬e,
4009 previous_attempt.as_deref(),
4010 no_cascade,
4011 json,
4012 ),
4013 Commands::Replications {
4014 frontier,
4015 target,
4016 json,
4017 } => cmd_replications(&frontier, target.as_deref(), json),
4018 Commands::DatasetAdd {
4019 frontier,
4020 name,
4021 version,
4022 content_hash,
4023 url,
4024 license,
4025 source_title,
4026 doi,
4027 row_count,
4028 json,
4029 } => cmd_dataset_add(
4030 &frontier,
4031 &name,
4032 version.as_deref(),
4033 &content_hash,
4034 url.as_deref(),
4035 license.as_deref(),
4036 &source_title,
4037 doi.as_deref(),
4038 row_count,
4039 json,
4040 ),
4041 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
4042 Commands::CodeAdd {
4043 frontier,
4044 language,
4045 repo_url,
4046 commit,
4047 path,
4048 content_hash,
4049 line_start,
4050 line_end,
4051 entry_point,
4052 json,
4053 } => cmd_code_add(
4054 &frontier,
4055 &language,
4056 repo_url.as_deref(),
4057 commit.as_deref(),
4058 &path,
4059 &content_hash,
4060 line_start,
4061 line_end,
4062 entry_point.as_deref(),
4063 json,
4064 ),
4065 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
4066 Commands::ArtifactAdd {
4067 frontier,
4068 kind,
4069 name,
4070 file,
4071 url,
4072 content_hash,
4073 media_type,
4074 license,
4075 source_title,
4076 source_url,
4077 doi,
4078 target,
4079 metadata,
4080 access_tier,
4081 deposited_by,
4082 reason,
4083 json,
4084 } => cmd_artifact_add(
4085 &frontier,
4086 &kind,
4087 &name,
4088 file.as_deref(),
4089 url.as_deref(),
4090 content_hash.as_deref(),
4091 media_type.as_deref(),
4092 license.as_deref(),
4093 source_title.as_deref(),
4094 source_url.as_deref(),
4095 doi.as_deref(),
4096 target,
4097 metadata,
4098 &access_tier,
4099 &deposited_by,
4100 &reason,
4101 json,
4102 ),
4103 Commands::Artifacts {
4104 frontier,
4105 target,
4106 json,
4107 } => cmd_artifacts(&frontier, target.as_deref(), json),
4108 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4109 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4110 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4111 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4112 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4113 Commands::ClinicalTrialImport {
4114 frontier,
4115 nct_id,
4116 input_json,
4117 target,
4118 deposited_by,
4119 reason,
4120 license,
4121 json,
4122 } => {
4123 cmd_clinical_trial_import(
4124 &frontier,
4125 &nct_id,
4126 input_json.as_deref(),
4127 target,
4128 &deposited_by,
4129 &reason,
4130 &license,
4131 json,
4132 )
4133 .await
4134 }
4135 Commands::NegativeResultAdd {
4136 frontier,
4137 kind,
4138 deposited_by,
4139 reason,
4140 conditions_text,
4141 notes,
4142 target,
4143 endpoint,
4144 intervention,
4145 comparator,
4146 population,
4147 n_enrolled,
4148 power,
4149 ci_lower,
4150 ci_upper,
4151 effect_size_threshold,
4152 registry_id,
4153 reagent,
4154 observation,
4155 attempts,
4156 source_title,
4157 doi,
4158 url,
4159 year,
4160 json,
4161 } => cmd_negative_result_add(
4162 &frontier,
4163 &kind,
4164 &deposited_by,
4165 &reason,
4166 &conditions_text,
4167 ¬es,
4168 target,
4169 endpoint.as_deref(),
4170 intervention.as_deref(),
4171 comparator.as_deref(),
4172 population.as_deref(),
4173 n_enrolled,
4174 power,
4175 ci_lower,
4176 ci_upper,
4177 effect_size_threshold,
4178 registry_id.as_deref(),
4179 reagent.as_deref(),
4180 observation.as_deref(),
4181 attempts,
4182 &source_title,
4183 doi.as_deref(),
4184 url.as_deref(),
4185 year,
4186 json,
4187 ),
4188 Commands::NegativeResults {
4189 frontier,
4190 target,
4191 json,
4192 } => cmd_negative_results(&frontier, target.as_deref(), json),
4193 Commands::TrajectoryCreate {
4194 frontier,
4195 deposited_by,
4196 reason,
4197 target,
4198 notes,
4199 json,
4200 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4201 Commands::TrajectoryStep {
4202 frontier,
4203 trajectory_id,
4204 kind,
4205 description,
4206 actor,
4207 reason,
4208 reference,
4209 json,
4210 } => cmd_trajectory_step(
4211 &frontier,
4212 &trajectory_id,
4213 &kind,
4214 &description,
4215 &actor,
4216 &reason,
4217 reference,
4218 json,
4219 ),
4220 Commands::Trajectories {
4221 frontier,
4222 target,
4223 json,
4224 } => cmd_trajectories(&frontier, target.as_deref(), json),
4225 Commands::TierSet {
4226 frontier,
4227 object_type,
4228 object_id,
4229 tier,
4230 actor,
4231 reason,
4232 json,
4233 } => cmd_tier_set(
4234 &frontier,
4235 &object_type,
4236 &object_id,
4237 &tier,
4238 &actor,
4239 &reason,
4240 json,
4241 ),
4242 Commands::Predict {
4243 frontier,
4244 by,
4245 claim,
4246 criterion,
4247 resolves_by,
4248 confidence,
4249 target,
4250 outcome,
4251 conditions,
4252 json,
4253 } => cmd_predict(
4254 &frontier,
4255 &by,
4256 &claim,
4257 &criterion,
4258 resolves_by.as_deref(),
4259 confidence,
4260 &target,
4261 &outcome,
4262 &conditions,
4263 json,
4264 ),
4265 Commands::Resolve {
4266 frontier,
4267 prediction,
4268 outcome,
4269 matched,
4270 by,
4271 confidence,
4272 source_title,
4273 doi,
4274 json,
4275 } => cmd_resolve(
4276 &frontier,
4277 &prediction,
4278 &outcome,
4279 matched,
4280 &by,
4281 confidence,
4282 &source_title,
4283 doi.as_deref(),
4284 json,
4285 ),
4286 Commands::Predictions {
4287 frontier,
4288 by,
4289 open,
4290 json,
4291 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4292 Commands::Calibration {
4293 frontier,
4294 actor,
4295 json,
4296 } => cmd_calibration(&frontier, actor.as_deref(), json),
4297 Commands::PredictionsExpire {
4298 frontier,
4299 now,
4300 dry_run,
4301 json,
4302 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4303 Commands::Consensus {
4304 frontier,
4305 target,
4306 weighting,
4307 causal_claim,
4308 causal_grade_min,
4309 json,
4310 } => cmd_consensus(
4311 &frontier,
4312 &target,
4313 &weighting,
4314 causal_claim.as_deref(),
4315 causal_grade_min.as_deref(),
4316 json,
4317 ),
4318
4319 Commands::Ingest {
4322 path,
4323 frontier,
4324 backend,
4325 actor,
4326 dry_run,
4327 json,
4328 } => {
4329 cmd_ingest(
4330 &path,
4331 &frontier,
4332 backend.as_deref(),
4333 actor.as_deref(),
4334 dry_run,
4335 json,
4336 )
4337 .await
4338 }
4339
4340 Commands::Propose {
4341 frontier,
4342 finding_id,
4343 status,
4344 reason,
4345 reviewer,
4346 apply,
4347 json,
4348 } => {
4349 let options = state::ReviewOptions {
4352 status: status.clone(),
4353 reason: reason.clone(),
4354 reviewer: reviewer.clone(),
4355 };
4356 let report = state::review_finding(&frontier, &finding_id, options, apply)
4357 .unwrap_or_else(|e| fail_return(&e));
4358 print_state_report(&report, json);
4359 }
4360
4361 Commands::Accept {
4362 frontier,
4363 proposal_id,
4364 reviewer,
4365 reason,
4366 json,
4367 } => {
4368 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4370 .unwrap_or_else(|e| fail_return(&e));
4371 let payload = json!({
4372 "ok": true,
4373 "command": "accept",
4374 "frontier": frontier.display().to_string(),
4375 "proposal_id": proposal_id,
4376 "reviewer": reviewer,
4377 "applied_event_id": event_id,
4378 });
4379 if json {
4380 println!(
4381 "{}",
4382 serde_json::to_string_pretty(&payload)
4383 .expect("failed to serialize accept response")
4384 );
4385 } else {
4386 println!(
4387 "{} accepted and applied proposal {}",
4388 style::ok("ok"),
4389 proposal_id
4390 );
4391 println!(" event: {}", event_id);
4392 }
4393 }
4394
4395 Commands::Attest {
4396 frontier,
4397 event,
4398 attester,
4399 scope_note,
4400 proof_id,
4401 signature,
4402 key,
4403 json,
4404 } => {
4405 if let Some(target_event_id) = event {
4409 let attester_id = attester.unwrap_or_else(|| {
4410 fail_return("attest: --attester is required in per-event mode")
4411 });
4412 let scope = scope_note.unwrap_or_else(|| {
4413 fail_return("attest: --scope-note is required in per-event mode")
4414 });
4415 let attestation_event_id = state::record_attestation(
4416 &frontier,
4417 &target_event_id,
4418 &attester_id,
4419 &scope,
4420 proof_id.as_deref(),
4421 signature.as_deref(),
4422 )
4423 .unwrap_or_else(|e| fail_return(&e));
4424 if json {
4425 let payload = json!({
4426 "ok": true,
4427 "command": "attest.event",
4428 "frontier": frontier.display().to_string(),
4429 "target_event_id": target_event_id,
4430 "attestation_event_id": attestation_event_id,
4431 "attester_id": attester_id,
4432 });
4433 println!(
4434 "{}",
4435 serde_json::to_string_pretty(&payload)
4436 .expect("failed to serialize attest.event response")
4437 );
4438 } else {
4439 println!(
4440 "{} attested {} by {} ({})",
4441 style::ok("ok"),
4442 target_event_id,
4443 attester_id,
4444 attestation_event_id
4445 );
4446 }
4447 return;
4448 }
4449 let key_path = key.unwrap_or_else(|| {
4451 fail_return(
4452 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4453 )
4454 });
4455 let count =
4456 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4457 let payload = json!({
4458 "ok": true,
4459 "command": "attest",
4460 "frontier": frontier.display().to_string(),
4461 "private_key": key_path.display().to_string(),
4462 "signed": count,
4463 });
4464 if json {
4465 println!(
4466 "{}",
4467 serde_json::to_string_pretty(&payload)
4468 .expect("failed to serialize attest response")
4469 );
4470 } else {
4471 println!(
4472 "{} {count} findings in {}",
4473 style::ok("attested"),
4474 frontier.display()
4475 );
4476 }
4477 }
4478
4479 Commands::Lineage {
4480 frontier,
4481 finding_id,
4482 as_of,
4483 json,
4484 } => {
4485 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4487 .unwrap_or_else(|e| fail_return(&e));
4488 if json {
4489 println!(
4490 "{}",
4491 serde_json::to_string_pretty(&payload)
4492 .expect("failed to serialize lineage response")
4493 );
4494 } else {
4495 print_history(&payload);
4496 }
4497 }
4498
4499 Commands::Carina { action } => cmd_carina(action),
4500
4501 Commands::Atlas { action } => cmd_atlas(action).await,
4502
4503 Commands::Constellation { action } => cmd_constellation(action).await,
4504 }
4505}
4506
4507async fn cmd_atlas(action: AtlasAction) {
4512 match action {
4513 AtlasAction::Init {
4514 name,
4515 frontiers,
4516 domain,
4517 scope_note,
4518 atlases_root,
4519 json,
4520 } => match ATLAS_INIT_HANDLER.get() {
4521 Some(handler) => {
4522 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4523 }
4524 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4525 },
4526 AtlasAction::Materialize {
4527 name,
4528 atlases_root,
4529 json,
4530 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4531 Some(handler) => handler(atlases_root, name, json).await,
4532 None => fail("vela atlas materialize: handler not registered"),
4533 },
4534 AtlasAction::Serve {
4535 name,
4536 atlases_root,
4537 port,
4538 no_open,
4539 } => {
4540 match ATLAS_SERVE_HANDLER.get() {
4544 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4545 None => fail("vela atlas serve: handler not registered"),
4546 }
4547 }
4548 AtlasAction::Update {
4549 name,
4550 add_frontier,
4551 remove_vfr_id,
4552 atlases_root,
4553 json,
4554 } => match ATLAS_UPDATE_HANDLER.get() {
4555 Some(handler) => {
4556 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4557 }
4558 None => fail("vela atlas update: handler not registered"),
4559 },
4560 }
4561}
4562
4563async fn cmd_constellation(action: ConstellationAction) {
4567 match action {
4568 ConstellationAction::Init {
4569 name,
4570 atlases,
4571 scope_note,
4572 constellations_root,
4573 json,
4574 } => match CONSTELLATION_INIT_HANDLER.get() {
4575 Some(handler) => {
4576 handler(constellations_root, name, scope_note, atlases, json).await;
4577 }
4578 None => fail(
4579 "vela constellation init: handler not registered (built without vela-constellation)",
4580 ),
4581 },
4582 ConstellationAction::Materialize {
4583 name,
4584 constellations_root,
4585 json,
4586 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4587 Some(handler) => handler(constellations_root, name, json).await,
4588 None => fail("vela constellation materialize: handler not registered"),
4589 },
4590 ConstellationAction::Serve {
4591 name,
4592 constellations_root,
4593 port,
4594 no_open,
4595 } => match CONSTELLATION_SERVE_HANDLER.get() {
4596 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4597 None => fail("vela constellation serve: handler not registered"),
4598 },
4599 }
4600}
4601
4602fn cmd_carina(action: CarinaAction) {
4605 match action {
4606 CarinaAction::List { json } => {
4607 if json {
4608 println!(
4609 "{}",
4610 serde_json::to_string_pretty(&json!({
4611 "ok": true,
4612 "command": "carina.list",
4613 "primitives": carina_validate::PRIMITIVE_NAMES,
4614 }))
4615 .expect("failed to serialize carina.list")
4616 );
4617 } else {
4618 println!("Carina primitives bundled with this build:");
4619 for name in carina_validate::PRIMITIVE_NAMES {
4620 println!(" · {name}");
4621 }
4622 }
4623 }
4624 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4625 Some(text) => print!("{text}"),
4626 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4627 },
4628 CarinaAction::Validate {
4629 path,
4630 primitive,
4631 json,
4632 } => {
4633 let text = std::fs::read_to_string(&path)
4634 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4635 let value: Value = serde_json::from_str(&text)
4636 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4637 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4643 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4644 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4645 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4646 for (key, child) in primitives {
4647 let outcome = carina_validate::validate(key, child)
4648 .map(|()| carina_validate::detect_primitive(child));
4649 report.push((key.clone(), outcome));
4650 }
4651 } else {
4652 let outcome = match primitive.as_deref() {
4653 Some(name) => carina_validate::validate(name, &value).map(|()| {
4654 carina_validate::PRIMITIVE_NAMES
4655 .iter()
4656 .copied()
4657 .find(|p| *p == name)
4658 }),
4659 None => carina_validate::validate_auto(&value).map(Some),
4660 };
4661 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4662 report.push((label, outcome));
4663 }
4664
4665 let total = report.len();
4666 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4667 let fail = total - pass;
4668
4669 if json {
4670 let entries: Vec<Value> = report
4671 .iter()
4672 .map(|(label, r)| match r {
4673 Ok(name) => json!({
4674 "key": label,
4675 "primitive": name,
4676 "ok": true,
4677 }),
4678 Err(errs) => json!({
4679 "key": label,
4680 "ok": false,
4681 "errors": errs,
4682 }),
4683 })
4684 .collect();
4685 println!(
4686 "{}",
4687 serde_json::to_string_pretty(&json!({
4688 "ok": fail == 0,
4689 "command": "carina.validate",
4690 "file": path.display().to_string(),
4691 "total": total,
4692 "passed": pass,
4693 "failed": fail,
4694 "entries": entries,
4695 }))
4696 .expect("failed to serialize carina.validate")
4697 );
4698 } else {
4699 for (label, r) in &report {
4700 match r {
4701 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4702 Ok(None) => println!(" {} {label}", style::ok("ok")),
4703 Err(errs) => {
4704 println!(" {} {label}", style::lost("fail"));
4705 for e in errs {
4706 println!(" {e}");
4707 }
4708 }
4709 }
4710 }
4711 println!();
4712 if fail == 0 {
4713 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4714 } else {
4715 println!(
4716 "{} {pass}/{total} valid · {fail} failed",
4717 style::lost("carina.validate")
4718 );
4719 }
4720 }
4721
4722 if fail > 0 {
4723 std::process::exit(1);
4724 }
4725 }
4726 }
4727}
4728
4729#[allow(clippy::too_many_arguments)]
4740fn cmd_proof_add(
4741 frontier: &Path,
4742 target_finding: &str,
4743 tool: &str,
4744 tool_version: &str,
4745 script_path: &Path,
4746 name: &str,
4747 reviewer: &str,
4748 reason: &str,
4749 json_output: bool,
4750) {
4751 use std::collections::BTreeMap;
4752
4753 if !target_finding.starts_with("vf_") {
4755 fail(&format!(
4756 "--target-finding must be a vf_* finding id; got `{target_finding}`"
4757 ));
4758 }
4759 let valid_tools = [
4761 "lean4", "coq", "isabelle", "agda", "metamath", "rocq", "other",
4762 ];
4763 if !valid_tools.contains(&tool) {
4764 fail(&format!(
4765 "--tool `{tool}` not in {valid_tools:?}; see embedded/carina-schemas/proof.schema.json"
4766 ));
4767 }
4768
4769 let script_bytes = std::fs::read(script_path)
4771 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", script_path.display())));
4772 let script_hash_hex = hex::encode(Sha256::digest(&script_bytes));
4773 let script_locator = format!("sha256:{script_hash_hex}");
4774
4775 let vpf_preimage = format!("{script_locator}|{tool}|{tool_version}|{target_finding}");
4779 let vpf_id = format!(
4780 "vpf_{}",
4781 &hex::encode(Sha256::digest(vpf_preimage.as_bytes()))[..16]
4782 );
4783
4784 let verified_at = chrono::Utc::now().to_rfc3339();
4787 let proof_primitive = json!({
4788 "schema": "carina.proof.v0.3",
4789 "id": vpf_id,
4790 "tool": tool,
4791 "tool_version": tool_version,
4792 "script_locator": script_locator,
4793 "verifier_output_hash": format!("sha256:{}", "0".repeat(64)),
4797 "verified_at": verified_at,
4798 "target_finding_id": target_finding,
4799 });
4800 if let Err(errs) = carina_validate::validate("proof", &proof_primitive) {
4801 fail(&format!(
4802 "constructed Proof primitive does not validate against proof.schema.json:\n - {}",
4803 errs.join("\n - ")
4804 ));
4805 }
4806
4807 let mut metadata: BTreeMap<String, Value> = BTreeMap::new();
4809 metadata.insert(
4810 "carina_kind".to_string(),
4811 Value::String("proof_script".to_string()),
4812 );
4813 metadata.insert(
4814 "carina_proof_tool".to_string(),
4815 Value::String(tool.to_string()),
4816 );
4817 metadata.insert(
4818 "carina_proof_tool_version".to_string(),
4819 Value::String(tool_version.to_string()),
4820 );
4821 metadata.insert("carina_proof_id".to_string(), Value::String(vpf_id.clone()));
4822 metadata.insert(
4823 "carina_proof_target_finding".to_string(),
4824 Value::String(target_finding.to_string()),
4825 );
4826
4827 let media_type = match tool {
4828 "lean4" | "rocq" => Some("text/x-lean".to_string()),
4829 "coq" => Some("text/x-coq".to_string()),
4830 "isabelle" => Some("text/x-isabelle".to_string()),
4831 "agda" => Some("text/x-agda".to_string()),
4832 "metamath" => Some("text/x-metamath".to_string()),
4833 _ => None,
4834 };
4835
4836 let provenance = crate::bundle::Provenance {
4837 source_type: "code_repository".to_string(),
4838 doi: None,
4839 pmid: None,
4840 pmc: None,
4841 openalex_id: None,
4842 url: None,
4843 title: format!("Proof script for {target_finding} ({tool} {tool_version})"),
4844 authors: Vec::new(),
4845 year: None,
4846 journal: None,
4847 license: Some("Apache-2.0 OR MIT".to_string()),
4848 publisher: None,
4849 funders: Vec::new(),
4850 extraction: crate::bundle::Extraction::default(),
4851 review: None,
4852 citation_count: None,
4853 };
4854
4855 let artifact_id = crate::bundle::Artifact::content_address(
4856 "source_file",
4857 name,
4858 &format!("sha256:{script_hash_hex}"),
4859 None,
4860 Some(&script_path.display().to_string()),
4861 );
4862
4863 let artifact = crate::bundle::Artifact {
4864 id: artifact_id.clone(),
4865 kind: "source_file".to_string(),
4866 name: name.to_string(),
4867 content_hash: format!("sha256:{script_hash_hex}"),
4868 size_bytes: Some(script_bytes.len() as u64),
4869 media_type,
4870 storage_mode: "pointer".to_string(),
4871 locator: Some(script_path.display().to_string()),
4872 source_url: None,
4873 license: Some("Apache-2.0 OR MIT".to_string()),
4874 target_findings: vec![target_finding.to_string()],
4875 source_id: None,
4876 provenance,
4877 metadata,
4878 review_state: None,
4879 retracted: false,
4880 access_tier: crate::access_tier::AccessTier::default(),
4881 created: verified_at.clone(),
4882 };
4883
4884 let report = state::add_artifact(frontier, artifact, reviewer, reason)
4888 .unwrap_or_else(|e| fail_return(&e));
4889
4890 let payload = json!({
4892 "ok": true,
4893 "command": "proof-add",
4894 "frontier": frontier.display().to_string(),
4895 "target_finding": target_finding,
4896 "tool": tool,
4897 "tool_version": tool_version,
4898 "script_path": script_path.display().to_string(),
4899 "script_locator": script_locator,
4900 "size_bytes": script_bytes.len(),
4901 "vpf_id": vpf_id,
4902 "va_id": artifact_id,
4903 "applied_event_id": report.applied_event_id,
4904 "verified_at": verified_at,
4905 "reviewer": reviewer,
4906 });
4907
4908 if json_output {
4909 println!(
4910 "{}",
4911 serde_json::to_string_pretty(&payload).expect("failed to serialize proof-add response")
4912 );
4913 } else {
4914 println!(
4915 "{} proof artifact deposited for {target_finding}",
4916 style::ok("ok")
4917 );
4918 println!(" vpf_id: {vpf_id}");
4919 println!(" va_id: {artifact_id}");
4920 println!(" locator: {script_locator}");
4921 println!(" tool: {tool} {tool_version}");
4922 if let Some(eid) = &report.applied_event_id {
4923 println!(" event: {eid}");
4924 }
4925 }
4926}
4927
4928fn cmd_consensus(
4931 frontier: &Path,
4932 target: &str,
4933 weighting_str: &str,
4934 causal_claim: Option<&str>,
4935 causal_grade_min: Option<&str>,
4936 json: bool,
4937) {
4938 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4939
4940 if !target.starts_with("vf_") {
4941 fail(&format!("target `{target}` is not a vf_ finding id"));
4942 }
4943 let scheme =
4944 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4945
4946 let parsed_claim = match causal_claim {
4947 None => None,
4948 Some("correlation") => Some(CausalClaim::Correlation),
4949 Some("mediation") => Some(CausalClaim::Mediation),
4950 Some("intervention") => Some(CausalClaim::Intervention),
4951 Some(other) => fail_return(&format!(
4952 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4953 )),
4954 };
4955 let parsed_grade = match causal_grade_min {
4956 None => None,
4957 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4958 Some("observational") => Some(CausalEvidenceGrade::Observational),
4959 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4960 Some("rct") => Some(CausalEvidenceGrade::Rct),
4961 Some(other) => fail_return(&format!(
4962 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4963 )),
4964 };
4965 let filter = crate::aggregate::AggregateFilter {
4966 causal_claim: parsed_claim,
4967 causal_grade_min: parsed_grade,
4968 };
4969 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4970
4971 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4972 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4973
4974 if json {
4975 println!(
4976 "{}",
4977 serde_json::to_string_pretty(&result).expect("serialize consensus")
4978 );
4979 return;
4980 }
4981
4982 println!();
4983 println!(
4984 " {}",
4985 format!(
4986 "VELA · CONSENSUS · {} ({})",
4987 result.target, result.weighting
4988 )
4989 .to_uppercase()
4990 .dimmed()
4991 );
4992 println!(" {}", style::tick_row(60));
4993 println!(
4994 " target: {}",
4995 truncate(&result.target_assertion, 80)
4996 );
4997 println!(" similar findings: {}", result.n_findings);
4998 println!(
4999 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
5000 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
5001 );
5002 println!();
5003 println!(" constituents (sorted by weight):");
5004 let mut sorted = result.constituents.clone();
5005 sorted.sort_by(|a, b| {
5006 b.weight
5007 .partial_cmp(&a.weight)
5008 .unwrap_or(std::cmp::Ordering::Equal)
5009 });
5010 for c in sorted.iter().take(10) {
5011 let repls = if c.n_replications > 0 {
5012 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
5013 } else {
5014 String::new()
5015 };
5016 println!(
5017 " · w={:.2} raw={:.2} adj={:.2}{}",
5018 c.weight, c.raw_score, c.adjusted_score, repls
5019 );
5020 println!(" {}", truncate(&c.assertion_text, 88));
5021 }
5022 if result.constituents.len() > 10 {
5023 println!(" ... ({} more)", result.constituents.len() - 10);
5024 }
5025}
5026
5027fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
5033 let trimmed = s.trim();
5034 if trimmed.eq_ignore_ascii_case("affirmed") {
5035 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
5036 }
5037 if trimmed.eq_ignore_ascii_case("falsified") {
5038 return Ok(crate::bundle::ExpectedOutcome::Falsified);
5039 }
5040 if let Some(rest) = trimmed.strip_prefix("cat:") {
5041 return Ok(crate::bundle::ExpectedOutcome::Categorical {
5042 value: rest.to_string(),
5043 });
5044 }
5045 if let Some(rest) = trimmed.strip_prefix("quant:") {
5046 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
5047 let (val_s, tol_s) = vt
5048 .split_once('±')
5049 .or_else(|| vt.split_once("+/-"))
5050 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
5051 let value: f64 = val_s
5052 .parse()
5053 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
5054 let tolerance: f64 = tol_s
5055 .parse()
5056 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
5057 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
5058 value,
5059 tolerance,
5060 units: units.to_string(),
5061 });
5062 }
5063 Err(format!(
5064 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
5065 ))
5066}
5067
5068#[allow(clippy::too_many_arguments)]
5070fn cmd_predict(
5071 frontier: &Path,
5072 by: &str,
5073 claim: &str,
5074 criterion: &str,
5075 resolves_by: Option<&str>,
5076 confidence: f64,
5077 target_csv: &str,
5078 outcome: &str,
5079 conditions_text: &str,
5080 json: bool,
5081) {
5082 if !(0.0..=1.0).contains(&confidence) {
5083 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
5084 }
5085 let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
5086
5087 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5088
5089 let targets: Vec<String> = target_csv
5090 .split(',')
5091 .map(|s| s.trim().to_string())
5092 .filter(|s| !s.is_empty())
5093 .collect();
5094 for t in &targets {
5095 if !t.starts_with("vf_") {
5096 fail(&format!("target `{t}` is not a vf_ id"));
5097 }
5098 if !project.findings.iter().any(|f| f.id == *t) {
5099 fail(&format!("target `{t}` not present in frontier"));
5100 }
5101 }
5102
5103 let lower = conditions_text.to_lowercase();
5104 let conditions = crate::bundle::Conditions {
5105 text: conditions_text.to_string(),
5106 species_verified: Vec::new(),
5107 species_unverified: Vec::new(),
5108 in_vitro: lower.contains("in vitro"),
5109 in_vivo: lower.contains("in vivo"),
5110 human_data: lower.contains("human") || lower.contains("clinical"),
5111 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
5112 concentration_range: None,
5113 duration: None,
5114 age_group: None,
5115 cell_type: None,
5116 };
5117
5118 let prediction = crate::bundle::Prediction::new(
5119 claim.to_string(),
5120 targets,
5121 None,
5122 resolves_by.map(|s| s.to_string()),
5123 criterion.to_string(),
5124 expected,
5125 by.to_string(),
5126 confidence,
5127 conditions,
5128 );
5129
5130 if project.predictions.iter().any(|p| p.id == prediction.id) {
5131 if json {
5132 println!(
5133 "{}",
5134 serde_json::to_string_pretty(&json!({
5135 "ok": false,
5136 "command": "predict",
5137 "reason": "prediction_already_exists",
5138 "id": prediction.id,
5139 }))
5140 .expect("serialize")
5141 );
5142 } else {
5143 println!(
5144 "{} prediction {} already exists in {}; skipping.",
5145 style::warn("predict"),
5146 prediction.id,
5147 frontier.display()
5148 );
5149 }
5150 return;
5151 }
5152
5153 let new_id = prediction.id.clone();
5154 project.predictions.push(prediction);
5155 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5156
5157 if json {
5158 println!(
5159 "{}",
5160 serde_json::to_string_pretty(&json!({
5161 "ok": true,
5162 "command": "predict",
5163 "id": new_id,
5164 "made_by": by,
5165 "confidence": confidence,
5166 "frontier": frontier.display().to_string(),
5167 }))
5168 .expect("serialize predict result")
5169 );
5170 } else {
5171 println!();
5172 println!(
5173 " {}",
5174 format!("VELA · PREDICT · {}", new_id)
5175 .to_uppercase()
5176 .dimmed()
5177 );
5178 println!(" {}", style::tick_row(60));
5179 println!(" by: {by}");
5180 println!(" confidence: {confidence:.3}");
5181 if let Some(d) = resolves_by {
5182 println!(" resolves by: {d}");
5183 }
5184 println!(" outcome: {outcome}");
5185 println!(" claim: {}", truncate(claim, 88));
5186 println!();
5187 println!(
5188 " {} prediction recorded in {}",
5189 style::ok("ok"),
5190 frontier.display()
5191 );
5192 }
5193}
5194
5195#[allow(clippy::too_many_arguments)]
5197fn cmd_resolve(
5198 frontier: &Path,
5199 prediction_id: &str,
5200 actual_outcome: &str,
5201 matched: bool,
5202 by: &str,
5203 confidence: f64,
5204 source_title: &str,
5205 doi: Option<&str>,
5206 json: bool,
5207) {
5208 if !prediction_id.starts_with("vpred_") {
5209 fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
5210 }
5211 if !(0.0..=1.0).contains(&confidence) {
5212 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
5213 }
5214 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5215 if !project.predictions.iter().any(|p| p.id == prediction_id) {
5216 fail(&format!(
5217 "prediction `{prediction_id}` not present in frontier"
5218 ));
5219 }
5220
5221 let evidence = crate::bundle::Evidence {
5222 evidence_type: "experimental".to_string(),
5223 model_system: String::new(),
5224 species: None,
5225 method: "prediction_resolution".to_string(),
5226 sample_size: None,
5227 effect_size: None,
5228 p_value: None,
5229 replicated: false,
5230 replication_count: None,
5231 evidence_spans: if source_title.is_empty() {
5232 Vec::new()
5233 } else {
5234 vec![serde_json::json!({"text": source_title})]
5235 },
5236 };
5237
5238 let _ = doi; let resolution = crate::bundle::Resolution::new(
5245 prediction_id.to_string(),
5246 actual_outcome.to_string(),
5247 matched,
5248 by.to_string(),
5249 evidence,
5250 confidence,
5251 );
5252
5253 if project.resolutions.iter().any(|r| r.id == resolution.id) {
5254 if json {
5255 println!(
5256 "{}",
5257 serde_json::to_string_pretty(&json!({
5258 "ok": false,
5259 "command": "resolve",
5260 "reason": "resolution_already_exists",
5261 "id": resolution.id,
5262 }))
5263 .expect("serialize")
5264 );
5265 } else {
5266 println!(
5267 "{} resolution {} already exists in {}; skipping.",
5268 style::warn("resolve"),
5269 resolution.id,
5270 frontier.display()
5271 );
5272 }
5273 return;
5274 }
5275
5276 let new_id = resolution.id.clone();
5277 project.resolutions.push(resolution);
5278 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5279
5280 if json {
5281 println!(
5282 "{}",
5283 serde_json::to_string_pretty(&json!({
5284 "ok": true,
5285 "command": "resolve",
5286 "id": new_id,
5287 "prediction": prediction_id,
5288 "matched": matched,
5289 "frontier": frontier.display().to_string(),
5290 }))
5291 .expect("serialize resolve result")
5292 );
5293 } else {
5294 println!();
5295 println!(
5296 " {}",
5297 format!("VELA · RESOLVE · {}", new_id)
5298 .to_uppercase()
5299 .dimmed()
5300 );
5301 println!(" {}", style::tick_row(60));
5302 println!(" prediction: {prediction_id}");
5303 println!(
5304 " matched: {}",
5305 if matched {
5306 style::ok("yes")
5307 } else {
5308 style::lost("no")
5309 }
5310 );
5311 println!(" by: {by}");
5312 println!(" outcome: {}", truncate(actual_outcome, 80));
5313 println!();
5314 println!(
5315 " {} resolution recorded in {}",
5316 style::ok("ok"),
5317 frontier.display()
5318 );
5319 }
5320}
5321
5322fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5324 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5325
5326 let resolved_ids: std::collections::HashSet<&str> = project
5327 .resolutions
5328 .iter()
5329 .map(|r| r.prediction_id.as_str())
5330 .collect();
5331
5332 let mut filtered: Vec<&crate::bundle::Prediction> = project
5333 .predictions
5334 .iter()
5335 .filter(|p| by.is_none_or(|b| p.made_by == b))
5336 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5337 .collect();
5338 filtered.sort_by(|a, b| {
5339 a.resolves_by
5340 .as_deref()
5341 .unwrap_or("9999")
5342 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5343 });
5344
5345 if json {
5346 let payload: Vec<serde_json::Value> = filtered
5347 .iter()
5348 .map(|p| {
5349 json!({
5350 "id": p.id,
5351 "claim_text": p.claim_text,
5352 "made_by": p.made_by,
5353 "confidence": p.confidence,
5354 "predicted_at": p.predicted_at,
5355 "resolves_by": p.resolves_by,
5356 "expected_outcome": p.expected_outcome,
5357 "resolved": resolved_ids.contains(p.id.as_str()),
5358 })
5359 })
5360 .collect();
5361 println!(
5362 "{}",
5363 serde_json::to_string_pretty(&json!({
5364 "ok": true,
5365 "command": "predictions",
5366 "frontier": frontier.display().to_string(),
5367 "count": payload.len(),
5368 "predictions": payload,
5369 }))
5370 .expect("serialize predictions")
5371 );
5372 return;
5373 }
5374
5375 println!();
5376 println!(
5377 " {}",
5378 format!("VELA · PREDICTIONS · {}", frontier.display())
5379 .to_uppercase()
5380 .dimmed()
5381 );
5382 println!(" {}", style::tick_row(60));
5383 if filtered.is_empty() {
5384 println!(" (no predictions matching filters)");
5385 return;
5386 }
5387 for p in &filtered {
5388 let resolved = resolved_ids.contains(p.id.as_str());
5389 let chip = if resolved {
5390 style::ok("resolved")
5391 } else {
5392 style::warn("open")
5393 };
5394 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5395 println!(
5396 " · {} {} by {} → {}",
5397 p.id.dimmed(),
5398 chip,
5399 p.made_by,
5400 deadline,
5401 );
5402 println!(" claim: {}", truncate(&p.claim_text, 90));
5403 println!(" confidence: {:.2}", p.confidence);
5404 }
5405}
5406
5407fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5412 use chrono::DateTime;
5413
5414 let now_dt = match now_override {
5415 Some(s) => DateTime::parse_from_rfc3339(s)
5416 .map(|dt| dt.with_timezone(&chrono::Utc))
5417 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5418 None => chrono::Utc::now(),
5419 };
5420
5421 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5422 if dry_run {
5423 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5425 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5426 if json {
5427 println!(
5428 "{}",
5429 serde_json::to_string_pretty(&json!({
5430 "ok": true,
5431 "command": "predictions.expire",
5432 "dry_run": true,
5433 "report": report,
5434 }))
5435 .expect("serialize predictions.expire (dry-run)")
5436 );
5437 } else {
5438 println!(
5439 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5440 style::ok("ok"),
5441 report.now,
5442 report.newly_expired.len(),
5443 report.already_expired.len(),
5444 report.already_resolved.len(),
5445 report.still_open.len(),
5446 );
5447 for id in &report.newly_expired {
5448 println!(" · {id}");
5449 }
5450 }
5451 return;
5452 }
5453
5454 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5455 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5456
5457 if json {
5458 println!(
5459 "{}",
5460 serde_json::to_string_pretty(&json!({
5461 "ok": true,
5462 "command": "predictions.expire",
5463 "report": report,
5464 }))
5465 .expect("serialize predictions.expire")
5466 );
5467 } else {
5468 println!(
5469 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5470 style::ok("expired"),
5471 report.now,
5472 report.newly_expired.len(),
5473 report.already_expired.len(),
5474 report.already_resolved.len(),
5475 report.still_open.len(),
5476 );
5477 for id in &report.newly_expired {
5478 println!(" · {id}");
5479 }
5480 }
5481}
5482
5483fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5484 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5485 let records = match actor {
5486 Some(a) => {
5487 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5488 .map(|r| vec![r])
5489 .unwrap_or_default()
5490 }
5491 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5492 };
5493
5494 if json {
5495 println!(
5496 "{}",
5497 serde_json::to_string_pretty(&json!({
5498 "ok": true,
5499 "command": "calibration",
5500 "frontier": frontier.display().to_string(),
5501 "filter_actor": actor,
5502 "records": records,
5503 }))
5504 .expect("serialize calibration")
5505 );
5506 return;
5507 }
5508
5509 println!();
5510 println!(
5511 " {}",
5512 format!("VELA · CALIBRATION · {}", frontier.display())
5513 .to_uppercase()
5514 .dimmed()
5515 );
5516 println!(" {}", style::tick_row(60));
5517 if records.is_empty() {
5518 println!(" (no calibration records)");
5519 return;
5520 }
5521 for r in &records {
5522 println!(" · {}", r.actor);
5523 println!(
5524 " predictions: {} resolved: {} hits: {}",
5525 r.n_predictions, r.n_resolved, r.n_hit
5526 );
5527 match r.hit_rate {
5528 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5529 None => println!(" hit rate: n/a"),
5530 }
5531 match r.brier_score {
5532 Some(b) => println!(
5533 " brier: {:.4} (lower is better; 0.25 = chance)",
5534 b
5535 ),
5536 None => println!(" brier: n/a"),
5537 }
5538 match r.log_score {
5539 Some(l) => println!(
5540 " log score: {:.4} (higher is better; 0 = perfect)",
5541 l
5542 ),
5543 None => println!(" log score: n/a"),
5544 }
5545 }
5546}
5547
/// Register a manually curated dataset in the frontier.
///
/// Builds a `data_release` provenance record from the CLI flags, constructs
/// the dataset, and appends it to the project — unless a dataset with the
/// same derived id already exists, in which case it reports and skips
/// (making re-runs idempotent).
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance for a hand-entered dataset: fixed source type and extraction
    // method ("manual_curation"); everything else comes from the flags.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // row_count is optional metadata not part of the constructor signature.
    dataset.row_count = row_count;

    // Duplicate guard: Dataset::new derives the id from its inputs, so an
    // identical re-run is reported as already existing rather than stored twice.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    // Keep the id for reporting; pushing the dataset moves it into the project.
    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  name: {name}");
        if let Some(v) = version {
            println!("  version: {v}");
        }
        println!("  content_hash: {content_hash}");
        if let Some(u) = url {
            println!("  url: {u}");
        }
        println!("  source: {source_title}");
        println!();
        println!(
            "  {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5665
/// Deposit a negative result into the frontier.
///
/// `kind` selects which of the optional flags are required:
/// - "registered_trial" requires the full trial description (endpoint,
///   intervention, comparator, population, enrollment, power, CI bounds);
/// - "exploratory" requires reagent, observation, and attempt count.
/// Any other value aborts via `fail_return`. The remaining flags build the
/// experimental conditions and provenance, and the deposit itself is
/// delegated to `state::add_negative_result`.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Validate kind-specific required flags up front; each missing flag aborts
    // with a message naming the flag and the kind that requires it.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Conditions are free text plus flags; only clinical_trial is derived here
    // (true exactly for registered_trial kinds), the rest default to unset.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance source_type mirrors the kind: trial record vs lab notebook.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind: {kind}");
        println!("  deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!("  event: {ev}");
        }
        println!(
            "  {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5828
5829fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
5832 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5833 let filtered: Vec<&crate::bundle::NegativeResult> = project
5834 .negative_results
5835 .iter()
5836 .filter(|nr| {
5837 target
5838 .map(|t| nr.target_findings.iter().any(|f| f == t))
5839 .unwrap_or(true)
5840 })
5841 .collect();
5842
5843 if json {
5844 println!(
5845 "{}",
5846 serde_json::to_string_pretty(&json!({
5847 "ok": true,
5848 "command": "negative_results",
5849 "frontier": frontier.display().to_string(),
5850 "count": filtered.len(),
5851 "negative_results": filtered,
5852 }))
5853 .expect("serialize negative_results")
5854 );
5855 return;
5856 }
5857
5858 if filtered.is_empty() {
5859 println!(" no negative_results in {}", frontier.display());
5860 return;
5861 }
5862
5863 println!();
5864 println!(
5865 " {} ({})",
5866 "VELA · NEGATIVE RESULTS".dimmed(),
5867 filtered.len()
5868 );
5869 println!(" {}", style::tick_row(60));
5870 for nr in &filtered {
5871 let kind_label = match &nr.kind {
5872 crate::bundle::NegativeResultKind::RegisteredTrial {
5873 endpoint, power, ..
5874 } => format!("trial · {endpoint} · power {power:.2}"),
5875 crate::bundle::NegativeResultKind::Exploratory {
5876 reagent, attempts, ..
5877 } => format!("exploratory · {reagent} · {attempts} attempts"),
5878 };
5879 let retracted = if nr.retracted { " [retracted]" } else { "" };
5880 let review = nr
5881 .review_state
5882 .as_ref()
5883 .map(|s| format!(" [{s:?}]"))
5884 .unwrap_or_default();
5885 println!(" {}{}{}", nr.id, retracted, review);
5886 println!(" {kind_label}");
5887 if !nr.target_findings.is_empty() {
5888 println!(" targets: {}", nr.target_findings.join(", "));
5889 }
5890 }
5891 println!();
5892}
5893
5894#[allow(clippy::too_many_arguments)]
5896fn cmd_tier_set(
5897 frontier: &Path,
5898 object_type: &str,
5899 object_id: &str,
5900 tier: &str,
5901 actor: &str,
5902 reason: &str,
5903 json: bool,
5904) {
5905 let parsed_tier =
5906 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5907 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5908 .unwrap_or_else(|e| fail_return(&e));
5909
5910 if json {
5911 println!(
5912 "{}",
5913 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5914 );
5915 } else {
5916 println!();
5917 println!(
5918 " {}",
5919 format!("VELA · TIER · {}", object_id)
5920 .to_uppercase()
5921 .dimmed()
5922 );
5923 println!(" {}", style::tick_row(60));
5924 println!(" object_type: {object_type}");
5925 println!(" new_tier: {}", parsed_tier.canonical());
5926 println!(" actor: {actor}");
5927 if let Some(ev) = &report.applied_event_id {
5928 println!(" event: {ev}");
5929 }
5930 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5931 }
5932}
5933
5934#[allow(clippy::too_many_arguments)]
5936fn cmd_trajectory_create(
5937 frontier: &Path,
5938 deposited_by: &str,
5939 reason: &str,
5940 targets: Vec<String>,
5941 notes: &str,
5942 json: bool,
5943) {
5944 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5945 .unwrap_or_else(|e| fail_return(&e));
5946
5947 if json {
5948 println!(
5949 "{}",
5950 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5951 );
5952 } else {
5953 println!();
5954 println!(
5955 " {}",
5956 format!("VELA · TRAJECTORY · {}", report.finding_id)
5957 .to_uppercase()
5958 .dimmed()
5959 );
5960 println!(" {}", style::tick_row(60));
5961 println!(" deposited_by: {deposited_by}");
5962 if let Some(ev) = &report.applied_event_id {
5963 println!(" event: {ev}");
5964 }
5965 println!(
5966 " {} trajectory opened in {}",
5967 style::ok("ok"),
5968 frontier.display()
5969 );
5970 }
5971}
5972
5973#[allow(clippy::too_many_arguments)]
5975fn cmd_trajectory_step(
5976 frontier: &Path,
5977 trajectory_id: &str,
5978 kind: &str,
5979 description: &str,
5980 actor: &str,
5981 reason: &str,
5982 references: Vec<String>,
5983 json: bool,
5984) {
5985 let parsed_kind = match kind {
5986 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5987 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5988 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5989 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5990 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5991 other => fail_return(&format!(
5992 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5993 )),
5994 };
5995 let report = state::append_trajectory_step(
5996 frontier,
5997 trajectory_id,
5998 parsed_kind,
5999 description,
6000 actor,
6001 references,
6002 reason,
6003 )
6004 .unwrap_or_else(|e| fail_return(&e));
6005
6006 if json {
6007 println!(
6008 "{}",
6009 serde_json::to_string_pretty(&report).expect("serialize step report")
6010 );
6011 } else {
6012 println!();
6013 println!(
6014 " {}",
6015 format!("VELA · STEP · {}", report.finding_id)
6016 .to_uppercase()
6017 .dimmed()
6018 );
6019 println!(" {}", style::tick_row(60));
6020 println!(" trajectory: {trajectory_id}");
6021 println!(" kind: {kind}");
6022 println!(" actor: {actor}");
6023 println!(
6024 " {} step appended in {}",
6025 style::ok("ok"),
6026 frontier.display()
6027 );
6028 }
6029}
6030
/// Print all trajectories (optionally only those targeting a given finding),
/// either as JSON or as an indented human-readable listing with each step.
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No --target means "keep everything" (the filter defaults to true).
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!("  no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!("  {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!("  {}", style::tick_row(60));
    for t in &filtered {
        // Status tags are appended to the id line only when set.
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!("  {}{}{}", t.id, retracted, review);
        println!(
            "    {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            // Render each step's enum variant with its stable CLI-facing label.
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            // char-based take() keeps the preview to at most 80 characters
            // without slicing through a multi-byte codepoint.
            let preview: String = step.description.chars().take(80).collect();
            println!("      [{label}] {preview}");
        }
    }
    println!();
}
6098
6099fn cmd_datasets(frontier: &Path, json: bool) {
6101 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6102 if json {
6103 println!(
6104 "{}",
6105 serde_json::to_string_pretty(&json!({
6106 "ok": true,
6107 "command": "datasets",
6108 "frontier": frontier.display().to_string(),
6109 "count": project.datasets.len(),
6110 "datasets": project.datasets,
6111 }))
6112 .expect("serialize datasets")
6113 );
6114 return;
6115 }
6116 println!();
6117 println!(
6118 " {}",
6119 format!("VELA · DATASETS · {}", frontier.display())
6120 .to_uppercase()
6121 .dimmed()
6122 );
6123 println!(" {}", style::tick_row(60));
6124 if project.datasets.is_empty() {
6125 println!(" (no datasets registered)");
6126 return;
6127 }
6128 for ds in &project.datasets {
6129 let v = ds
6130 .version
6131 .as_deref()
6132 .map(|s| format!("@{s}"))
6133 .unwrap_or_default();
6134 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
6135 if let Some(u) = &ds.url {
6136 println!(" url: {}", truncate(u, 80));
6137 }
6138 println!(" hash: {}", truncate(&ds.content_hash, 80));
6139 }
6140}
6141
/// Register a code artifact (a file or line span in some repository) in the
/// frontier. A re-run with identical inputs is reported as a duplicate and
/// skipped, since `CodeArtifact::new` derives the id from its inputs.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Normalise the optional line span: start+end as given; start alone means a
    // single line; an end without a start (or neither) yields no span.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Duplicate guard keyed on the derived id.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    // Keep the id for reporting; pushing the artifact moves it into the project.
    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  language: {language}");
        if let Some(r) = repo_url {
            println!("  repo: {r}");
        }
        if let Some(c) = commit {
            println!("  commit: {c}");
        }
        println!("  path: {path}");
        if let Some((a, b)) = line_range {
            println!("  lines: {a}-{b}");
        }
        println!("  content_hash: {content_hash}");
        println!();
        println!(
            "  {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6241
6242fn cmd_code_artifacts(frontier: &Path, json: bool) {
6244 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6245 if json {
6246 println!(
6247 "{}",
6248 serde_json::to_string_pretty(&json!({
6249 "ok": true,
6250 "command": "code-artifacts",
6251 "frontier": frontier.display().to_string(),
6252 "count": project.code_artifacts.len(),
6253 "code_artifacts": project.code_artifacts,
6254 }))
6255 .expect("serialize code-artifacts")
6256 );
6257 return;
6258 }
6259 println!();
6260 println!(
6261 " {}",
6262 format!("VELA · CODE · {}", frontier.display())
6263 .to_uppercase()
6264 .dimmed()
6265 );
6266 println!(" {}", style::tick_row(60));
6267 if project.code_artifacts.is_empty() {
6268 println!(" (no code artifacts registered)");
6269 return;
6270 }
6271 for c in &project.code_artifacts {
6272 let lr = c
6273 .line_range
6274 .map(|(a, b)| format!(":{a}-{b}"))
6275 .unwrap_or_default();
6276 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
6277 if let Some(r) = &c.repo_url {
6278 println!(" repo: {}", truncate(r, 80));
6279 }
6280 if let Some(g) = &c.git_commit {
6281 println!(" commit: {g}");
6282 }
6283 }
6284}
6285
6286fn sha256_for_bytes(bytes: &[u8]) -> String {
6287 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6288}
6289
/// Return the bare hex digest from a content hash, accepting both the
/// "sha256:<hex>" form and an already-bare "<hex>" string.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6293
/// Store artifact bytes as a content-addressed blob inside a vela repo and
/// return the repo-relative locator.
///
/// Returns `None` when `frontier` is not backed by `repo::VelaSource::VelaRepo`
/// (e.g. a bare bundle file); the caller then falls back to referencing the
/// source file path directly.
fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
    let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
        return None;
    };
    // Blobs are addressed by bare hex digest under .vela/artifact-blobs/sha256/.
    let hex = sha256_hex_part(content_hash);
    let rel = format!(".vela/artifact-blobs/sha256/{hex}");
    let path = root.join(&rel);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to create artifact blob directory {}: {e}",
                parent.display()
            ))
        });
    }
    // Content-addressed storage: an existing file with this digest already
    // holds the right bytes, so only write when the blob is missing.
    if !path.is_file() {
        std::fs::write(&path, bytes)
            .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
    }
    Some(rel)
}
6315
6316fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6317 let mut out = BTreeMap::new();
6318 for pair in pairs {
6319 let Some((key, value)) = pair.split_once('=') else {
6320 fail(&format!("--metadata must be key=value, got {pair:?}"));
6321 };
6322 let key = key.trim();
6323 if key.is_empty() {
6324 fail("--metadata key must be non-empty");
6325 }
6326 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6327 }
6328 out
6329}
6330
/// Map an artifact kind to the provenance `source_type` recorded for it.
/// Unknown kinds fall back to the generic "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    match kind {
        "clinical_trial_record" => "clinical_trial",
        "protocol" => "clinical_trial",
        "dataset" => "data_release",
        "model_output" => "model_output",
        "registry_record" => "database_record",
        "lab_file" => "lab_notebook",
        _ => "database_record",
    }
}
6341
6342fn artifact_provenance(
6343 kind: &str,
6344 title: &str,
6345 url: Option<&str>,
6346 doi: Option<&str>,
6347 license: Option<&str>,
6348) -> crate::bundle::Provenance {
6349 crate::bundle::Provenance {
6350 source_type: artifact_source_type(kind).to_string(),
6351 doi: doi.map(str::to_string),
6352 pmid: None,
6353 pmc: None,
6354 openalex_id: None,
6355 url: url.map(str::to_string),
6356 title: title.to_string(),
6357 authors: Vec::new(),
6358 year: None,
6359 journal: None,
6360 license: license.map(str::to_string),
6361 publisher: None,
6362 funders: Vec::new(),
6363 extraction: crate::bundle::Extraction {
6364 method: "artifact_deposit".to_string(),
6365 model: None,
6366 model_version: None,
6367 extracted_at: chrono::Utc::now().to_rfc3339(),
6368 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6369 },
6370 review: None,
6371 citation_count: None,
6372 }
6373}
6374
/// Deposit an artifact into the frontier.
///
/// Storage mode is decided by the inputs:
/// - `--file` inside a vela repo   -> "local_blob" (content-addressed copy),
/// - `--file` outside a vela repo  -> "local_file" (path reference),
/// - `--url` without a file        -> "remote",
/// - neither                       -> "pointer" (requires --content-hash).
/// When both `--file` and `--content-hash` are given, the hash is verified
/// against the actual file bytes and a mismatch aborts.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // Verify a user-supplied hash against the file bytes; comparison is on
        // the bare hex part, case-insensitively, so prefix/case differences
        // don't cause spurious mismatches.
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        // The computed hash always wins once we have real bytes.
        computed_hash = Some(actual_hash.clone());
        // Prefer a content-addressed blob inside the repo; fall back to a
        // plain path reference when the frontier is not a vela repo.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without a file, the hash must come from --content-hash.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Source metadata defaults: --source-url falls back to --url, the source
    // title falls back to the artifact name.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    // Keep the id for reporting; the deposit consumes the artifact.
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind: {kind}");
        println!("  name: {name}");
        println!("  hash: {content_hash_for_print}");
        println!(
            "  {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6490
6491fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
6492 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6493 let filtered: Vec<&crate::bundle::Artifact> = project
6494 .artifacts
6495 .iter()
6496 .filter(|artifact| {
6497 target
6498 .map(|t| artifact.target_findings.iter().any(|f| f == t))
6499 .unwrap_or(true)
6500 })
6501 .collect();
6502
6503 if json_out {
6504 println!(
6505 "{}",
6506 serde_json::to_string_pretty(&json!({
6507 "ok": true,
6508 "command": "artifacts",
6509 "frontier": frontier.display().to_string(),
6510 "count": filtered.len(),
6511 "artifacts": filtered,
6512 }))
6513 .expect("serialize artifacts")
6514 );
6515 return;
6516 }
6517
6518 println!();
6519 println!(
6520 " {}",
6521 format!("VELA · ARTIFACTS · {}", frontier.display())
6522 .to_uppercase()
6523 .dimmed()
6524 );
6525 println!(" {}", style::tick_row(60));
6526 if filtered.is_empty() {
6527 println!(" (no artifacts registered)");
6528 return;
6529 }
6530 for artifact in filtered {
6531 println!(
6532 " · {} {} · {}",
6533 artifact.id.dimmed(),
6534 artifact.kind,
6535 artifact.name
6536 );
6537 if let Some(locator) = &artifact.locator {
6538 println!(" locator: {}", truncate(locator, 88));
6539 }
6540 if !artifact.target_findings.is_empty() {
6541 println!(" targets: {}", artifact.target_findings.join(", "));
6542 }
6543 }
6544}
6545
6546fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
6547 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6548 let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
6549 if json_out {
6550 println!(
6551 "{}",
6552 serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
6553 );
6554 if !audit.ok {
6555 std::process::exit(1);
6556 }
6557 return;
6558 }
6559
6560 println!();
6561 println!(
6562 " {}",
6563 format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
6564 .to_uppercase()
6565 .dimmed()
6566 );
6567 println!(" {}", style::tick_row(60));
6568 println!(" artifacts: {}", audit.artifact_count);
6569 println!(" checked local blobs: {}", audit.checked_local_blobs);
6570 println!(" local blob bytes: {}", audit.local_blob_bytes);
6571 if !audit.by_kind.is_empty() {
6572 let kinds = audit
6573 .by_kind
6574 .iter()
6575 .map(|(kind, count)| format!("{kind}:{count}"))
6576 .collect::<Vec<_>>()
6577 .join(", ");
6578 println!(" kinds: {kinds}");
6579 }
6580 if audit.ok {
6581 println!(" {} artifact audit passed.", style::ok("ok"));
6582 return;
6583 }
6584 for issue in &audit.issues {
6585 println!(
6586 " {} {} {}: {}",
6587 style::lost("invalid"),
6588 issue.id,
6589 issue.field,
6590 issue.message
6591 );
6592 }
6593 std::process::exit(1);
6594}
6595
/// Render the decision-brief projection for the frontier.
///
/// On projection failure the issues are printed (via `print_projection_issues`)
/// and the process exits with status 1, in both JSON and text modes.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: a report with ok == true always carries a projection, so the
    // expect here documents a bug rather than a recoverable condition.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!("  · {} · {}", question.id.dimmed(), question.title);
        println!("    answer: {}", wrap_line(&question.short_answer, 82));
        println!("    caveat: {}", wrap_line(&question.caveat, 82));
        println!("    support: {}", question.supporting_findings.join(", "));
        // Optional cross-references are printed only when present.
        if !question.tension_findings.is_empty() {
            println!("    tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!("    gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!("    artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            "    would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6645
6646fn cmd_trial_summary(frontier: &Path, json_out: bool) {
6647 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6648 let report = decision::load_trial_outcomes(frontier, &project);
6649 if json_out {
6650 println!(
6651 "{}",
6652 serde_json::to_string_pretty(&report).expect("serialize trial summary report")
6653 );
6654 if !report.ok {
6655 std::process::exit(1);
6656 }
6657 return;
6658 }
6659 println!();
6660 println!(
6661 " {}",
6662 format!("VELA · TRIAL SUMMARY · {}", project.project.name)
6663 .to_uppercase()
6664 .dimmed()
6665 );
6666 println!(" {}", style::tick_row(60));
6667 if !report.ok {
6668 print_projection_issues(&report.issues, report.error.as_deref());
6669 std::process::exit(1);
6670 }
6671 let outcomes = report
6672 .projection
6673 .as_ref()
6674 .expect("ok trial report carries projection");
6675 for row in &outcomes.rows {
6676 println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
6677 println!(" population: {}", wrap_line(&row.population, 82));
6678 println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
6679 println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
6680 println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
6681 println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
6682 println!(" status: {}", wrap_line(&row.regulatory_status, 82));
6683 if !row.finding_ids.is_empty() {
6684 println!(" findings: {}", row.finding_ids.join(", "));
6685 }
6686 if !row.artifact_ids.is_empty() {
6687 println!(" artifacts: {}", row.artifact_ids.join(", "));
6688 }
6689 }
6690}
6691
6692fn cmd_source_verification(frontier: &Path, json_out: bool) {
6693 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6694 let report = decision::load_source_verification(frontier, &project);
6695 if json_out {
6696 println!(
6697 "{}",
6698 serde_json::to_string_pretty(&report).expect("serialize source verification report")
6699 );
6700 if !report.ok {
6701 std::process::exit(1);
6702 }
6703 return;
6704 }
6705 println!();
6706 println!(
6707 " {}",
6708 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
6709 .to_uppercase()
6710 .dimmed()
6711 );
6712 println!(" {}", style::tick_row(60));
6713 if !report.ok {
6714 print_projection_issues(&report.issues, report.error.as_deref());
6715 std::process::exit(1);
6716 }
6717 let verification = report
6718 .projection
6719 .as_ref()
6720 .expect("ok source verification report carries projection");
6721 println!(" verified_at: {}", verification.verified_at);
6722 for source in &verification.sources {
6723 println!(" · {} · {}", source.id.dimmed(), source.title);
6724 println!(" agency: {}", source.agency);
6725 println!(" url: {}", truncate(&source.url, 88));
6726 println!(" status: {}", wrap_line(&source.current_status, 82));
6727 }
6728}
6729
6730fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
6731 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6732 let report = decision::load_source_ingest_plan(frontier, &project);
6733 if json_out {
6734 println!(
6735 "{}",
6736 serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
6737 );
6738 if !report.ok {
6739 std::process::exit(1);
6740 }
6741 return;
6742 }
6743 println!();
6744 println!(
6745 " {}",
6746 format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
6747 .to_uppercase()
6748 .dimmed()
6749 );
6750 println!(" {}", style::tick_row(60));
6751 if !report.ok {
6752 print_projection_issues(&report.issues, report.error.as_deref());
6753 std::process::exit(1);
6754 }
6755 let plan = report
6756 .projection
6757 .as_ref()
6758 .expect("ok source ingest plan report carries projection");
6759 println!(" verified_at: {}", plan.verified_at);
6760 println!(" entries: {}", plan.entries.len());
6761 for entry in &plan.entries {
6762 println!(
6763 " · {} · {} · {} · {}",
6764 entry.id.dimmed(),
6765 entry.category,
6766 entry.priority,
6767 entry.ingest_status
6768 );
6769 println!(" name: {}", wrap_line(&entry.name, 82));
6770 println!(" locator: {}", truncate(&entry.locator, 88));
6771 println!(" use: {}", wrap_line(&entry.target_use, 82));
6772 if let Some(id) = &entry.current_frontier_artifact_id {
6773 println!(" artifact: {id}");
6774 }
6775 if !entry.target_findings.is_empty() {
6776 println!(" findings: {}", entry.target_findings.join(", "));
6777 }
6778 }
6779}
6780
6781fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6782 if let Some(error) = error {
6783 println!(" {} {error}", style::lost("unavailable"));
6784 }
6785 for issue in issues {
6786 println!(
6787 " {} {}: {}",
6788 style::lost("invalid"),
6789 issue.path,
6790 issue.message
6791 );
6792 }
6793}
6794
/// Greedily word-wrap `text` so no line holds more than `max_chars`
/// characters of content, prefixing continuation lines with a fixed indent
/// so they align under the CLI's labelled fields.
///
/// Text that already fits (counted in chars, not bytes) is returned
/// unchanged. Words are never split, so a single word longer than
/// `max_chars` still occupies one over-long line.
/// NOTE(review): the continuation indent is not added to `line_len`, so a
/// wrapped line's total width can exceed `max_chars` by the indent width —
/// presumably intentional display margin; confirm.
fn wrap_line(text: &str, max_chars: usize) -> String {
    // Fast path: short text needs no rebuilding.
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut out = String::new();
    let mut line_len = 0usize;
    for word in text.split_whitespace() {
        let word_len = word.chars().count();
        if line_len > 0 && line_len + 1 + word_len > max_chars {
            // Word (plus the joining space) would overflow: break the line
            // and start a new, indented one.
            out.push('\n');
            out.push_str(" ");
            out.push_str(word);
            line_len = word_len;
        } else {
            if line_len > 0 {
                // Joining space between words on the same line.
                out.push(' ');
                line_len += 1;
            }
            out.push_str(word);
            line_len += word_len;
        }
    }
    out
}
6819
6820fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6821 study.pointer(pointer).and_then(Value::as_str)
6822}
6823
6824fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6825 study
6826 .pointer(pointer)
6827 .and_then(Value::as_array)
6828 .map(|items| {
6829 items
6830 .iter()
6831 .filter_map(Value::as_str)
6832 .map(str::to_string)
6833 .collect()
6834 })
6835 .unwrap_or_default()
6836}
6837
6838fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6839 study
6840 .pointer(pointer)
6841 .and_then(Value::as_array)
6842 .map(|items| {
6843 items
6844 .iter()
6845 .filter_map(|item| item.get(field).and_then(Value::as_str))
6846 .map(str::to_string)
6847 .collect()
6848 })
6849 .unwrap_or_default()
6850}
6851
6852fn insert_string_vec_metadata(
6853 metadata: &mut BTreeMap<String, Value>,
6854 key: &str,
6855 values: Vec<String>,
6856) {
6857 if values.is_empty() {
6858 return;
6859 }
6860 metadata.insert(
6861 key.to_string(),
6862 Value::Array(values.into_iter().map(Value::String).collect()),
6863 );
6864}
6865
/// Import a ClinicalTrials.gov study record into the frontier as a
/// `clinical_trial_record` artifact.
///
/// The raw study JSON comes either from `input_json` (a previously
/// downloaded v2 API payload) or is fetched live from the v2 API for
/// `nct_id`. The JSON is canonicalized and content-hashed; the canonical
/// bytes are stored as a local blob when possible (otherwise the artifact
/// points back at the API URL), and key protocol fields (status, dates,
/// phases, conditions, interventions, primary outcomes) are copied into
/// artifact metadata. Any fetch/parse/persist failure exits via `fail`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a local file when supplied; otherwise fetch from the live API.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Promote HTTP error statuses (e.g. 404) to hard failures too.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Canonical bytes give a content hash that is stable across key order.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Try to persist the blob under the frontier; fall back to the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id and title from the record itself, falling back to
    // the id the user supplied on the command line.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    // Copy queryable protocol fields into artifact metadata.
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Scalar string fields: only recorded when present in the payload.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Array-valued fields: skipped entirely when empty.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    // Persist the artifact into frontier state (records an event).
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
7042
#[allow(clippy::too_many_arguments)]
/// Record a replication attempt against an existing `vf_` finding and, by
/// default, cascade the outcome to dependent frontier state.
///
/// Validates the outcome keyword and target id, heuristically derives
/// coarse condition flags from `conditions_text`, builds evidence and
/// provenance records, appends the new `Replication` (duplicates are
/// skipped with a warning), optionally runs the propagation cascade, then
/// persists the frontier and reports in JSON or console form. Fatal
/// validation/load/save errors exit via `fail` / `fail_return`.
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    // Reject unknown outcome keywords up front.
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // The target finding must exist in this frontier before we attach a
    // replication record to it.
    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Heuristic keyword scan: infer coarse condition flags (in vitro /
    // in vivo / human / clinical-trial) from the free-text description.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Minimal evidence record; `replicated` mirrors the outcome keyword.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    // Provenance is a manually curated citation of the source paper.
    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // A matching id means this attempt was already recorded: report and
    // bail without touching state. NOTE(review): presumably
    // `Replication::new` derives ids deterministically from its inputs —
    // confirm in bundle.rs.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Cascade the outcome to dependents of the target finding unless the
    // caller opted out with --no-cascade; review events are appended and
    // aggregate stats recomputed before saving.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" target: {target}");
        println!(" outcome: {outcome}");
        println!(" attempted by: {attempted_by}");
        println!(" conditions: {conditions_text}");
        println!(" source: {source_title}");
        if let Some(d) = doi {
            println!(" doi: {d}");
        }
        println!();
        println!(
            " {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
7264
7265fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
7267 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
7268 let filtered: Vec<&crate::bundle::Replication> = project
7269 .replications
7270 .iter()
7271 .filter(|r| target.is_none_or(|t| r.target_finding == t))
7272 .collect();
7273
7274 if json {
7275 let payload = json!({
7276 "ok": true,
7277 "command": "replications",
7278 "frontier": frontier.display().to_string(),
7279 "filter_target": target,
7280 "count": filtered.len(),
7281 "replications": filtered,
7282 });
7283 println!(
7284 "{}",
7285 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
7286 );
7287 return;
7288 }
7289
7290 println!();
7291 let header = match target {
7292 Some(t) => format!("VELA · REPLICATIONS · {t}"),
7293 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
7294 };
7295 println!(" {}", header.to_uppercase().dimmed());
7296 println!(" {}", style::tick_row(60));
7297 if filtered.is_empty() {
7298 println!(" (no replications recorded)");
7299 return;
7300 }
7301 for rep in &filtered {
7302 let outcome_chip = match rep.outcome.as_str() {
7303 "replicated" => style::ok(&rep.outcome),
7304 "failed" => style::lost(&rep.outcome),
7305 "partial" => style::warn(&rep.outcome),
7306 _ => rep.outcome.clone().normal().to_string(),
7307 };
7308 println!(
7309 " · {} {} by {}",
7310 rep.id.dimmed(),
7311 outcome_chip,
7312 rep.attempted_by
7313 );
7314 println!(" target: {}", rep.target_finding);
7315 if !rep.conditions.text.is_empty() {
7316 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7317 }
7318 if !rep.provenance.title.is_empty() {
7319 println!(" source: {}", truncate(&rep.provenance.title, 80));
7320 }
7321 }
7322}
7323
/// Route a single ingest `path` to the appropriate specialized pipeline.
///
/// - `doi:` / `pmid:` / `nct:` URIs fetch metadata only (`cmd_source_fetch`);
///   no frontier state is written and a reminder is printed to stderr.
/// - Single files dispatch on extension: pdf → scout, md/markdown →
///   compile-notes, csv/tsv → compile-data, json → artifact-to-state.
/// - Directories are scanned one level deep; each handlable type present is
///   run in sequence, and a directory containing none of them is treated
///   as a code tree (compile-code).
///
/// Nonexistent paths and unsupported file types terminate via `fail`.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    // Identifier URIs: metadata fetch only.
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            // Human mode: make clear on stderr that no state was written
            // and suggest the follow-up command.
            eprintln!();
            eprintln!(
                " vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                " next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    // Lowercased extension drives single-file dispatch below.
    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                // JSON files are treated as artifact payloads.
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count handlable file types at the top level so we can
        // announce multi-type dispatch and detect plain code trees.
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are not worth reporting as skipped.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing handlable at the top level: assume it is a code tree.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                " vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                " pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each pipeline that has matching files.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            // JSON artifacts are imported one file at a time.
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                " vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    // Exists but is neither file nor directory (e.g. a broken symlink).
    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7523
7524#[allow(clippy::too_many_arguments)]
7525async fn cmd_compile_data(
7527 root: &Path,
7528 frontier: &Path,
7529 backend: Option<&str>,
7530 sample_rows: Option<usize>,
7531 dry_run: bool,
7532 json_out: bool,
7533) {
7534 match DATASETS_HANDLER.get() {
7535 Some(handler) => {
7536 handler(
7537 root.to_path_buf(),
7538 frontier.to_path_buf(),
7539 backend.map(String::from),
7540 sample_rows,
7541 dry_run,
7542 json_out,
7543 )
7544 .await;
7545 }
7546 None => {
7547 eprintln!(
7548 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7549 style::err_prefix()
7550 );
7551 std::process::exit(1);
7552 }
7553 }
7554}
7555
7556async fn cmd_review_pending(
7559 frontier: &Path,
7560 backend: Option<&str>,
7561 max_proposals: Option<usize>,
7562 batch_size: usize,
7563 dry_run: bool,
7564 json_out: bool,
7565) {
7566 match REVIEWER_HANDLER.get() {
7567 Some(handler) => {
7568 handler(
7569 frontier.to_path_buf(),
7570 backend.map(String::from),
7571 max_proposals,
7572 batch_size,
7573 dry_run,
7574 json_out,
7575 )
7576 .await;
7577 }
7578 None => {
7579 eprintln!(
7580 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7581 style::err_prefix()
7582 );
7583 std::process::exit(1);
7584 }
7585 }
7586}
7587
7588async fn cmd_find_tensions(
7591 frontier: &Path,
7592 backend: Option<&str>,
7593 max_findings: Option<usize>,
7594 dry_run: bool,
7595 json_out: bool,
7596) {
7597 match TENSIONS_HANDLER.get() {
7598 Some(handler) => {
7599 handler(
7600 frontier.to_path_buf(),
7601 backend.map(String::from),
7602 max_findings,
7603 dry_run,
7604 json_out,
7605 )
7606 .await;
7607 }
7608 None => {
7609 eprintln!(
7610 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7611 style::err_prefix()
7612 );
7613 std::process::exit(1);
7614 }
7615 }
7616}
7617
7618async fn cmd_plan_experiments(
7621 frontier: &Path,
7622 backend: Option<&str>,
7623 max_findings: Option<usize>,
7624 dry_run: bool,
7625 json_out: bool,
7626) {
7627 match EXPERIMENTS_HANDLER.get() {
7628 Some(handler) => {
7629 handler(
7630 frontier.to_path_buf(),
7631 backend.map(String::from),
7632 max_findings,
7633 dry_run,
7634 json_out,
7635 )
7636 .await;
7637 }
7638 None => {
7639 eprintln!(
7640 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7641 style::err_prefix()
7642 );
7643 std::process::exit(1);
7644 }
7645 }
7646}
7647
7648async fn cmd_compile_code(
7651 root: &Path,
7652 frontier: &Path,
7653 backend: Option<&str>,
7654 max_files: Option<usize>,
7655 dry_run: bool,
7656 json_out: bool,
7657) {
7658 match CODE_HANDLER.get() {
7659 Some(handler) => {
7660 handler(
7661 root.to_path_buf(),
7662 frontier.to_path_buf(),
7663 backend.map(String::from),
7664 max_files,
7665 dry_run,
7666 json_out,
7667 )
7668 .await;
7669 }
7670 None => {
7671 eprintln!(
7672 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7673 style::err_prefix()
7674 );
7675 std::process::exit(1);
7676 }
7677 }
7678}
7679
7680async fn cmd_compile_notes(
7685 vault: &Path,
7686 frontier: &Path,
7687 backend: Option<&str>,
7688 max_files: Option<usize>,
7689 max_items_per_category: Option<usize>,
7690 dry_run: bool,
7691 json_out: bool,
7692) {
7693 match NOTES_HANDLER.get() {
7694 Some(handler) => {
7695 handler(
7696 vault.to_path_buf(),
7697 frontier.to_path_buf(),
7698 backend.map(String::from),
7699 max_files,
7700 max_items_per_category,
7701 dry_run,
7702 json_out,
7703 )
7704 .await;
7705 }
7706 None => {
7707 eprintln!(
7708 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7709 style::err_prefix()
7710 );
7711 std::process::exit(1);
7712 }
7713 }
7714}
7715
7716async fn cmd_scout(
7723 folder: &Path,
7724 frontier: &Path,
7725 backend: Option<&str>,
7726 dry_run: bool,
7727 json_out: bool,
7728) {
7729 match SCOUT_HANDLER.get() {
7730 Some(handler) => {
7731 handler(
7732 folder.to_path_buf(),
7733 frontier.to_path_buf(),
7734 backend.map(String::from),
7735 dry_run,
7736 json_out,
7737 )
7738 .await;
7739 }
7740 None => {
7741 eprintln!(
7742 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7743 style::err_prefix()
7744 );
7745 std::process::exit(1);
7746 }
7747 }
7748}
7749
/// Recursively scan `root` for files that look like private keys or
/// credentials, returning the sorted list of offending paths.
///
/// Skips VCS/build directories (`.git`, `target`, `node_modules`, `dist`,
/// `build`) and never flags public key material (`*.pub`, `*.pubkey`,
/// `public.key`). A file is flagged when its extension is key-like
/// (`key`/`pem`/`p12`/`pfx`) or its name contains `private`, `secret`, or
/// `credential` (case-insensitive). Unreadable directories and non-UTF-8
/// names are skipped silently.
///
/// Note: the stray `#[allow(clippy::too_many_arguments)]` that used to sit
/// here was removed — this function takes a single argument.
pub fn scan_for_sensitive_paths(root: &Path) -> Vec<PathBuf> {
    let skip_dirs: &[&str] = &[".git", "target", "node_modules", "dist", "build"];
    let bad_exts: &[&str] = &["key", "pem", "p12", "pfx"];
    let bad_substrings: &[&str] = &["private", "secret", "credential"];

    let mut hits: Vec<PathBuf> = Vec::new();
    // Iterative DFS; avoids recursion on arbitrarily deep trees.
    let mut stack: Vec<PathBuf> = vec![root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        let Ok(entries) = std::fs::read_dir(&dir) else {
            continue; // unreadable directory: skip, not fatal
        };
        for entry in entries.flatten() {
            let path = entry.path();
            let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
                continue; // non-UTF-8 names cannot be pattern-matched
            };
            let lower = name.to_lowercase();
            if path.is_dir() {
                if !skip_dirs.contains(&name) {
                    stack.push(path);
                }
                continue;
            }
            // Public key material is safe to publish; never flag it.
            if lower.ends_with(".pub") || lower.ends_with(".pubkey") || lower == "public.key" {
                continue;
            }
            let ext = path
                .extension()
                .and_then(|e| e.to_str())
                .map(str::to_lowercase)
                .unwrap_or_default();
            let flagged = bad_exts.contains(&ext.as_str())
                || bad_substrings.iter().any(|s| lower.contains(s));
            if flagged {
                hits.push(path);
            }
        }
    }
    // Deterministic output regardless of directory-walk order.
    hits.sort();
    hits
}
7811
/// Drive the `vela check` command: schema validation, lint/stats reporting,
/// signal analysis, and conformance tests over a frontier source.
///
/// Error paths terminate the process: `fail(...)` and the explicit
/// `std::process::exit(1)` calls never return, so the `let ... else` arms
/// never fall through. `fix` is accepted for CLI compatibility but is
/// currently unused (see the trailing `let _ = fix;`).
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits the whole command: print the machine-readable
    // report and exit non-zero unless the payload says "ok": true.
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Strict text mode first audits the source tree for files that look like
    // secrets; any hit is a hard failure before other checks run.
    if strict && let Some(src) = source {
        let hits = scan_for_sensitive_paths(src);
        if !hits.is_empty() {
            eprintln!(
                "{} secret-audit: {} sensitive path(s) found under {}",
                style::err_prefix(),
                hits.len(),
                src.display()
            );
            for hit in &hits {
                eprintln!("  - {}", hit.display());
            }
            eprintln!(
                "  hint: add `keys/` and `*.key` to .gitignore so these never reach a public repo (see THREAT_MODEL.md A17)"
            );
            std::process::exit(1);
        }
    }

    // With no explicit sub-check selected, run everything.
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    // Stats/lint pass: lint report, event replay, signatures, and signals.
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Only print the signature summary when something is actually signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails; strict mode additionally fails on a
        // non-empty review queue or a not-ready proof-readiness status.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    // Conformance suite: runs when explicitly requested, or in run-all mode
    // when the conformance directory exists.
    if run_all || conformance_flag {
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                "  conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    let _ = fix;
}
7928
/// Build the full JSON report for `vela check --json`.
///
/// Aggregates schema validation, lint, event replay, state integrity,
/// derivable-but-unmaterialized source/evidence/condition records, reviewer
/// identity checks, and signal analysis into one `serde_json::Value`.
/// The top-level `"ok"` is false when any error was counted, or — in strict
/// mode — when there are warnings or strict-blocking signals.
fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
    // Schema validation always runs; lint passes only when the frontier
    // loaded and we are not in schema-only mode.
    let report = validate::validate(src);
    let loaded = repo::load_from_path(src).ok();
    let (method_report, graph_report) = if schema_only {
        (None, None)
    } else if let Some(frontier) = loaded.as_ref() {
        (
            Some(lint::lint(frontier, None, None)),
            Some(lint::lint_frontier(frontier)),
        )
    } else {
        (None, None)
    };
    let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());

    // Diagnostics accumulate across every check below, in a uniform shape.
    let mut diagnostics = Vec::new();
    diagnostics.extend(report.errors.iter().map(|e| {
        json!({
            "severity": "error",
            "rule_id": "schema",
            "finding_id": null,
            "file": &e.file,
            "field_path": null,
            "message": &e.error,
            "suggestion": schema_error_suggestion(&e.error),
            "fixable": schema_error_fix(&e.error),
            "normalize_action": schema_error_action(&e.error),
        })
    }));
    // Fold both lint passes into diagnostics, tagged with their check id.
    for (check_id, lint_report) in [
        ("methodology", method_report.as_ref()),
        ("frontier_graph", graph_report.as_ref()),
    ] {
        if let Some(lint_report) = lint_report {
            diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
                json!({
                    "severity": d.severity.to_string(),
                    "rule_id": &d.rule_id,
                    "check": check_id,
                    "finding_id": &d.finding_id,
                    "field_path": null,
                    "message": &d.message,
                    "suggestion": &d.suggestion,
                    "fixable": false,
                    "normalize_action": null,
                })
            }));
        }
    }
    // Per-check counters used for the summary and the "checks" array.
    let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
    let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
    let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
    let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
    let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
    let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
    let replay_report = loaded.as_ref().map(events::replay_report);
    // NOTE(review): when `schema_only` is set this still computes an
    // in-memory integrity report (and counts its structural errors below),
    // while the "state_integrity" check entry is reported as skipped; the
    // branch arms also look inverted relative to that — confirm intended.
    let state_integrity_report = if schema_only {
        loaded.as_ref().map(state_integrity::analyze)
    } else {
        state_integrity::analyze_path(src).ok()
    };
    // A failed event replay contributes one diagnostic per conflict.
    if let Some(replay) = replay_report.as_ref()
        && !replay.ok
    {
        diagnostics.extend(replay.conflicts.iter().map(|conflict| {
            json!({
                "severity": "error",
                "rule_id": "event_replay",
                "check": "events",
                "finding_id": null,
                "field_path": null,
                "message": conflict,
                "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
                "fixable": false,
                "normalize_action": null,
            })
        }));
    }
    // Replay failure counts as a single error regardless of conflict count.
    let event_errors = replay_report
        .as_ref()
        .map_or(0, |replay| usize::from(!replay.ok));
    let state_integrity_errors = state_integrity_report
        .as_ref()
        .map_or(0, |report| report.structural_errors.len());
    // Summaries of sources / evidence / conditions / proposals; defaults
    // when the frontier failed to load.
    let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
        .as_ref()
        .map(|frontier| {
            (
                sources::source_summary(frontier),
                sources::evidence_summary(frontier),
                sources::condition_summary(frontier),
                proposals::summary(frontier),
                proposals::proof_state_json(&frontier.proof_state),
            )
        })
        .unwrap_or_else(|| {
            (
                sources::SourceRegistrySummary::default(),
                sources::EvidenceAtomSummary::default(),
                sources::ConditionSummary::default(),
                proposals::ProposalSummary::default(),
                Value::Null,
            )
        });
    let signature_report = loaded
        .as_ref()
        .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
    // Deep checks that need a loaded frontier and a full (non-schema-only)
    // run: compare the derivable projection against materialized records,
    // and validate reviewer identities on accepted/applied proposals.
    if let Some(frontier) = loaded.as_ref()
        && !schema_only
    {
        let projection = sources::derive_projection(frontier);
        let existing_sources = frontier
            .sources
            .iter()
            .map(|source| source.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_atoms = frontier
            .evidence_atoms
            .iter()
            .map(|atom| atom.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_conditions = frontier
            .condition_records
            .iter()
            .map(|record| record.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        // Derivable source records that were never materialized.
        for source in projection
            .sources
            .iter()
            .filter(|source| !existing_sources.contains(source.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_source_record",
                "check": "source_registry",
                "finding_id": source.finding_ids.first(),
                "field_path": "sources",
                "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
                "suggestion": "Run `vela normalize` to materialize source records before proof export.",
                "fixable": true,
                "normalize_action": "materialize_source_record",
            }));
        }
        // Derivable evidence atoms that were never materialized.
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_atom",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms",
                "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
                "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
                "fixable": true,
                "normalize_action": "materialize_evidence_atom",
            }));
        }
        // Evidence atoms missing a source locator (span/page/row/etc.).
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| atom.locator.is_none())
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_locator",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms[].locator",
                "message": format!("Evidence atom {} has no source locator.", atom.id),
                "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
        // Derivable condition records that were never materialized.
        for condition in projection
            .condition_records
            .iter()
            .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "condition_record_missing",
                "check": "conditions",
                "finding_id": condition.finding_id,
                "field_path": "condition_records",
                "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
                "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
                "fixable": true,
                "normalize_action": "materialize_condition_record",
            }));
        }
        // Accepted/applied proposals must carry a real reviewer identity;
        // a missing or placeholder reviewer is a hard error.
        for proposal in frontier.proposals.iter().filter(|proposal| {
            matches!(proposal.status.as_str(), "accepted" | "applied")
                && proposal
                    .reviewed_by
                    .as_deref()
                    .is_none_or(proposals::is_placeholder_reviewer)
        }) {
            diagnostics.push(json!({
                "severity": "error",
                "rule_id": "reviewer_identity_missing",
                "check": "proposals",
                "finding_id": proposal.target.id,
                "field_path": "proposals[].reviewed_by",
                "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
                "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
    }
    // Signal analysis consumes the accumulated diagnostics.
    let signal_report = loaded
        .as_ref()
        .map(|frontier| signals::analyze(frontier, &diagnostics))
        .unwrap_or_else(empty_signal_report);
    // Roll-up totals; `ok` is stricter under --strict.
    let errors =
        report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
    let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
    let infos = method_infos + graph_infos;
    let strict_blockers = signal_report
        .signals
        .iter()
        .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
        .count();
    let fixable = diagnostics
        .iter()
        .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
        .count();
    let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));

    json!({
        "ok": ok,
        "command": "check",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": src.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "summary": {
            "status": if ok { "pass" } else { "fail" },
            "checked_findings": report.total_files,
            "valid_findings": report.valid,
            "invalid_findings": report.invalid,
            "errors": errors,
            "warnings": warnings,
            "info": infos,
            "fixable": fixable,
            "strict": strict,
            "schema_only": schema_only,
        },
        "checks": [
            {
                "id": "schema",
                "status": if report.invalid == 0 { "pass" } else { "fail" },
                "checked": report.total_files,
                "failed": report.invalid,
                "errors": report.errors.iter().map(|e| json!({
                    "file": e.file,
                    "message": e.error,
                })).collect::<Vec<_>>(),
            },
            {
                "id": "methodology",
                "status": if method_errors == 0 { "pass" } else { "fail" },
                "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": method_errors,
                "warnings": method_warnings,
                "info": method_infos,
                "skipped": schema_only,
            },
            {
                "id": "frontier_graph",
                "status": if graph_errors == 0 { "pass" } else { "fail" },
                "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": graph_errors,
                "warnings": graph_warnings,
                "info": graph_infos,
                "skipped": schema_only,
            },
            {
                "id": "signals",
                "status": if strict_blockers == 0 { "pass" } else { "fail" },
                "checked": signal_report.signals.len(),
                "failed": strict_blockers,
                "warnings": signal_report.proof_readiness.warnings,
                "skipped": loaded.is_none(),
                "blockers": signal_report.signals.iter()
                    .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
                    .map(|s| json!({
                        "id": s.id,
                        "kind": s.kind,
                        "severity": s.severity,
                        "reason": s.reason,
                    }))
                    .collect::<Vec<_>>(),
            },
            {
                "id": "events",
                "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
                "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
                "failed": event_errors,
                "skipped": schema_only || loaded.is_none(),
            },
            {
                "id": "state_integrity",
                "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
                "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
                "failed": state_integrity_errors,
                "skipped": schema_only || loaded.is_none(),
            }
        ],
        "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
        "replay": replay_report,
        "state_integrity": state_integrity_report,
        "source_registry": source_registry,
        "evidence_atoms": evidence_atoms,
        "conditions": conditions,
        "proposals": proposal_summary,
        "proof_state": proof_state,
        "signatures": signature_report,
        "diagnostics": diagnostics,
        "signals": signal_report.signals,
        "review_queue": signal_report.review_queue,
        "proof_readiness": signal_report.proof_readiness,
        "repair_plan": build_repair_plan(&diagnostics),
    })
}
8258
/// Drive the `vela normalize` command: apply safe, mechanical repairs to a
/// frontier (entity fixes, confidence recompute, optional content-address ID
/// rewrites, record materialization) and either write the result or report
/// what a write would do.
///
/// Write targets are mutually exclusive: `--write` rewrites `source` in
/// place, `--out` writes elsewhere, neither means dry run. `fail(...)` and
/// `fail_return(...)` terminate the process, so flag-validation arms never
/// fall through.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag combinations that cannot be honored are rejected up front.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packets are immutable exports; never normalize them in place.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Guard: once the frontier has events beyond the initial creation
    // marker, in-place normalization would bypass the reviewed-event flow.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutation so "stats_changed" can be reported.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Record counts before materialization; deltas below are what this run
    // materialized.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    // Optional content-address ID rewrite: map stale IDs to the address
    // derived from assertion + provenance, refusing on collisions.
    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Two findings collapsing onto one address would silently merge
        // histories; abort instead.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Apply the rewrite, preserving the old ID as previous_version.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Second pass: retarget links that pointed at rewritten IDs.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old->new map for external tooling.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    // Materialize derivable source/evidence/condition records, then measure
    // how many each category gained.
    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist according to the selected write target (or not at all).
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            "  entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            "  would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8446
8447fn cmd_proof(
8448 frontier: &Path,
8449 out: &Path,
8450 template: &str,
8451 gold: Option<&Path>,
8452 record_proof_state: bool,
8453 json_output: bool,
8454) {
8455 if template != "bbb-alzheimer" {
8456 fail(&format!(
8457 "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
8458 ));
8459 }
8460 let mut loaded = load_frontier_or_fail(frontier);
8461 let source_hash = hash_path_or_fail(frontier);
8462 let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
8463 .unwrap_or_else(|e| fail(&e));
8464 let benchmark_summary = gold.map(|gold_path| {
8465 let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
8466 fail(&format!(
8467 "Failed to run proof benchmark '{}': {e}",
8468 gold_path.display()
8469 ))
8470 });
8471 append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
8472 fail(&format!("Failed to write benchmark summary: {e}"));
8473 });
8474 if summary.get("ok").and_then(Value::as_bool) != Some(true) {
8475 fail(&format!(
8476 "Proof benchmark failed for {}",
8477 gold_path.display()
8478 ));
8479 }
8480 summary
8481 });
8482 let validation_summary = packet::validate(out).unwrap_or_else(|e| {
8483 fail(&format!("Proof packet validation failed: {e}"));
8484 });
8485 proposals::record_proof_export(
8486 &mut loaded,
8487 proposals::ProofPacketRecord {
8488 generated_at: export_record.generated_at.clone(),
8489 snapshot_hash: export_record.snapshot_hash.clone(),
8490 event_log_hash: export_record.event_log_hash.clone(),
8491 packet_manifest_hash: export_record.packet_manifest_hash.clone(),
8492 },
8493 );
8494 project::recompute_stats(&mut loaded);
8495 if record_proof_state {
8496 repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
8497 }
8498 let signal_report = signals::analyze(&loaded, &[]);
8499 if json_output {
8500 let payload = json!({
8501 "ok": true,
8502 "command": "proof",
8503 "schema_version": project::VELA_SCHEMA_VERSION,
8504 "recorded_proof_state": record_proof_state,
8505 "frontier": {
8506 "name": &loaded.project.name,
8507 "source": frontier.display().to_string(),
8508 "hash": format!("sha256:{source_hash}"),
8509 },
8510 "template": template,
8511 "gold": gold.map(|p| p.display().to_string()),
8512 "benchmark": benchmark_summary,
8513 "output": out.display().to_string(),
8514 "packet": {
8515 "manifest_path": out.join("manifest.json").display().to_string(),
8516 },
8517 "validation": {
8518 "status": "ok",
8519 "summary": validation_summary,
8520 },
8521 "proposals": proposals::summary(&loaded),
8522 "proof_state": loaded.proof_state,
8523 "signals": signal_report.signals,
8524 "review_queue": signal_report.review_queue,
8525 "proof_readiness": signal_report.proof_readiness,
8526 "trace_path": out.join("proof-trace.json").display().to_string(),
8527 });
8528 println!(
8529 "{}",
8530 serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
8531 );
8532 } else {
8533 println!("vela proof");
8534 println!(" source: {}", frontier.display());
8535 println!(" template: {template}");
8536 println!(" output: {}", out.display());
8537 println!(" trace: {}", out.join("proof-trace.json").display());
8538 println!(
8539 " proof state: {}",
8540 if record_proof_state {
8541 "recorded"
8542 } else {
8543 "not recorded"
8544 }
8545 );
8546 println!();
8547 println!("{validation_summary}");
8548 }
8549}
8550
8551fn cmd_status(path: &Path, json: bool) {
8555 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8556
8557 let mut pending_total = 0usize;
8559 let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
8560 std::collections::BTreeMap::new();
8561 for p in &project.proposals {
8562 if p.status == "pending_review" {
8563 pending_total += 1;
8564 *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
8565 }
8566 }
8567
8568 let audit = crate::causal_reasoning::audit_frontier(&project);
8570 let audit_summary = crate::causal_reasoning::summarize_audit(&audit);
8571
8572 let mut last_sync: Option<&crate::events::StateEvent> = None;
8574 let mut last_conflict: Option<&crate::events::StateEvent> = None;
8575 let mut total_conflicts = 0usize;
8576 for e in &project.events {
8577 match e.kind.as_str() {
8578 "frontier.synced_with_peer" => {
8579 if last_sync
8580 .map(|prev| e.timestamp > prev.timestamp)
8581 .unwrap_or(true)
8582 {
8583 last_sync = Some(e);
8584 }
8585 }
8586 "frontier.conflict_detected" => {
8587 total_conflicts += 1;
8588 if last_conflict
8589 .map(|prev| e.timestamp > prev.timestamp)
8590 .unwrap_or(true)
8591 {
8592 last_conflict = Some(e);
8593 }
8594 }
8595 _ => {}
8596 }
8597 }
8598
8599 let mut targets_with_success = std::collections::HashSet::new();
8601 let mut failed_replications = 0usize;
8602 for r in &project.replications {
8603 if r.outcome == "replicated" {
8604 targets_with_success.insert(r.target_finding.clone());
8605 } else if r.outcome == "failed" {
8606 failed_replications += 1;
8607 }
8608 }
8609
8610 if json {
8611 println!(
8612 "{}",
8613 serde_json::to_string_pretty(&json!({
8614 "ok": true,
8615 "command": "status",
8616 "frontier": frontier_label(&project),
8617 "vfr_id": project.frontier_id(),
8618 "findings": project.findings.len(),
8619 "events": project.events.len(),
8620 "actors": project.actors.len(),
8621 "peers": project.peers.len(),
8622 "inbox": {
8623 "pending_total": pending_total,
8624 "pending_by_kind": pending_by_kind,
8625 },
8626 "causal_audit": {
8627 "identified": audit_summary.identified,
8628 "conditional": audit_summary.conditional,
8629 "underidentified": audit_summary.underidentified,
8630 "underdetermined": audit_summary.underdetermined,
8631 },
8632 "replications": {
8633 "total": project.replications.len(),
8634 "findings_with_success": targets_with_success.len(),
8635 "failed": failed_replications,
8636 },
8637 "federation": {
8638 "peers": project.peers.len(),
8639 "last_sync": last_sync.map(|e| e.timestamp.clone()),
8640 "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
8641 "total_conflicts": total_conflicts,
8642 },
8643 }))
8644 .expect("serialize status")
8645 );
8646 return;
8647 }
8648
8649 println!();
8650 println!(
8651 " {}",
8652 format!("VELA · STATUS · {}", path.display())
8653 .to_uppercase()
8654 .dimmed()
8655 );
8656 println!(" {}", style::tick_row(60));
8657 println!();
8658 println!(" frontier: {}", frontier_label(&project));
8659 println!(" vfr_id: {}", project.frontier_id());
8660 println!(
8661 " findings: {} events: {} peers: {} actors: {}",
8662 project.findings.len(),
8663 project.events.len(),
8664 project.peers.len(),
8665 project.actors.len(),
8666 );
8667 println!();
8668 if pending_total > 0 {
8669 println!(
8670 " {} {pending_total} pending proposals",
8671 style::warn("inbox")
8672 );
8673 for (k, n) in &pending_by_kind {
8674 println!(" · {n:>3} {k}");
8675 }
8676 } else {
8677 println!(" {} inbox clean", style::ok("ok"));
8678 }
8679 println!();
8680 if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
8681 let chip = if audit_summary.underidentified > 0 {
8682 style::lost("audit")
8683 } else {
8684 style::warn("audit")
8685 };
8686 println!(
8687 " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
8688 chip,
8689 audit_summary.identified,
8690 audit_summary.conditional,
8691 audit_summary.underidentified,
8692 audit_summary.underdetermined,
8693 );
8694 if audit_summary.underidentified > 0 {
8695 println!(
8696 " next: vela causal audit {} --problems-only",
8697 path.display()
8698 );
8699 }
8700 } else if audit_summary.underdetermined == 0 {
8701 println!(
8702 " {} causal audit: all {} identified",
8703 style::ok("ok"),
8704 audit_summary.identified
8705 );
8706 } else {
8707 println!(
8708 " {} causal audit: {} identified, {} ungraded",
8709 style::warn("audit"),
8710 audit_summary.identified,
8711 audit_summary.underdetermined,
8712 );
8713 }
8714 println!();
8715 if !project.replications.is_empty() {
8716 println!(
8717 " {} {} records · {} findings replicated · {} failed",
8718 style::ok("replications"),
8719 project.replications.len(),
8720 targets_with_success.len(),
8721 failed_replications,
8722 );
8723 }
8724 if project.peers.is_empty() {
8725 println!(
8726 " {} no federation peers registered",
8727 style::warn("federation")
8728 );
8729 } else {
8730 let last = last_sync
8731 .map(|e| fmt_timestamp(&e.timestamp))
8732 .unwrap_or_else(|| "never".to_string());
8733 let chip = if total_conflicts > 0 {
8734 style::warn("federation")
8735 } else {
8736 style::ok("federation")
8737 };
8738 println!(
8739 " {} {} peer(s) · last sync {} · {} conflict events",
8740 chip,
8741 project.peers.len(),
8742 last,
8743 total_conflicts,
8744 );
8745 }
8746 println!();
8747}
8748
8749fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8751 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8752 let mut events: Vec<&crate::events::StateEvent> = project
8753 .events
8754 .iter()
8755 .filter(|e| match kind_filter {
8756 Some(k) => e.kind.contains(k),
8757 None => true,
8758 })
8759 .collect();
8760 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8761 events.truncate(limit);
8762
8763 if json {
8764 let payload: Vec<_> = events
8765 .iter()
8766 .map(|e| {
8767 json!({
8768 "id": e.id,
8769 "kind": e.kind,
8770 "actor": e.actor.id,
8771 "target": &e.target.id,
8772 "target_type": &e.target.r#type,
8773 "timestamp": e.timestamp,
8774 "reason": e.reason,
8775 })
8776 })
8777 .collect();
8778 println!(
8779 "{}",
8780 serde_json::to_string_pretty(&json!({
8781 "ok": true,
8782 "command": "log",
8783 "events": payload,
8784 }))
8785 .expect("serialize log")
8786 );
8787 return;
8788 }
8789
8790 println!();
8791 println!(
8792 " {}",
8793 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8794 .to_uppercase()
8795 .dimmed()
8796 );
8797 println!(" {}", style::tick_row(60));
8798 if events.is_empty() {
8799 println!(" (no events)");
8800 return;
8801 }
8802 for e in &events {
8803 let when = fmt_timestamp(&e.timestamp);
8804 let target_short = if e.target.id.len() > 22 {
8805 format!("{}…", &e.target.id[..21])
8806 } else {
8807 e.target.id.clone()
8808 };
8809 let reason: String = e.reason.chars().take(70).collect();
8810 println!(
8811 " {:<19} {:<32} {:<24} {}",
8812 when, e.kind, target_short, reason
8813 );
8814 }
8815 println!();
8816}
8817
8818fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8820 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8821
8822 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8825 std::collections::HashMap::new();
8826 for p in &project.proposals {
8827 if p.kind != "finding.note" {
8828 continue;
8829 }
8830 if p.actor.id != "agent:reviewer-agent" {
8831 continue;
8832 }
8833 let reason = &p.reason;
8834 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8835 continue;
8836 };
8837 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8838 let extract = |k: &str| -> f64 {
8839 let pat = format!("{k} ");
8840 text.find(&pat)
8841 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8842 .and_then(|t| t.parse::<f64>().ok())
8843 .unwrap_or(0.0)
8844 };
8845 score_map.insert(
8846 target.to_string(),
8847 (
8848 extract("plausibility"),
8849 extract("evidence"),
8850 extract("scope"),
8851 extract("duplicate-risk"),
8852 ),
8853 );
8854 }
8855
8856 let mut pending: Vec<&crate::proposals::StateProposal> = project
8857 .proposals
8858 .iter()
8859 .filter(|p| {
8860 p.status == "pending_review"
8861 && match kind_filter {
8862 Some(k) => p.kind.contains(k),
8863 None => true,
8864 }
8865 })
8866 .collect();
8867 pending.sort_by(|a, b| {
8869 let sa = score_map
8870 .get(&a.id)
8871 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8872 let sb = score_map
8873 .get(&b.id)
8874 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8875 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8876 });
8877 pending.truncate(limit);
8878
8879 if json {
8880 let payload: Vec<_> = pending
8881 .iter()
8882 .map(|p| {
8883 let assertion_text = p
8884 .payload
8885 .get("finding")
8886 .and_then(|f| f.get("assertion"))
8887 .and_then(|a| a.get("text"))
8888 .and_then(|t| t.as_str());
8889 let assertion_type = p
8890 .payload
8891 .get("finding")
8892 .and_then(|f| f.get("assertion"))
8893 .and_then(|a| a.get("type"))
8894 .and_then(|t| t.as_str());
8895 let composite = score_map
8896 .get(&p.id)
8897 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8898 json!({
8899 "proposal_id": p.id,
8900 "kind": p.kind,
8901 "actor": p.actor,
8902 "reason": p.reason,
8903 "assertion_text": assertion_text,
8904 "assertion_type": assertion_type,
8905 "reviewer_composite": composite,
8906 })
8907 })
8908 .collect();
8909 println!(
8910 "{}",
8911 serde_json::to_string_pretty(&json!({
8912 "ok": true,
8913 "command": "inbox",
8914 "shown": pending.len(),
8915 "proposals": payload,
8916 }))
8917 .expect("serialize inbox")
8918 );
8919 return;
8920 }
8921
8922 println!();
8923 println!(
8924 " {}",
8925 format!(
8926 "VELA · INBOX · {} ({} pending shown)",
8927 path.display(),
8928 pending.len()
8929 )
8930 .to_uppercase()
8931 .dimmed()
8932 );
8933 println!(" {}", style::tick_row(60));
8934 if pending.is_empty() {
8935 println!(" (inbox clean)");
8936 return;
8937 }
8938 for p in &pending {
8939 let assertion_text = p
8940 .payload
8941 .get("finding")
8942 .and_then(|f| f.get("assertion"))
8943 .and_then(|a| a.get("text"))
8944 .and_then(|t| t.as_str())
8945 .unwrap_or("");
8946 let assertion_type = p
8947 .payload
8948 .get("finding")
8949 .and_then(|f| f.get("assertion"))
8950 .and_then(|a| a.get("type"))
8951 .and_then(|t| t.as_str())
8952 .unwrap_or("");
8953 let composite = score_map
8954 .get(&p.id)
8955 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8956 let score_str = composite
8957 .map(|c| format!("[{:.2}]", c))
8958 .unwrap_or_else(|| "[—] ".to_string());
8959 let kind_short = if p.kind.len() > 12 {
8960 format!("{}…", &p.kind[..11])
8961 } else {
8962 p.kind.clone()
8963 };
8964 let summary: String = if !assertion_text.is_empty() {
8965 assertion_text.chars().take(80).collect()
8966 } else {
8967 p.reason.chars().take(80).collect()
8968 };
8969 println!(
8970 " {} {} {:<13} {:<18} {}",
8971 score_str, p.id, kind_short, assertion_type, summary
8972 );
8973 }
8974 println!();
8975}
8976
8977fn cmd_ask(path: &Path, question: &str, json: bool) {
8982 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8983
8984 if question.trim().is_empty() {
8985 use std::io::{BufRead, Write};
8987 println!();
8988 println!(
8989 " {}",
8990 format!("VELA · ASK · {}", path.display())
8991 .to_uppercase()
8992 .dimmed()
8993 );
8994 println!(" {}", style::tick_row(60));
8995 println!(" Ask a question. Type `exit` to quit.");
8996 println!(" Examples:");
8997 println!(" · what's pending?");
8998 println!(" · what's underidentified?");
8999 println!(" · how many findings?");
9000 println!(" · what changed recently?");
9001 println!(" · who has what calibration?");
9002 println!();
9003 let stdin = std::io::stdin();
9004 let mut stdout = std::io::stdout();
9005 loop {
9006 print!(" ask> ");
9007 stdout.flush().ok();
9008 let mut line = String::new();
9009 if stdin.lock().read_line(&mut line).is_err() {
9010 break;
9011 }
9012 let q = line.trim();
9013 if q.is_empty() {
9014 continue;
9015 }
9016 if matches!(q, "exit" | "quit" | "q") {
9017 break;
9018 }
9019 answer(&project, q, false);
9020 }
9021 return;
9022 }
9023
9024 answer(&project, question, json);
9025}
9026
9027fn answer(project: &crate::project::Project, q: &str, json: bool) {
9028 let lower = q.to_lowercase();
9029
9030 if lower.contains("pending")
9032 || lower.contains("inbox")
9033 || lower.contains("queue")
9034 || lower.contains("to review")
9035 {
9036 let pending: Vec<&crate::proposals::StateProposal> = project
9037 .proposals
9038 .iter()
9039 .filter(|p| p.status == "pending_review")
9040 .collect();
9041 let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
9042 for p in &pending {
9043 *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
9044 }
9045 if json {
9046 println!(
9047 "{}",
9048 serde_json::to_string_pretty(&json!({
9049 "answer": "pending",
9050 "total": pending.len(),
9051 "by_kind": by_kind,
9052 }))
9053 .unwrap()
9054 );
9055 } else {
9056 println!(" {} pending proposals.", pending.len());
9057 for (k, n) in &by_kind {
9058 println!(" · {n:>3} {k}");
9059 }
9060 if pending.is_empty() {
9061 println!(" Inbox is clean.");
9062 } else {
9063 println!(" Run `vela inbox <frontier>` to triage.");
9064 }
9065 }
9066 return;
9067 }
9068
9069 if lower.contains("underident")
9071 || lower.contains("audit")
9072 || lower.contains("identif")
9073 || lower.contains("causal")
9074 {
9075 let entries = crate::causal_reasoning::audit_frontier(project);
9076 let summary = crate::causal_reasoning::summarize_audit(&entries);
9077 if json {
9078 println!(
9079 "{}",
9080 serde_json::to_string_pretty(&json!({
9081 "answer": "audit",
9082 "summary": {
9083 "identified": summary.identified,
9084 "conditional": summary.conditional,
9085 "underidentified": summary.underidentified,
9086 "underdetermined": summary.underdetermined,
9087 },
9088 }))
9089 .unwrap()
9090 );
9091 } else {
9092 println!(
9093 " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
9094 summary.identified,
9095 summary.conditional,
9096 summary.underidentified,
9097 summary.underdetermined,
9098 );
9099 if summary.underidentified > 0 {
9100 println!(
9101 " The {} underidentified findings are concrete review items:",
9102 summary.underidentified
9103 );
9104 for e in entries
9105 .iter()
9106 .filter(|e| {
9107 matches!(
9108 e.verdict,
9109 crate::causal_reasoning::Identifiability::Underidentified
9110 )
9111 })
9112 .take(8)
9113 {
9114 let txt: String = e.assertion_text.chars().take(70).collect();
9115 println!(" · {} {}", e.finding_id, txt);
9116 }
9117 }
9118 }
9119 return;
9120 }
9121
9122 if lower.contains("recent")
9124 || lower.contains("changed")
9125 || lower.contains("latest")
9126 || lower.contains("happen")
9127 {
9128 let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
9129 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
9130 events.truncate(8);
9131 if json {
9132 println!(
9133 "{}",
9134 serde_json::to_string_pretty(&json!({
9135 "answer": "recent_events",
9136 "events": events.iter().map(|e| json!({
9137 "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
9138 "actor": e.actor.id, "target": e.target.id,
9139 })).collect::<Vec<_>>(),
9140 }))
9141 .unwrap()
9142 );
9143 } else {
9144 println!(" Most recent {} events:", events.len());
9145 for e in &events {
9146 let when = fmt_timestamp(&e.timestamp);
9147 println!(" · {when} {:<28} {}", e.kind, e.target.id);
9148 }
9149 }
9150 return;
9151 }
9152
9153 if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
9155 let n = project.findings.len();
9156 let evs = project.events.len();
9157 let peers = project.peers.len();
9158 let actors = project.actors.len();
9159 if json {
9160 println!(
9161 "{}",
9162 serde_json::to_string_pretty(&json!({
9163 "answer": "counts",
9164 "findings": n,
9165 "events": evs,
9166 "peers": peers,
9167 "actors": actors,
9168 "replications": project.replications.len(),
9169 "predictions": project.predictions.len(),
9170 }))
9171 .unwrap()
9172 );
9173 } else {
9174 println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
9175 println!(
9176 " {} replications · {} predictions · {} datasets · {} code artifacts.",
9177 project.replications.len(),
9178 project.predictions.len(),
9179 project.datasets.len(),
9180 project.code_artifacts.len(),
9181 );
9182 }
9183 return;
9184 }
9185
9186 if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
9188 let records =
9189 crate::calibration::calibration_records(&project.predictions, &project.resolutions);
9190 if json {
9191 println!("{}", serde_json::to_string_pretty(&records).unwrap());
9192 } else if records.is_empty() {
9193 println!(" No predictions yet. The calibration ledger is empty.");
9194 } else {
9195 println!(" Calibration over {} actor(s):", records.len());
9196 for r in &records {
9197 let brier = r
9198 .brier_score
9199 .map(|b| format!("{:.3}", b))
9200 .unwrap_or_else(|| "—".into());
9201 println!(
9202 " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
9203 r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
9204 );
9205 }
9206 }
9207 return;
9208 }
9209
9210 if lower.contains("peer")
9212 || lower.contains("federat")
9213 || lower.contains("sync")
9214 || lower.contains("conflict")
9215 {
9216 let mut total_conflicts = 0usize;
9217 for e in &project.events {
9218 if e.kind == "frontier.conflict_detected" {
9219 total_conflicts += 1;
9220 }
9221 }
9222 if json {
9223 println!(
9224 "{}",
9225 serde_json::to_string_pretty(&json!({
9226 "answer": "federation",
9227 "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
9228 "total_conflicts": total_conflicts,
9229 }))
9230 .unwrap()
9231 );
9232 } else {
9233 println!(" {} peer(s) registered:", project.peers.len());
9234 for p in &project.peers {
9235 println!(" · {:<24} {}", p.id, p.url);
9236 }
9237 println!(" {total_conflicts} conflict events on the canonical log.");
9238 }
9239 return;
9240 }
9241
9242 if json {
9244 println!(
9245 "{}",
9246 serde_json::to_string_pretty(&json!({
9247 "answer": "unknown_question",
9248 "question": q,
9249 "hint": "Try: pending, audit, recent, how many, calibration, peers."
9250 }))
9251 .unwrap()
9252 );
9253 } else {
9254 println!(" Don't know how to route that question yet.");
9255 println!(" Try: pending · audit · recent · how many · calibration · peers");
9256 }
9257}
9258
9259fn frontier_label(p: &crate::project::Project) -> String {
9260 if p.project.name.trim().is_empty() {
9261 "(unnamed)".to_string()
9262 } else {
9263 p.project.name.clone()
9264 }
9265}
9266
9267fn fmt_timestamp(ts: &str) -> String {
9268 chrono::DateTime::parse_from_rfc3339(ts)
9271 .map(|dt| dt.format("%m-%d %H:%M").to_string())
9272 .unwrap_or_else(|_| ts.chars().take(16).collect())
9273}
9274
9275fn cmd_stats(path: &Path) {
9276 let frontier = load_frontier_or_fail(path);
9277 let s = &frontier.stats;
9278 println!();
9279 println!(" {}", "FRONTIER · V0.36.0".dimmed());
9280 println!(" {}", frontier.project.name.bold());
9281 println!(" {}", style::tick_row(60));
9282 println!(" id: {}", frontier.frontier_id());
9283 println!(" compiled: {}", frontier.project.compiled_at);
9284 println!(" papers: {}", frontier.project.papers_processed);
9285 println!(" findings: {}", s.findings);
9286 println!(" links: {}", s.links);
9287 println!(" replicated: {}", s.replicated);
9288 println!(" avg confidence: {}", s.avg_confidence);
9289 println!(" gaps: {}", s.gaps);
9290 println!(" contested: {}", s.contested);
9291 println!(" reviewed: {}", s.human_reviewed);
9292 println!(" proposals: {}", s.proposal_count);
9293 println!(
9294 " recorded proof: {}",
9295 frontier.proof_state.latest_packet.status
9296 );
9297 if frontier.proof_state.latest_packet.status != "never_exported" {
9298 println!(
9299 " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
9300 );
9301 }
9302 if !s.categories.is_empty() {
9303 println!();
9304 println!(" {}", "categories".dimmed());
9305 let mut categories = s.categories.iter().collect::<Vec<_>>();
9306 categories.sort_by(|a, b| b.1.cmp(a.1));
9307 for (category, count) in categories {
9308 println!(" {category}: {}", count);
9309 }
9310 }
9311 println!();
9312 println!(" {}", style::tick_row(60));
9313 println!();
9314}
9315
/// Dispatches the `vela proposals <subcommand>` family.
///
/// Every arm loads or touches a frontier, performs the action through the
/// `proposals` module, and prints either pretty JSON (`--json`) or a short
/// plain-text summary. Errors abort the process via `fail_return`;
/// `validate` additionally exits 1 when any proposal is invalid.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // proposals list — all proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                // Count is read back out of the payload rather than from
                // `proposals_list`, which was moved into the json! above.
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // proposals show — one proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // proposals preview — dry-run a proposal and report the before/after
        // deltas (findings, artifacts, events) without persisting anything.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Detail lines are only printed when non-empty.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // proposals import — pull proposals from an external file into the
        // frontier; the report breaks down applied/rejected/duplicates.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // proposals validate — schema-check a proposal file; exits 1 (after
        // listing each error) when anything is invalid.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Non-zero exit so CI pipelines can gate on validity.
                std::process::exit(1);
            }
        }
        // proposals export — write proposals (optionally status-filtered)
        // out to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // proposals accept — accept AND apply a proposal; prints the id of
        // the event the application produced.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // proposals reject — mark a proposal rejected with a reviewer reason.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9590
9591fn cmd_artifact_to_state(
9592 frontier: &Path,
9593 packet: &Path,
9594 actor: &str,
9595 apply_artifacts: bool,
9596 json: bool,
9597) {
9598 let report =
9599 crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
9600 .unwrap_or_else(|e| fail_return(&e));
9601 if json {
9602 println!(
9603 "{}",
9604 serde_json::to_string_pretty(&report)
9605 .expect("failed to serialize artifact-to-state report")
9606 );
9607 } else {
9608 println!("vela artifact-to-state");
9609 println!(" packet: {}", report.packet_id);
9610 println!(" frontier: {}", report.frontier);
9611 println!(" artifact proposals: {}", report.artifact_proposals);
9612 println!(" finding proposals: {}", report.finding_proposals);
9613 println!(" gap proposals: {}", report.gap_proposals);
9614 println!(
9615 " applied artifact events: {}",
9616 report.applied_artifact_events
9617 );
9618 println!(
9619 " pending truth proposals: {}",
9620 report.pending_truth_proposals
9621 );
9622 }
9623}
9624
/// Dispatches `vela bridge-kit <subcommand>`.
///
/// `validate` schema-checks a directory/file of bridge-kit packets;
/// `verify-provenance` resolves the external identifiers in one packet over
/// the network. Both exit 1 on failure (invalid packets / unresolved ids).
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        // bridge-kit validate — offline structural validation of packets.
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                // One line per packet: counts for valid ones, joined error
                // text for invalid ones.
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                // Report-level errors (e.g. unreadable source path).
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Non-zero exit so CI can gate on packet validity.
            if !report.ok {
                std::process::exit(1);
            }
        }
        // bridge-kit verify-provenance — network check of doi/pmid/s2
        // identifiers referenced by one packet.
        BridgeKitAction::VerifyProvenance { packet, json } => {
            let report = verify_packet_provenance(&packet).await;
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Fixed-width status tag so identifiers line up.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            // Unreachable resolvers only "skip"; exit 1 only when a resolver
            // answered and the identifier definitively did not resolve.
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
9701
/// Aggregated outcome of `bridge-kit verify-provenance` for one packet;
/// serialized directly as the `--json` output.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    /// Command name echoed into the JSON ("bridge-kit.verify-provenance").
    command: String,
    /// Display form of the packet path that was checked.
    packet: String,
    /// One entry per distinct identifier found in the packet.
    identifiers: Vec<ProvenanceVerificationEntry>,
    /// Counts of entries by status; together they sum to `identifiers.len()`.
    resolved_count: usize,
    unresolved_count: usize,
    skipped_count: usize,
}
9711
/// Verification outcome for a single external identifier.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    /// Normalized identifier, e.g. "doi:10.1000/xyz", "pmid:12345", "s2:…".
    identifier: String,
    /// Identifier family: "doi", "pmid", "s2", or "unknown".
    kind: String,
    /// "resolved", "unresolved" (resolver answered negatively), or
    /// "skipped" (unrecognized prefix or resolver unreachable).
    status: String,
    /// Optional human-readable detail; omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
9720
/// Reads, parses and validates an artifact packet, collects every
/// recognizable external identifier (doi/pmid/s2) from its artifacts and
/// candidate-claim source refs, then checks each one against its public
/// resolver. Read/parse/validate failures abort the process via
/// `fail_return`; per-identifier network failures only mark that entry.
async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
    use crate::artifact_to_state::ArtifactPacket;
    let raw = std::fs::read_to_string(packet_path)
        .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
    let parsed: ArtifactPacket =
        serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
    let packet = parsed
        .validate()
        .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));

    // BTreeSet both dedupes identifiers and gives deterministic (sorted)
    // output order.
    let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    for artifact in &packet.artifacts {
        if let Some(ident) = extract_identifier(&artifact.locator) {
            candidates.insert(ident);
        }
    }
    for claim in &packet.candidate_claims {
        for source_ref in &claim.source_refs {
            if let Some(ident) = extract_identifier(source_ref) {
                candidates.insert(ident);
            }
        }
    }

    // NOTE(review): user-agent version is hard-coded ("0.108") — confirm it
    // tracks the crate version.
    let client = reqwest::Client::builder()
        .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));

    let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
    let mut resolved = 0usize;
    let mut unresolved = 0usize;
    let mut skipped = 0usize;
    // Identifiers are checked sequentially (one resolver request at a time).
    for candidate in &candidates {
        let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
            verify_doi(&client, doi).await
        } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
            verify_pmid(&client, pmid).await
        } else if let Some(s2_id) = candidate.strip_prefix("s2:") {
            verify_s2(&client, s2_id).await
        } else {
            // Defensive: extract_identifier only emits the three prefixes
            // above, but keep a skip path rather than panic.
            ProvenanceVerificationEntry {
                identifier: candidate.clone(),
                kind: "unknown".to_string(),
                status: "skipped".to_string(),
                note: Some("no recognized identifier prefix".to_string()),
            }
        };
        match entry.status.as_str() {
            "resolved" => resolved += 1,
            "unresolved" => unresolved += 1,
            _ => skipped += 1,
        }
        entries.push(entry);
    }

    ProvenanceVerificationReport {
        command: "bridge-kit.verify-provenance".to_string(),
        packet: packet_path.display().to_string(),
        identifiers: entries,
        resolved_count: resolved,
        unresolved_count: unresolved,
        skipped_count: skipped,
    }
}
9792
/// Normalizes a locator/source-ref string into a canonical identifier of the
/// form `doi:…`, `pmid:…`, or `s2:…`.
///
/// Accepted inputs, in priority order:
/// - already-prefixed `doi:` / `pmid:` / `s2:` strings (returned as-is),
/// - doi.org / dx.doi.org URLs (http or https),
/// - PubMed article URLs (trailing `/` stripped from the id),
/// - Semantic Scholar paper/API URLs (last path segment, query stripped),
/// - bare DOIs starting with `10.` that contain `/` and no spaces.
///
/// Returns `None` for empty input or anything unrecognized.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    if trimmed.starts_with("doi:") || trimmed.starts_with("pmid:") || trimmed.starts_with("s2:") {
        return Some(trimmed.to_string());
    }
    // FIX: previously only https://dx.doi.org/ was accepted while the plain
    // doi.org and pubmed lists accepted both schemes; http://dx.doi.org/ now
    // normalizes too (backward-compatible — it used to fall through to None).
    for prefix in [
        "https://doi.org/",
        "http://doi.org/",
        "https://dx.doi.org/",
        "http://dx.doi.org/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            // PubMed article URLs conventionally end with a trailing slash.
            let pmid = rest.trim_end_matches('/');
            return Some(format!("pmid:{pmid}"));
        }
    }
    for prefix in [
        "https://www.semanticscholar.org/paper/",
        "http://www.semanticscholar.org/paper/",
        "https://api.semanticscholar.org/graph/v1/paper/",
        "https://api.semanticscholar.org/v1/paper/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            // Paper URLs look like …/paper/<Title-Slug>/<paperId>?<query>;
            // take the last path segment and drop any query string.
            let s2_id = rest
                .split('/')
                .next_back()
                .unwrap_or(rest)
                .split('?')
                .next()
                .unwrap_or(rest);
            if !s2_id.is_empty() {
                return Some(format!("s2:{s2_id}"));
            }
        }
    }
    // Bare DOI heuristic: DOIs start with the "10." directory indicator and
    // always contain a registrant/suffix slash.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
9851
9852async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
9853 let url = format!("https://api.crossref.org/works/{doi}");
9854 match client.get(&url).send().await {
9855 Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
9856 identifier: format!("doi:{doi}"),
9857 kind: "doi".to_string(),
9858 status: "resolved".to_string(),
9859 note: None,
9860 },
9861 Ok(resp) => ProvenanceVerificationEntry {
9862 identifier: format!("doi:{doi}"),
9863 kind: "doi".to_string(),
9864 status: "unresolved".to_string(),
9865 note: Some(format!("crossref returned {}", resp.status())),
9866 },
9867 Err(e) => ProvenanceVerificationEntry {
9868 identifier: format!("doi:{doi}"),
9869 kind: "doi".to_string(),
9870 status: "skipped".to_string(),
9871 note: Some(format!("crossref unreachable: {e}")),
9872 },
9873 }
9874}
9875
9876async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
9877 let url = format!(
9878 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
9879 );
9880 match client.get(&url).send().await {
9881 Ok(resp) if resp.status().is_success() => {
9882 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9885 let result = body.get("result");
9886 let uids = result
9887 .and_then(|r| r.get("uids"))
9888 .and_then(|u| u.as_array());
9889 let resolved = uids.is_some_and(|a| !a.is_empty());
9890 if resolved {
9891 ProvenanceVerificationEntry {
9892 identifier: format!("pmid:{pmid}"),
9893 kind: "pmid".to_string(),
9894 status: "resolved".to_string(),
9895 note: None,
9896 }
9897 } else {
9898 ProvenanceVerificationEntry {
9899 identifier: format!("pmid:{pmid}"),
9900 kind: "pmid".to_string(),
9901 status: "unresolved".to_string(),
9902 note: Some("eutils returned empty uids".to_string()),
9903 }
9904 }
9905 }
9906 Ok(resp) => ProvenanceVerificationEntry {
9907 identifier: format!("pmid:{pmid}"),
9908 kind: "pmid".to_string(),
9909 status: "unresolved".to_string(),
9910 note: Some(format!("eutils returned {}", resp.status())),
9911 },
9912 Err(e) => ProvenanceVerificationEntry {
9913 identifier: format!("pmid:{pmid}"),
9914 kind: "pmid".to_string(),
9915 status: "skipped".to_string(),
9916 note: Some(format!("eutils unreachable: {e}")),
9917 },
9918 }
9919}
9920
9921async fn verify_s2(client: &reqwest::Client, s2_id: &str) -> ProvenanceVerificationEntry {
9928 let url = format!("https://api.semanticscholar.org/graph/v1/paper/{s2_id}");
9929 match client.get(&url).send().await {
9930 Ok(resp) if resp.status().is_success() => {
9931 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9932 let has_paper_id = body
9933 .get("paperId")
9934 .and_then(serde_json::Value::as_str)
9935 .is_some_and(|v| !v.is_empty());
9936 if has_paper_id {
9937 ProvenanceVerificationEntry {
9938 identifier: format!("s2:{s2_id}"),
9939 kind: "s2".to_string(),
9940 status: "resolved".to_string(),
9941 note: None,
9942 }
9943 } else {
9944 ProvenanceVerificationEntry {
9945 identifier: format!("s2:{s2_id}"),
9946 kind: "s2".to_string(),
9947 status: "unresolved".to_string(),
9948 note: Some("semantic scholar returned 200 with no paperId".to_string()),
9949 }
9950 }
9951 }
9952 Ok(resp) => ProvenanceVerificationEntry {
9953 identifier: format!("s2:{s2_id}"),
9954 kind: "s2".to_string(),
9955 status: "unresolved".to_string(),
9956 note: Some(format!("semantic scholar returned {}", resp.status())),
9957 },
9958 Err(e) => ProvenanceVerificationEntry {
9959 identifier: format!("s2:{s2_id}"),
9960 kind: "s2".to_string(),
9961 status: "skipped".to_string(),
9962 note: Some(format!("semantic scholar unreachable: {e}")),
9963 },
9964 }
9965}
9966
/// Dispatches `vela source-adapter <subcommand>`.
///
/// Currently only `run`: forwards all CLI flags into
/// `source_adapters::SourceAdapterRunOptions`, executes the adapter, and
/// prints the run report (pretty JSON with `--json`, otherwise indented
/// text). Errors abort the process via `fail_return`.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            // CLI flags map 1:1 onto the options struct.
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // Packet id is only present when a packet was produced.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
10024
/// Dispatches `vela runtime-adapter <subcommand>`.
///
/// Currently only `run`: forwards CLI flags into
/// `runtime_adapters::RuntimeAdapterRunOptions`, executes the adapter
/// synchronously, and prints the run report (pretty JSON with `--json`,
/// otherwise indented text). Errors abort the process via `fail_return`.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            // CLI flags map 1:1 onto the options struct.
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                // Packet id is only present when a packet was produced.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
10077
/// Dispatch for `vela sign` subcommands: keypair generation, signing all
/// findings in a frontier, verifying signatures, and setting a per-finding
/// signature threshold.
///
/// Every arm supports `--json` for machine-readable output; otherwise a
/// styled human summary is printed. Errors terminate the process via
/// `fail`/`fail_return`.
fn cmd_sign(action: SignAction) {
    match action {
        // Generate a fresh keypair under `out` and report the public half.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        // Sign every finding in the frontier with the given private key;
        // `count` is the number of findings signed.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures across the frontier, optionally pinned to one
        // public key, and print signed/unsigned/valid/invalid counts.
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold stats are only meaningful when at least one
                // finding carries a signature threshold.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set `signature_threshold` on a single finding and recompute
        // joint-acceptance across the frontier.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A zero threshold would be trivially met; reject it up front.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // `fail` diverges, so the `else` branch satisfies `let-else`.
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute joint-acceptance now that the threshold changed, then
            // read back whether this finding meets it, before persisting.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
10206
10207fn cmd_actor(action: ActorAction) {
10208 match action {
10209 ActorAction::Add {
10210 frontier,
10211 id,
10212 pubkey,
10213 tier,
10214 orcid,
10215 clearance,
10216 json,
10217 } => {
10218 let trimmed = pubkey.trim();
10220 if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
10221 fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
10222 }
10223 let orcid_normalized = orcid
10225 .as_deref()
10226 .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
10227 let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
10230 crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
10231 });
10232
10233 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10234 if project.actors.iter().any(|actor| actor.id == id) {
10235 fail(&format!(
10236 "Actor '{id}' already registered in this frontier."
10237 ));
10238 }
10239 project.actors.push(sign::ActorRecord {
10240 id: id.clone(),
10241 public_key: trimmed.to_string(),
10242 algorithm: "ed25519".to_string(),
10243 created_at: chrono::Utc::now().to_rfc3339(),
10244 tier: tier.clone(),
10245 orcid: orcid_normalized.clone(),
10246 access_clearance: clearance,
10247 });
10248 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10249 let payload = json!({
10250 "ok": true,
10251 "command": "actor.add",
10252 "frontier": frontier.display().to_string(),
10253 "actor_id": id,
10254 "public_key": trimmed,
10255 "tier": tier,
10256 "orcid": orcid_normalized,
10257 "registered_count": project.actors.len(),
10258 });
10259 if json {
10260 println!(
10261 "{}",
10262 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
10263 );
10264 } else {
10265 let tier_suffix = tier
10266 .as_deref()
10267 .map_or_else(String::new, |t| format!(" tier={t}"));
10268 println!(
10269 "{} actor {} (pubkey {}{tier_suffix})",
10270 style::ok("registered"),
10271 id,
10272 &trimmed[..16]
10273 );
10274 }
10275 }
10276 ActorAction::List { frontier, json } => {
10277 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10278 if json {
10279 let payload = json!({
10280 "ok": true,
10281 "command": "actor.list",
10282 "frontier": frontier.display().to_string(),
10283 "actors": project.actors,
10284 });
10285 println!(
10286 "{}",
10287 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
10288 );
10289 } else {
10290 println!();
10291 println!(
10292 " {}",
10293 format!("VELA · ACTOR · LIST · {}", frontier.display())
10294 .to_uppercase()
10295 .dimmed()
10296 );
10297 println!(" {}", style::tick_row(60));
10298 if project.actors.is_empty() {
10299 println!(" (no actors registered)");
10300 } else {
10301 for actor in &project.actors {
10302 println!(
10303 " {:<28} {}… registered {}",
10304 actor.id,
10305 &actor.public_key[..16],
10306 actor.created_at
10307 );
10308 }
10309 }
10310 }
10311 }
10312 }
10313}
10314
/// Dispatch for `vela causal` subcommands: identifiability audits, effect
/// identification, graph inspection, and counterfactual queries over a
/// frontier's causal link graph.
///
/// Every arm supports `--json` (pretty-printed payload, then early return);
/// otherwise a styled human report is printed. Errors terminate the process
/// via `fail`/`fail_return`.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        // Audit every finding's causal claim for identifiability.
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let mut entries = causal_reasoning::audit_frontier(&project);
            if problems_only {
                // Keep only entries flagged for reviewer attention.
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // Note: the summary is computed over the (possibly filtered) set.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!(" (no entries to report)");
                return;
            }
            for e in &entries {
                // Color-coded status chip per identifiability verdict.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Render the optional claim/grade enums as lowercase Debug text,
                // defaulting to "none" when absent.
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
                // Truncate long assertions (78 chars) to keep output scannable.
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!(" {assertion_short}");
                println!(" {} {}", style::ok("why:"), e.rationale);
                // Show the remediation line for anything a reviewer should act
                // on, and also for underdetermined verdicts.
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!(" {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        // Identify the causal effect of `source` on `target` (back-door /
        // front-door analysis).
        CausalAction::Effect {
            frontier,
            source,
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            match verdict {
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    if adjustment_set.is_empty() {
                        println!(
                            " {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!(" {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!(" · {z}");
                        }
                    }
                    println!(
                        " back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        " {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!(" mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!(" · {m}");
                    }
                    println!(
                        " applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!(" {} no causal path: {reason}", style::warn("no_path"));
                }
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        " {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    println!(" open back-door paths:");
                    // Cap at five paths to keep the report bounded.
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!(" · {}", path.join(" — "));
                    }
                    println!(
                        " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        // Print the causal graph, optionally restricted to one node.
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // With --node, validate and show just that node; otherwise walk
            // every finding id in the frontier.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // Skip isolated nodes in the full listing, but always show an
                // explicitly requested single node (nodes.len() == 1).
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue;
                }
                println!(" {n}");
                if !parents.is_empty() {
                    println!(" parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!(" children: {}", children.join(", "));
                }
            }
        }
        // Answer a do()-style counterfactual: "had `intervene_on` been
        // `set_to`, what would `target` have been?"
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            println!(" {}", style::tick_row(72));
            match verdict {
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        " twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    // Cap at five paths to keep output bounded.
                    for p in paths_used.iter().take(5) {
                        println!(" · {}", p.join(" → "));
                    }
                    println!(
                        " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
                         instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        " {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!(" · {parent} → {child}");
                    }
                    println!(
                        " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                CounterfactualVerdict::NoCausalPath { factual } => {
                    println!(
                        " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
10669
/// Dispatch for `vela bridges` subcommands: derive cross-frontier bridges,
/// list/show them, and record reviewer confirm/refute judgments.
///
/// Bridges are persisted as one JSON file per bridge under
/// `<frontier>/.vela/bridges/`; reviewer judgments additionally emit a
/// `bridge_reviewed` event file under `<frontier>/.vela/events/`.
fn cmd_bridges(action: BridgesAction) {
    use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
    use std::collections::HashMap;

    // Directory where a frontier's bridge JSON files live.
    fn bridges_dir(frontier: &Path) -> PathBuf {
        frontier.join(".vela/bridges")
    }

    // Read and parse a single bridge file by id.
    fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
        let path = bridges_dir(frontier).join(format!("{id}.json"));
        if !path.is_file() {
            return Err(format!("bridge not found: {id}"));
        }
        let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
        serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
    }

    // Serialize a bridge to `<bridges_dir>/<id>.json`, creating the directory
    // on first use. The trailing newline keeps files diff-friendly.
    fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
        let dir = bridges_dir(frontier);
        std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
        let path = dir.join(format!("{}.json", b.id));
        let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
        std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
    }

    // Reviewer identity for confirm/refute; overridable via VELA_REVIEWER_ID.
    fn default_reviewer_id() -> String {
        std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
    }

    // Write a `bridge_reviewed` event file recording a reviewer judgment.
    // The payload is validated against the set of known bridge ids before the
    // event is created.
    fn emit_bridge_reviewed_event(
        frontier: &Path,
        bridge_id: &str,
        status: &str,
        reviewer_id: &str,
        note: Option<&str>,
    ) -> Result<(), String> {
        let mut payload = serde_json::json!({
            "bridge_id": bridge_id,
            "status": status,
        });
        // Only attach a note when one was provided and is non-blank.
        if let Some(n) = note
            && !n.trim().is_empty()
        {
            payload["note"] = serde_json::Value::String(n.to_string());
        }
        let known_ids: Vec<String> = list_bridges(frontier)
            .unwrap_or_default()
            .into_iter()
            .map(|b| b.id)
            .collect();
        crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
        let event = crate::events::new_bridge_reviewed_event(
            bridge_id,
            reviewer_id,
            "human",
            &format!("Bridge {status} by {reviewer_id}"),
            payload,
            Vec::new(),
        );
        let events_dir = frontier.join(".vela/events");
        std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
        let event_path = events_dir.join(format!("{}.json", event.id));
        let data =
            serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
        std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
    }

    // Load every `*.json` bridge in the frontier, sorted by descending
    // finding-ref count, then ascending entity name.
    fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
        let dir = bridges_dir(frontier);
        if !dir.is_dir() {
            return Ok(Vec::new());
        }
        let mut out = Vec::new();
        for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
            let entry = entry.map_err(|e| format!("read entry: {e}"))?;
            let path = entry.path();
            // Skip anything that is not a .json file.
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
            let b: Bridge =
                serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
            out.push(b);
        }
        out.sort_by(|a, b| {
            b.finding_refs
                .len()
                .cmp(&a.finding_refs.len())
                .then(a.entity_name.cmp(&b.entity_name))
        });
        Ok(out)
    }

    match action {
        // Derive bridges between two frontiers and materialize them under
        // frontier A, preserving prior reviewer judgments.
        BridgesAction::Derive {
            frontier_a,
            label_a,
            frontier_b,
            label_b,
            json,
        } => {
            let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
            let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
            let now = chrono::Utc::now().to_rfc3339();
            let new_bridges =
                derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);

            // Index prior bridges by id so re-derivation does not clobber a
            // reviewer's confirmed/refuted judgment.
            let existing = list_bridges(&frontier_a).unwrap_or_default();
            let existing_by_id: HashMap<String, Bridge> =
                existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
            let mut written = 0;
            let mut preserved = 0;
            let mut new_ids = Vec::new();
            for mut bridge in new_bridges {
                if let Some(prev) = existing_by_id.get(&bridge.id)
                    && prev.status != BridgeStatus::Derived
                {
                    // Carry over the reviewed status and original derive time.
                    bridge.status = prev.status;
                    bridge.derived_at = prev.derived_at.clone();
                    preserved += 1;
                }
                save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
                new_ids.push(bridge.id.clone());
                written += 1;
            }

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.derive",
                        "frontier_a": frontier_a.display().to_string(),
                        "frontier_b": frontier_b.display().to_string(),
                        "bridges_written": written,
                        "reviewer_judgments_preserved": preserved,
                        "ids": new_ids,
                    }))
                    .expect("serialize bridges.derive")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
            if preserved > 0 {
                println!(
                    " {} {} reviewer judgment(s) preserved",
                    style::ok("kept"),
                    preserved
                );
            }
            // Cap the id listing at ten.
            for id in new_ids.iter().take(10) {
                println!(" · {id}");
            }
            if new_ids.len() > 10 {
                println!(" … and {} more", new_ids.len() - 10);
            }
            println!();
        }
        // List bridges, optionally filtered by status.
        BridgesAction::List {
            frontier,
            status,
            json,
        } => {
            let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
            if let Some(s) = status.as_deref() {
                // Case-insensitive status filter; unknown values abort.
                let want = match s.to_lowercase().as_str() {
                    "derived" => BridgeStatus::Derived,
                    "confirmed" => BridgeStatus::Confirmed,
                    "refuted" => BridgeStatus::Refuted,
                    other => fail_return(&format!(
                        "unknown bridge status '{other}' (try derived|confirmed|refuted)"
                    )),
                };
                bridges.retain(|b| b.status == want);
            }
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.list",
                        "frontier": frontier.display().to_string(),
                        "count": bridges.len(),
                        "bridges": bridges,
                    }))
                    .expect("serialize bridges.list")
                );
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · LIST · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} bridge(s)", bridges.len());
            for b in &bridges {
                // Color-coded chip per review status.
                let chip = match b.status {
                    BridgeStatus::Derived => style::warn("derived"),
                    BridgeStatus::Confirmed => style::ok("confirmed"),
                    BridgeStatus::Refuted => style::lost("refuted"),
                };
                println!();
                println!(
                    " {chip} {} {} ↔ findings:{}",
                    b.id,
                    b.entity_name,
                    b.finding_refs.len()
                );
                println!(" frontiers: {}", b.frontiers.join(", "));
                if let Some(t) = &b.tension {
                    println!(" tension: {t}");
                }
            }
            println!();
        }
        // Show one bridge in full detail.
        BridgesAction::Show {
            frontier,
            bridge_id,
            json,
        } => {
            let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · SHOW · {}", b.id)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" entity: {}", b.entity_name);
            println!(" status: {:?}", b.status);
            println!(" frontiers: {}", b.frontiers.join(", "));
            if !b.frontier_ids.is_empty() {
                println!(" frontier_ids: {}", b.frontier_ids.join(", "));
            }
            if let Some(t) = &b.tension {
                println!(" tension: {t}");
            }
            println!(" derived_at: {}", b.derived_at);
            println!(" finding refs ({}):", b.finding_refs.len());
            for r in &b.finding_refs {
                let dir = r.direction.as_deref().unwrap_or("—");
                // Truncate assertions to 72 chars for display.
                let truncated: String = r.assertion_text.chars().take(72).collect();
                println!(
                    " · [{}] {} (conf={:.2}, dir={})",
                    r.frontier, r.finding_id, r.confidence, dir
                );
                println!(" {truncated}");
            }
            println!();
        }
        // Mark a bridge confirmed and record the judgment as an event.
        BridgesAction::Confirm {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Confirmed;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // Best-effort event emission: the status change above is already
            // persisted, so an event failure is deliberately ignored.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "confirmed",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
            println!();
        }
        // Mark a bridge refuted and record the judgment as an event.
        BridgesAction::Refute {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Refuted;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // Best-effort event emission; see Confirm above.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "refuted",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now refuted", style::lost("refuted"), b.id);
            println!();
        }
    }
}
11013
11014fn cmd_federation(action: FederationAction) {
11016 use crate::federation::PeerHub;
11017
11018 match action {
11019 FederationAction::PeerAdd {
11020 frontier,
11021 id,
11022 url,
11023 pubkey,
11024 note,
11025 json,
11026 } => {
11027 let peer = PeerHub {
11028 id: id.clone(),
11029 url: url.clone(),
11030 public_key: pubkey.trim().to_string(),
11031 added_at: chrono::Utc::now().to_rfc3339(),
11032 note: note.clone(),
11033 };
11034 peer.validate().unwrap_or_else(|e| fail_return(&e));
11035
11036 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11037 if project.peers.iter().any(|p| p.id == id) {
11038 fail(&format!("peer '{id}' already in registry"));
11039 }
11040 project.peers.push(peer.clone());
11041 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11042
11043 if json {
11044 println!(
11045 "{}",
11046 serde_json::to_string_pretty(&json!({
11047 "ok": true,
11048 "command": "federation.peer-add",
11049 "frontier": frontier.display().to_string(),
11050 "peer": peer,
11051 "registered_count": project.peers.len(),
11052 }))
11053 .expect("serialize federation.peer-add")
11054 );
11055 } else {
11056 println!(
11057 "{} peer {} (pubkey {}…) at {}",
11058 style::ok("registered"),
11059 id,
11060 &peer.public_key[..16],
11061 peer.url
11062 );
11063 }
11064 }
11065 FederationAction::PeerList { frontier, json } => {
11066 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11067 if json {
11068 println!(
11069 "{}",
11070 serde_json::to_string_pretty(&json!({
11071 "ok": true,
11072 "command": "federation.peer-list",
11073 "frontier": frontier.display().to_string(),
11074 "peers": project.peers,
11075 }))
11076 .expect("serialize federation.peer-list")
11077 );
11078 } else {
11079 println!();
11080 println!(
11081 " {}",
11082 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
11083 .to_uppercase()
11084 .dimmed()
11085 );
11086 println!(" {}", style::tick_row(60));
11087 if project.peers.is_empty() {
11088 println!(" (no peers registered)");
11089 } else {
11090 for p in &project.peers {
11091 let note_suffix = if p.note.is_empty() {
11092 String::new()
11093 } else {
11094 format!(" · {}", p.note)
11095 };
11096 println!(
11097 " {:<24} {} {}…{note_suffix}",
11098 p.id,
11099 p.url,
11100 &p.public_key[..16]
11101 );
11102 }
11103 }
11104 }
11105 }
11106 FederationAction::Sync {
11107 frontier,
11108 peer_id,
11109 url,
11110 via_hub,
11111 vfr_id,
11112 allow_cross_vfr,
11113 dry_run,
11114 json,
11115 } => {
11116 use crate::federation::{self, DiscoveryResult};
11117
11118 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11119 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
11120 fail(&format!(
11121 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
11122 ));
11123 };
11124 let local_frontier_id = project.frontier_id();
11125
11126 if via_hub
11133 && let Some(target) = vfr_id.as_deref()
11134 && target != local_frontier_id
11135 && !allow_cross_vfr
11136 {
11137 fail(&format!(
11138 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
11139 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
11140 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
11141 ));
11142 }
11143
11144 #[derive(Debug)]
11146 enum SyncOutcome {
11147 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
11151 }
11152
11153 let outcome = if via_hub {
11154 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
11155 match federation::discover_peer_frontier(
11156 &peer.url,
11157 &target_vfr,
11158 Some(&peer.public_key),
11159 ) {
11160 DiscoveryResult::Resolved(p) => {
11161 let src =
11162 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
11163 SyncOutcome::Resolved(p, src)
11164 }
11165 DiscoveryResult::BrokenLocator {
11166 vfr_id,
11167 locator,
11168 status,
11169 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
11170 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
11171 SyncOutcome::UnverifiedEntry(vfr_id, reason)
11172 }
11173 DiscoveryResult::EntryNotFound { vfr_id, status } => {
11174 SyncOutcome::EntryNotFound(vfr_id, status)
11175 }
11176 DiscoveryResult::Unreachable { url, error } => {
11177 fail(&format!("peer hub unreachable ({url}): {error}"));
11178 }
11179 }
11180 } else {
11181 let resolved_url = url.unwrap_or_else(|| {
11182 let base = peer.url.trim_end_matches('/');
11183 format!("{base}/manifest/{local_frontier_id}.json")
11184 });
11185 match federation::fetch_peer_frontier(&resolved_url) {
11186 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
11187 Err(e) => fail(&format!("direct fetch failed: {e}")),
11188 }
11189 };
11190
11191 let peer_source: String;
11194 let peer_state = match outcome {
11195 SyncOutcome::Resolved(p, src) => {
11196 if !json {
11197 println!(" · resolved via {src}");
11198 }
11199 peer_source = src;
11200 p
11201 }
11202 SyncOutcome::BrokenLocator(vfr, locator, status) => {
11203 if dry_run {
11204 if json {
11205 println!(
11206 "{}",
11207 serde_json::to_string_pretty(&json!({
11208 "ok": true,
11209 "command": "federation.sync",
11210 "dry_run": true,
11211 "outcome": "broken_locator",
11212 "vfr_id": vfr,
11213 "locator": locator,
11214 "http_status": status,
11215 }))
11216 .expect("serialize")
11217 );
11218 } else {
11219 println!(
11220 "{} dry-run: peer entry resolved but locator dead",
11221 style::warn("broken_locator")
11222 );
11223 println!(" vfr_id: {vfr}");
11224 println!(" locator: {locator} (HTTP {status})");
11225 }
11226 return;
11227 }
11228 let report = federation::record_locator_failure(
11229 &mut project,
11230 &peer_id,
11231 &vfr,
11232 &locator,
11233 status,
11234 );
11235 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11236 if json {
11237 println!(
11238 "{}",
11239 serde_json::to_string_pretty(&json!({
11240 "ok": true,
11241 "command": "federation.sync",
11242 "outcome": "broken_locator",
11243 "report": report,
11244 }))
11245 .expect("serialize")
11246 );
11247 } else {
11248 println!(
11249 "{} sync recorded broken-locator conflict against {peer_id}",
11250 style::warn("broken_locator")
11251 );
11252 println!(" vfr_id: {vfr}");
11253 println!(" locator: {locator} (HTTP {status})");
11254 println!(" events appended: {}", report.events_appended);
11255 }
11256 return;
11257 }
11258 SyncOutcome::UnverifiedEntry(vfr, reason) => {
11259 if dry_run {
11260 if json {
11261 println!(
11262 "{}",
11263 serde_json::to_string_pretty(&json!({
11264 "ok": true,
11265 "command": "federation.sync",
11266 "dry_run": true,
11267 "outcome": "unverified_peer_entry",
11268 "vfr_id": vfr,
11269 "reason": reason,
11270 }))
11271 .expect("serialize")
11272 );
11273 } else {
11274 println!(
11275 "{} dry-run: peer entry signature did not verify",
11276 style::lost("unverified_peer_entry")
11277 );
11278 println!(" vfr_id: {vfr}");
11279 println!(" reason: {reason}");
11280 }
11281 return;
11282 }
11283 let report =
11284 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
11285 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11286 if json {
11287 println!(
11288 "{}",
11289 serde_json::to_string_pretty(&json!({
11290 "ok": true,
11291 "command": "federation.sync",
11292 "outcome": "unverified_peer_entry",
11293 "report": report,
11294 }))
11295 .expect("serialize")
11296 );
11297 } else {
11298 println!(
11299 "{} sync halted; peer's registry entry signature did not verify",
11300 style::lost("unverified_peer_entry")
11301 );
11302 println!(" vfr_id: {vfr}");
11303 println!(" reason: {reason}");
11304 }
11305 return;
11306 }
11307 SyncOutcome::EntryNotFound(vfr, status) => {
11308 if json {
11309 println!(
11310 "{}",
11311 serde_json::to_string_pretty(&json!({
11312 "ok": false,
11313 "command": "federation.sync",
11314 "outcome": "entry_not_found",
11315 "vfr_id": vfr,
11316 "http_status": status,
11317 }))
11318 .expect("serialize")
11319 );
11320 } else {
11321 println!(
11322 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
11323 style::warn("entry_not_found")
11324 );
11325 }
11326 return;
11327 }
11328 };
11329
11330 if dry_run {
11331 let conflicts = federation::diff_frontiers(&project, &peer_state);
11332 if json {
11333 println!(
11334 "{}",
11335 serde_json::to_string_pretty(&json!({
11336 "ok": true,
11337 "command": "federation.sync",
11338 "dry_run": true,
11339 "peer_id": peer_id,
11340 "peer_source": peer_source,
11341 "conflicts": conflicts,
11342 }))
11343 .expect("serialize federation.sync (dry-run)")
11344 );
11345 } else {
11346 println!(
11347 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
11348 style::ok("ok"),
11349 peer_source,
11350 conflicts.len()
11351 );
11352 for c in &conflicts {
11353 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
11354 }
11355 }
11356 return;
11357 }
11358
11359 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
11360 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11361
11362 if json {
11363 println!(
11364 "{}",
11365 serde_json::to_string_pretty(&json!({
11366 "ok": true,
11367 "command": "federation.sync",
11368 "peer_id": peer_id,
11369 "peer_source": peer_source,
11370 "report": report,
11371 }))
11372 .expect("serialize federation.sync")
11373 );
11374 } else {
11375 println!(
11376 "{} synced with {} ({})",
11377 style::ok("ok"),
11378 peer_id,
11379 peer_source
11380 );
11381 println!(
11382 " our: {}",
11383 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
11384 );
11385 println!(
11386 " peer: {}",
11387 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
11388 );
11389 println!(
11390 " conflicts: {} events appended: {}",
11391 report.conflicts.len(),
11392 report.events_appended
11393 );
11394 for c in &report.conflicts {
11395 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
11396 }
11397 }
11398 }
11399 FederationAction::PushResolution {
11400 frontier,
11401 conflict_event_id,
11402 to,
11403 key,
11404 vfr_id,
11405 json,
11406 } => {
11407 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
11408 }
11409 FederationAction::PeerRemove { frontier, id, json } => {
11410 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11411 let before = project.peers.len();
11412 project.peers.retain(|p| p.id != id);
11413 if project.peers.len() == before {
11414 fail(&format!("peer '{id}' not found in registry"));
11415 }
11416 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11417
11418 if json {
11419 println!(
11420 "{}",
11421 serde_json::to_string_pretty(&json!({
11422 "ok": true,
11423 "command": "federation.peer-remove",
11424 "frontier": frontier.display().to_string(),
11425 "removed": id,
11426 "remaining": project.peers.len(),
11427 }))
11428 .expect("serialize federation.peer-remove")
11429 );
11430 } else {
11431 println!(
11432 "{} peer {} ({} remaining)",
11433 style::ok("removed"),
11434 id,
11435 project.peers.len()
11436 );
11437 }
11438 }
11439 }
11440}
11441
11442fn cmd_federation_push_resolution(
11454 frontier: PathBuf,
11455 conflict_event_id: String,
11456 to: String,
11457 key: Option<PathBuf>,
11458 vfr_id: Option<String>,
11459 json: bool,
11460) {
11461 use crate::canonical;
11462 use crate::sign;
11463
11464 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11465
11466 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
11467 fail(&format!(
11468 "peer '{to}' not in registry; run `vela federation peer-add` first"
11469 ));
11470 };
11471
11472 let Some(resolution) = project
11474 .events
11475 .iter()
11476 .find(|e| {
11477 e.kind == "frontier.conflict_resolved"
11478 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
11479 == Some(conflict_event_id.as_str())
11480 })
11481 .cloned()
11482 else {
11483 fail(&format!(
11484 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
11485 frontier.display()
11486 ));
11487 };
11488
11489 let actor_id = resolution.actor.id.clone();
11492 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
11493 fail(&format!(
11494 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
11495 register the reviewer with `vela actor add` before pushing"
11496 ));
11497 };
11498
11499 let key_path = key.unwrap_or_else(|| {
11502 let home = std::env::var("HOME").unwrap_or_default();
11503 let base = PathBuf::from(home)
11504 .join(".config")
11505 .join("vela")
11506 .join("keys");
11507 let safe_id = actor.id.replace([':', '/'], "_");
11508 let by_actor = base.join(format!("{safe_id}.key"));
11509 if by_actor.exists() {
11510 by_actor
11511 } else {
11512 base.join("private.key")
11513 }
11514 });
11515
11516 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
11517 fail_return(&format!(
11518 "load private key from {}: {e}",
11519 key_path.display()
11520 ))
11521 });
11522 let pubkey_hex = sign::pubkey_hex(&signing_key);
11523 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
11524 fail(&format!(
11525 "private key at {} does not match actor {}'s registered public key. \
11526 Loaded pubkey {}, expected {}.",
11527 key_path.display(),
11528 actor.id,
11529 &pubkey_hex[..16],
11530 &actor.public_key[..16]
11531 ));
11532 }
11533
11534 let signature_hex = sign::sign_event(&resolution, &signing_key)
11537 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
11538
11539 let mut body = resolution.clone();
11544 body.signature = None;
11545 let body_value =
11546 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
11547 let _canonical_check = canonical::to_canonical_bytes(&body_value)
11548 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
11549
11550 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
11551 let url = format!(
11552 "{}/entries/{}/events",
11553 peer.url.trim_end_matches('/'),
11554 target_vfr
11555 );
11556
11557 let url_owned = url.clone();
11559 let pubkey_owned = pubkey_hex.clone();
11560 let signature_owned = signature_hex.clone();
11561 let body_owned = body_value.clone();
11562 let response: Result<(u16, String), String> = std::thread::spawn(move || {
11563 let client = reqwest::blocking::Client::new();
11564 let resp = client
11565 .post(&url_owned)
11566 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
11567 .header("X-Vela-Signature", &signature_owned)
11568 .json(&body_owned)
11569 .send()
11570 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
11571 let status = resp.status().as_u16();
11572 let text = resp.text().unwrap_or_default();
11573 Ok((status, text))
11574 })
11575 .join()
11576 .map_err(|_| "push thread panicked".to_string())
11577 .unwrap_or_else(|e| fail_return(&e));
11578
11579 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
11580 let parsed: serde_json::Value =
11581 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
11582
11583 let accepted = matches!(status, 200..=202);
11584 if json {
11585 println!(
11586 "{}",
11587 serde_json::to_string_pretty(&json!({
11588 "ok": accepted,
11589 "command": "federation.push-resolution",
11590 "frontier": frontier.display().to_string(),
11591 "peer_id": to,
11592 "url": url,
11593 "conflict_event_id": conflict_event_id,
11594 "event_id": resolution.id,
11595 "actor_id": actor.id,
11596 "http_status": status,
11597 "response": parsed,
11598 }))
11599 .expect("serialize federation.push-resolution")
11600 );
11601 } else if accepted {
11602 println!(
11603 "{} resolution {} pushed to {} (HTTP {})",
11604 style::ok("ok"),
11605 &resolution.id[..16.min(resolution.id.len())],
11606 to,
11607 status
11608 );
11609 println!(" url: {url}");
11610 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
11611 } else {
11612 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
11613 println!(" url: {url}");
11614 println!(" response: {text}");
11615 std::process::exit(1);
11616 }
11617}
11618
/// Dispatch `vela queue …` subcommands: list, clear, or sign the offline
/// action queue.
///
/// The queue file defaults to `queue::default_queue_path()` when the user
/// does not pass one explicitly. All failures exit the process via
/// `fail_return`.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        QueueAction::List { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            // `clear` reports how many actions were dropped.
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file holds a hex-encoded ed25519 private key; trim to
            // tolerate a trailing newline.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            // Actions that are declined or fail stay queued; `remaining`
            // becomes the new queue contents at the end.
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                // Interactive confirmation per action unless --yes-to-all.
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // A failed action is retained for a later retry.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Persist the not-signed leftovers as the new queue.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11760
11761fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11762 let bytes = hex::decode(hex_str)
11763 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11764 let key_bytes: [u8; 32] = bytes
11765 .try_into()
11766 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11767 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11768}
11769
11770fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11771 use std::io::{self, BufRead, Write};
11772 let mut stdout = io::stdout().lock();
11773 let _ = writeln!(
11774 stdout,
11775 " sign {} on {}? [y/N] ",
11776 action.kind,
11777 action.frontier.display()
11778 );
11779 let _ = stdout.flush();
11780 drop(stdout);
11781 let stdin = io::stdin();
11782 let mut line = String::new();
11783 if stdin.lock().read_line(&mut line).is_err() {
11784 return false;
11785 }
11786 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11787}
11788
/// Sign one queued action with `signing_key` and apply it to its frontier.
///
/// Supported kinds are the four `propose_*` actions (which build, sign, and
/// create/apply a proposal) and `accept_proposal`/`reject_proposal` (which
/// sign a canonical review preimage and then call the corresponding
/// `proposals::*_at_path` helper). Returns a short human-readable report on
/// success, or an error string for missing args / unsupported kinds /
/// downstream failures.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queue action kind to the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Per-kind payload; each variant requires its own argument.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued timestamp if present; otherwise stamp with now.
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // The id is content-derived, so it must be recomputed after
            // created_at is overridden.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature value is discarded; as written this
            // only proves the key can sign (errors propagate via `?`).
            // Presumably `create_or_apply` does not persist the signature —
            // confirm whether it should be attached to the proposal.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision; canonicalization
            // failures abort before any state change.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): this signature is also computed and then dropped;
            // the accept/reject helpers below are called without it — verify
            // whether the hub/frontier is expected to receive it.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11910
11911fn cmd_entity(action: EntityAction) {
11923 use crate::entity_resolve;
11924 match action {
11925 EntityAction::Resolve {
11926 frontier,
11927 force,
11928 json,
11929 } => {
11930 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11931 let report = entity_resolve::resolve_frontier(&mut p, force);
11932 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11933 if json {
11934 println!(
11935 "{}",
11936 serde_json::to_string_pretty(&serde_json::json!({
11937 "ok": true,
11938 "command": "entity.resolve",
11939 "frontier_path": frontier.display().to_string(),
11940 "report": report,
11941 }))
11942 .expect("serialize")
11943 );
11944 } else {
11945 println!(
11946 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11947 style::ok("entity"),
11948 report.resolved,
11949 report.total_entities,
11950 report.already_resolved,
11951 report.unresolved_count,
11952 report.findings_touched,
11953 );
11954 let unresolved_summary: std::collections::BTreeSet<&str> = report
11955 .per_finding
11956 .iter()
11957 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11958 .collect();
11959 if !unresolved_summary.is_empty() {
11960 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11961 println!(
11962 " unresolved (first {}): {}",
11963 take.len(),
11964 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11965 );
11966 }
11967 }
11968 }
11969 EntityAction::List { json } => {
11970 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11971 .map(|(name, etype, source, id)| {
11972 serde_json::json!({
11973 "canonical_name": name,
11974 "entity_type": etype,
11975 "source": source,
11976 "id": id,
11977 })
11978 })
11979 .collect();
11980 if json {
11981 println!(
11982 "{}",
11983 serde_json::to_string_pretty(&serde_json::json!({
11984 "ok": true,
11985 "command": "entity.list",
11986 "count": entries.len(),
11987 "entries": entries,
11988 }))
11989 .expect("serialize")
11990 );
11991 } else {
11992 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11993 for e in &entries {
11994 println!(
11995 " {:32} {:18} {} {}",
11996 e["canonical_name"].as_str().unwrap_or("?"),
11997 e["entity_type"].as_str().unwrap_or("?"),
11998 e["source"].as_str().unwrap_or("?"),
11999 e["id"].as_str().unwrap_or("?"),
12000 );
12001 }
12002 }
12003 }
12004 }
12005}
12006
/// Dispatch `vela link …`: currently only `link add`, which records a typed
/// link from one finding to another (local or cross-frontier) and optionally
/// probes the cross-frontier target over HTTP to warn about stale or missing
/// targets.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Validate the enum-ish string flags before touching the repo.
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            // `--to` accepts either a local vf_<hex> or vf_<hex>@vfr_<hex>.
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target requires a declared dependency.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check of the cross-frontier target: only
            // runs for http(s) locators, and every network/parse failure is
            // silently skipped — the link is still recorded, only the warning
            // is lost.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                // Bounded timeout so a dead locator cannot hang the CLI.
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            // Links feed into aggregate stats, so recompute before saving.
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Splice the optional warning into the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
12152
12153fn cmd_frontier(action: FrontierAction) {
12154 use crate::project::ProjectDependency;
12155 use crate::repo;
12156 match action {
12157 FrontierAction::New {
12158 path,
12159 name,
12160 description,
12161 force,
12162 json,
12163 } => {
12164 if path.exists() && !force {
12165 fail(&format!(
12166 "{} already exists; pass --force to overwrite",
12167 path.display()
12168 ));
12169 }
12170 let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
12171 let project = project::Project {
12172 vela_version: project::VELA_SCHEMA_VERSION.to_string(),
12173 schema: project::VELA_SCHEMA_URL.to_string(),
12174 frontier_id: None,
12175 project: project::ProjectMeta {
12176 name: name.clone(),
12177 description: description.clone(),
12178 compiled_at: now,
12179 compiler: project::VELA_COMPILER_VERSION.to_string(),
12180 papers_processed: 0,
12181 errors: 0,
12182 dependencies: Vec::new(),
12183 },
12184 stats: project::ProjectStats::default(),
12185 findings: Vec::new(),
12186 sources: Vec::new(),
12187 evidence_atoms: Vec::new(),
12188 condition_records: Vec::new(),
12189 review_events: Vec::new(),
12190 confidence_updates: Vec::new(),
12191 events: Vec::new(),
12192 proposals: Vec::new(),
12193 proof_state: proposals::ProofState::default(),
12194 signatures: Vec::new(),
12195 actors: Vec::new(),
12196 replications: Vec::new(),
12197 datasets: Vec::new(),
12198 code_artifacts: Vec::new(),
12199 artifacts: Vec::new(),
12200 predictions: Vec::new(),
12201 resolutions: Vec::new(),
12202 peers: Vec::new(),
12203 negative_results: Vec::new(),
12204 trajectories: Vec::new(),
12205 };
12206 repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
12207 let payload = json!({
12208 "ok": true,
12209 "command": "frontier.new",
12210 "path": path.display().to_string(),
12211 "name": name,
12212 "schema": project::VELA_SCHEMA_URL,
12213 "vela_version": env!("CARGO_PKG_VERSION"),
12214 "next_steps": [
12215 "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
12216 "vela sign generate-keypair --out keys",
12217 "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
12218 "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
12219 ],
12220 });
12221 if json {
12222 println!(
12223 "{}",
12224 serde_json::to_string_pretty(&payload)
12225 .expect("failed to serialize frontier.new")
12226 );
12227 } else {
12228 println!(
12229 "{} scaffolded frontier '{name}' at {}",
12230 style::ok("frontier"),
12231 path.display()
12232 );
12233 println!(" next steps:");
12234 println!(
12235 " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
12236 path.display()
12237 );
12238 println!(" 2. vela sign generate-keypair --out keys");
12239 println!(
12240 " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
12241 path.display()
12242 );
12243 println!(
12244 " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
12245 path.display()
12246 );
12247 }
12248 }
12249 FrontierAction::Materialize { frontier, json } => {
12250 let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
12251 if json {
12252 println!(
12253 "{}",
12254 serde_json::to_string_pretty(&payload)
12255 .expect("failed to serialize frontier materialize")
12256 );
12257 } else {
12258 println!(
12259 "{} materialized frontier repo at {}",
12260 style::ok("frontier"),
12261 frontier.display()
12262 );
12263 }
12264 }
12265 FrontierAction::AddDep {
12266 frontier,
12267 vfr_id,
12268 locator,
12269 snapshot,
12270 name,
12271 json,
12272 } => {
12273 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12274 if p.project
12275 .dependencies
12276 .iter()
12277 .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
12278 {
12279 fail(&format!(
12280 "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
12281 ));
12282 }
12283 let dep = ProjectDependency {
12284 name: name.unwrap_or_else(|| vfr_id.clone()),
12285 source: "vela.hub".into(),
12286 version: None,
12287 pinned_hash: None,
12288 vfr_id: Some(vfr_id.clone()),
12289 locator: Some(locator.clone()),
12290 pinned_snapshot_hash: Some(snapshot.clone()),
12291 };
12292 p.project.dependencies.push(dep);
12293 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12294 let payload = json!({
12295 "ok": true,
12296 "command": "frontier.add-dep",
12297 "frontier": frontier.display().to_string(),
12298 "vfr_id": vfr_id,
12299 "locator": locator,
12300 "pinned_snapshot_hash": snapshot,
12301 "declared_count": p.project.dependencies.len(),
12302 });
12303 if json {
12304 println!(
12305 "{}",
12306 serde_json::to_string_pretty(&payload)
12307 .expect("failed to serialize frontier.add-dep")
12308 );
12309 } else {
12310 println!(
12311 "{} declared cross-frontier dep {vfr_id}",
12312 style::ok("frontier")
12313 );
12314 println!(" locator: {locator}");
12315 println!(" snapshot: {snapshot}");
12316 }
12317 }
12318 FrontierAction::ListDeps { frontier, json } => {
12319 let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12320 let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
12321 if json {
12322 let payload = json!({
12323 "ok": true,
12324 "command": "frontier.list-deps",
12325 "frontier": frontier.display().to_string(),
12326 "count": deps.len(),
12327 "dependencies": deps,
12328 });
12329 println!(
12330 "{}",
12331 serde_json::to_string_pretty(&payload)
12332 .expect("failed to serialize frontier.list-deps")
12333 );
12334 } else {
12335 println!();
12336 println!(
12337 " {}",
12338 format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
12339 .to_uppercase()
12340 .dimmed()
12341 );
12342 println!(" {}", style::tick_row(60));
12343 if deps.is_empty() {
12344 println!(" (no dependencies declared)");
12345 } else {
12346 for d in &deps {
12347 let kind = if d.is_cross_frontier() {
12348 "cross-frontier"
12349 } else {
12350 "compile-time"
12351 };
12352 println!(" · {} [{kind}]", d.name);
12353 if let Some(v) = &d.vfr_id {
12354 println!(" vfr_id: {v}");
12355 }
12356 if let Some(l) = &d.locator {
12357 println!(" locator: {l}");
12358 }
12359 if let Some(s) = &d.pinned_snapshot_hash {
12360 println!(" snapshot: {s}");
12361 }
12362 }
12363 }
12364 }
12365 }
12366 FrontierAction::RemoveDep {
12367 frontier,
12368 vfr_id,
12369 json,
12370 } => {
12371 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12372 for f in &p.findings {
12374 for l in &f.links {
12375 if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
12376 crate::bundle::LinkRef::parse(&l.target)
12377 && v == &vfr_id
12378 {
12379 fail(&format!(
12380 "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
12381 f.id, l.target
12382 ));
12383 }
12384 }
12385 }
12386 let before = p.project.dependencies.len();
12387 p.project
12388 .dependencies
12389 .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
12390 let removed = before - p.project.dependencies.len();
12391 if removed == 0 {
12392 fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
12393 }
12394 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12395 let payload = json!({
12396 "ok": true,
12397 "command": "frontier.remove-dep",
12398 "frontier": frontier.display().to_string(),
12399 "vfr_id": vfr_id,
12400 "removed": removed,
12401 });
12402 if json {
12403 println!(
12404 "{}",
12405 serde_json::to_string_pretty(&payload)
12406 .expect("failed to serialize frontier.remove-dep")
12407 );
12408 } else {
12409 println!(
12410 "{} removed cross-frontier dep {vfr_id}",
12411 style::ok("frontier")
12412 );
12413 }
12414 }
12415 FrontierAction::RefreshDeps {
12416 frontier,
12417 from,
12418 dry_run,
12419 json,
12420 } => {
12421 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12422 let cross_deps: Vec<String> = p
12423 .project
12424 .dependencies
12425 .iter()
12426 .filter_map(|d| d.vfr_id.clone())
12427 .collect();
12428 if cross_deps.is_empty() {
12429 if json {
12430 println!(
12431 "{}",
12432 serde_json::to_string_pretty(&json!({
12433 "ok": true,
12434 "command": "frontier.refresh-deps",
12435 "frontier": frontier.display().to_string(),
12436 "from": from,
12437 "dry_run": dry_run,
12438 "deps": [],
12439 "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
12440 })).expect("serialize")
12441 );
12442 } else {
12443 println!(
12444 "{} no cross-frontier deps declared in {}",
12445 style::ok("frontier"),
12446 frontier.display()
12447 );
12448 }
12449 return;
12450 }
12451 let client = reqwest::blocking::Client::builder()
12452 .timeout(std::time::Duration::from_secs(20))
12453 .build()
12454 .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
12455 let base = from.trim_end_matches('/');
12456 #[derive(serde::Deserialize)]
12457 struct HubEntry {
12458 latest_snapshot_hash: String,
12459 }
12460 let mut per_dep: Vec<serde_json::Value> = Vec::new();
12461 let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
12462 (0u32, 0u32, 0u32, 0u32);
12463 for vfr in &cross_deps {
12464 let url = format!("{base}/entries/{vfr}");
12465 let resp = client.get(&url).send();
12466 let outcome = match resp {
12467 Ok(r) if r.status().as_u16() == 404 => {
12468 missing += 1;
12469 json!({ "vfr_id": vfr, "status": "missing", "url": url })
12470 }
12471 Ok(r) if !r.status().is_success() => {
12472 unreachable += 1;
12473 json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
12474 }
12475 Err(e) => {
12476 unreachable += 1;
12477 json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
12478 }
12479 Ok(r) => match r.json::<HubEntry>() {
12480 Err(e) => {
12481 unreachable += 1;
12482 json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
12483 }
12484 Ok(entry) => {
12485 match p
12487 .project
12488 .dependencies
12489 .iter()
12490 .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
12491 {
12492 None => {
12493 unreachable += 1;
12494 json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
12495 }
12496 Some(idx) => {
12497 let local_pin =
12498 p.project.dependencies[idx].pinned_snapshot_hash.clone();
12499 let new_pin = entry.latest_snapshot_hash;
12500 if local_pin.as_deref() == Some(new_pin.as_str()) {
12501 unchanged += 1;
12502 json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
12503 } else {
12504 if !dry_run {
12505 p.project.dependencies[idx].pinned_snapshot_hash =
12506 Some(new_pin.clone());
12507 }
12508 refreshed += 1;
12509 json!({
12510 "vfr_id": vfr,
12511 "status": "refreshed",
12512 "old_snapshot": local_pin,
12513 "new_snapshot": new_pin,
12514 })
12515 }
12516 }
12517 }
12518 }
12519 },
12520 };
12521 per_dep.push(outcome);
12522 }
12523 if !dry_run && refreshed > 0 {
12524 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12525 }
12526 let payload = json!({
12527 "ok": true,
12528 "command": "frontier.refresh-deps",
12529 "frontier": frontier.display().to_string(),
12530 "from": from,
12531 "dry_run": dry_run,
12532 "deps": per_dep,
12533 "summary": {
12534 "total": cross_deps.len(),
12535 "refreshed": refreshed,
12536 "unchanged": unchanged,
12537 "missing": missing,
12538 "unreachable": unreachable,
12539 },
12540 });
12541 if json {
12542 println!(
12543 "{}",
12544 serde_json::to_string_pretty(&payload)
12545 .expect("failed to serialize frontier.refresh-deps")
12546 );
12547 } else {
12548 let mode = if dry_run { " (dry-run)" } else { "" };
12549 println!(
12550 "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
12551 style::ok("frontier"),
12552 cross_deps.len()
12553 );
12554 for d in &per_dep {
12555 let vfr = d["vfr_id"].as_str().unwrap_or("?");
12556 let status = d["status"].as_str().unwrap_or("?");
12557 match status {
12558 "refreshed" => println!(
12559 " {vfr} refreshed {} → {}",
12560 d["old_snapshot"]
12561 .as_str()
12562 .unwrap_or("(none)")
12563 .chars()
12564 .take(16)
12565 .collect::<String>(),
12566 d["new_snapshot"]
12567 .as_str()
12568 .unwrap_or("?")
12569 .chars()
12570 .take(16)
12571 .collect::<String>(),
12572 ),
12573 "unchanged" => println!(" {vfr} unchanged"),
12574 "missing" => println!(" {vfr} missing on hub"),
12575 _ => println!(" {vfr} unreachable"),
12576 }
12577 }
12578 }
12579 }
12580 FrontierAction::Diff {
12581 frontier,
12582 since,
12583 week,
12584 json,
12585 } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
12586 }
12587}
12588
12589fn cmd_repo(action: RepoAction) {
12590 match action {
12591 RepoAction::Status { frontier, json } => {
12592 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
12593 if json {
12594 println!(
12595 "{}",
12596 serde_json::to_string_pretty(&payload)
12597 .expect("failed to serialize repo status")
12598 );
12599 } else {
12600 let summary = payload.get("summary").unwrap_or(&Value::Null);
12601 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
12602 println!("vela repo status");
12603 println!(" frontier: {}", frontier.display());
12604 println!(
12605 " events: {}",
12606 summary
12607 .get("accepted_events")
12608 .and_then(Value::as_u64)
12609 .unwrap_or_default()
12610 );
12611 println!(
12612 " open proposals: {}",
12613 summary
12614 .get("open_proposals")
12615 .and_then(Value::as_u64)
12616 .unwrap_or_default()
12617 );
12618 println!(
12619 " state: {}",
12620 freshness
12621 .get("materialized_state")
12622 .and_then(Value::as_str)
12623 .unwrap_or("unknown")
12624 );
12625 println!(
12626 " proof: {}",
12627 freshness
12628 .get("proof")
12629 .and_then(Value::as_str)
12630 .unwrap_or("unknown")
12631 );
12632 }
12633 }
12634 RepoAction::Doctor { frontier, json } => {
12635 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
12636 if json {
12637 println!(
12638 "{}",
12639 serde_json::to_string_pretty(&payload)
12640 .expect("failed to serialize repo doctor")
12641 );
12642 } else {
12643 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12644 let issues = payload
12645 .get("issues")
12646 .and_then(Value::as_array)
12647 .map_or(0, Vec::len);
12648 println!("vela repo doctor");
12649 println!(" frontier: {}", frontier.display());
12650 println!(" status: {}", if ok { "ok" } else { "needs attention" });
12651 println!(" issues: {issues}");
12652 }
12653 }
12654 }
12655}
12656
12657fn cmd_proof_verify(frontier: &Path, json_output: bool) {
12658 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
12659 if json_output {
12660 println!(
12661 "{}",
12662 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
12663 );
12664 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12665 std::process::exit(1);
12666 }
12667 } else {
12668 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12669 println!("vela proof verify");
12670 println!(" frontier: {}", frontier.display());
12671 println!(" status: {}", if ok { "ok" } else { "failed" });
12672 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
12673 for issue in issues {
12674 if let Some(message) = issue.get("message").and_then(Value::as_str) {
12675 println!(" issue: {message}");
12676 }
12677 }
12678 }
12679 if !ok {
12680 std::process::exit(1);
12681 }
12682 }
12683}
12684
12685fn cmd_proof_explain(frontier: &Path) {
12686 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
12687 print!("{text}");
12688}
12689
/// `vela frontier diff` — summarize findings added/updated inside a time
/// window. The window is either an explicit `--since` RFC3339 timestamp
/// (up to now), or an ISO week given via `--week` (defaulting to the
/// current ISO week). Output is JSON or a human-readable report.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the window: `--since` takes precedence over `--week`.
    // `week_label` is Some only in week mode (used for the report header).
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Classification buckets; `cumulative` counts all findings created
    // before the window's end (running total of claims to date).
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparsable timestamps are silently skipped (finding is ignored
        // for window classification rather than aborting the diff).
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        // "added" takes precedence over "updated": a finding created in the
        // window is never also reported as updated (note the `continue`).
        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            // A newly-added finding counts as a contradiction if it is
            // flagged contested or typed as a tension.
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Project a finding list into the compact JSON summary records used by
    // the "added"/"updated"/"new_contradictions" payload arrays.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable report: header, totals, then per-finding lines.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12853
/// Truncate `s` to at most `n` characters (Unicode scalar values, not
/// bytes), appending `…` when the string was shortened.
///
/// The result is always ≤ `n` characters: the ellipsis occupies the last
/// slot. The previous implementation returned `"…"` (one character) for
/// `n == 0`, exceeding the requested limit; that edge case now yields an
/// empty string.
fn truncate(s: &str, n: usize) -> String {
    if s.chars().count() <= n {
        return s.to_string();
    }
    if n == 0 {
        // Nothing fits — not even the ellipsis.
        return String::new();
    }
    // Reserve the final slot for the ellipsis marker.
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
12863
12864fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12866 use chrono::Datelike;
12867 let iso = d.iso_week();
12868 format!("{:04}-W{:02}", iso.year(), iso.week())
12869}
12870
12871fn iso_week_bounds(
12874 key: &str,
12875) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12876 let (year_str, week_str) = key
12877 .split_once("-W")
12878 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12879 let year: i32 = year_str
12880 .parse()
12881 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12882 let week: u32 = week_str
12883 .parse()
12884 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12885 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12886 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12887 let next_monday = monday + chrono::Duration::days(7);
12888 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12889 let end = next_monday
12890 .and_hms_opt(0, 0, 0)
12891 .expect("00:00 valid")
12892 .and_utc();
12893 Ok((start, end))
12894}
12895
12896fn cmd_registry(action: RegistryAction) {
12901 use crate::registry;
12902 let default_registry = || -> PathBuf {
12903 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
12904 PathBuf::from(home)
12905 .join(".vela")
12906 .join("registry")
12907 .join("entries.json")
12908 };
12909 match action {
12910 RegistryAction::DependsOn { vfr_id, from, json } => {
12911 let base = from.trim_end_matches('/');
12912 let url = format!("{base}/entries/{vfr_id}/depends-on");
12913 let client = reqwest::blocking::Client::builder()
12914 .timeout(std::time::Duration::from_secs(30))
12915 .build()
12916 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12917 let resp = client
12918 .get(&url)
12919 .send()
12920 .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
12921 if !resp.status().is_success() {
12922 fail(&format!("GET {url}: HTTP {}", resp.status()));
12923 }
12924 let body: serde_json::Value = resp
12925 .json()
12926 .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
12927 if json {
12928 println!(
12929 "{}",
12930 serde_json::to_string_pretty(&body).expect("serialize")
12931 );
12932 } else {
12933 let dependents = body
12934 .get("dependents")
12935 .and_then(|v| v.as_array())
12936 .cloned()
12937 .unwrap_or_default();
12938 let count = dependents.len();
12939 println!(
12940 "{} {count} {} on {vfr_id}",
12941 style::ok("registry"),
12942 if count == 1 {
12943 "frontier depends"
12944 } else {
12945 "frontiers depend"
12946 },
12947 );
12948 for e in &dependents {
12949 let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
12950 let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
12951 let o = e
12952 .get("owner_actor_id")
12953 .and_then(|v| v.as_str())
12954 .unwrap_or("?");
12955 println!(" {v} {n} ({o})");
12956 }
12957 }
12958 }
12959 RegistryAction::Mirror {
12960 vfr_id,
12961 from,
12962 to,
12963 json,
12964 } => {
12965 let src_base = from.trim_end_matches('/');
12966 let dst_base = to.trim_end_matches('/');
12967 let src_url = format!("{src_base}/entries/{vfr_id}");
12968 let dst_url = format!("{dst_base}/entries");
12969 let client = reqwest::blocking::Client::builder()
12970 .timeout(std::time::Duration::from_secs(30))
12971 .build()
12972 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12973
12974 let entry: serde_json::Value = client
12975 .get(&src_url)
12976 .send()
12977 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12978 .error_for_status()
12979 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12980 .json()
12981 .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
12982
12983 let resp = client
12984 .post(&dst_url)
12985 .header("content-type", "application/json")
12986 .body(
12987 serde_json::to_vec(&entry)
12988 .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
12989 )
12990 .send()
12991 .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
12992 let status = resp.status();
12993 if !status.is_success() {
12994 let body = resp.text().unwrap_or_default();
12995 fail(&format!(
12996 "POST {dst_url}: HTTP {status}: {}",
12997 body.chars().take(300).collect::<String>()
12998 ));
12999 }
13000 let body: serde_json::Value = resp
13001 .json()
13002 .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
13003 let duplicate = body
13004 .get("duplicate")
13005 .and_then(serde_json::Value::as_bool)
13006 .unwrap_or(false);
13007 let payload = json!({
13008 "ok": true,
13009 "command": "registry.mirror",
13010 "vfr_id": vfr_id,
13011 "from": src_base,
13012 "to": dst_base,
13013 "duplicate_on_destination": duplicate,
13014 "destination_response": body,
13015 });
13016 if json {
13017 println!(
13018 "{}",
13019 serde_json::to_string_pretty(&payload).expect("serialize")
13020 );
13021 } else {
13022 println!(
13023 "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
13024 style::ok("registry"),
13025 if duplicate {
13026 " (duplicate; signature already known)"
13027 } else {
13028 " (fresh insert)"
13029 }
13030 );
13031 }
13032 }
13033 RegistryAction::List { from, json } => {
13034 let (label, registry_data) = match &from {
13037 Some(loc) if loc.starts_with("http") => (
13038 loc.clone(),
13039 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
13040 ),
13041 Some(loc) => {
13042 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
13043 (
13044 p.display().to_string(),
13045 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
13046 )
13047 }
13048 None => {
13049 let p = default_registry();
13050 (
13051 p.display().to_string(),
13052 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
13053 )
13054 }
13055 };
13056 let r = registry_data;
13057 let path_label = label;
13058 if json {
13059 let payload = json!({
13060 "ok": true,
13061 "command": "registry.list",
13062 "registry": path_label,
13063 "entry_count": r.entries.len(),
13064 "entries": r.entries,
13065 });
13066 println!(
13067 "{}",
13068 serde_json::to_string_pretty(&payload)
13069 .expect("failed to serialize registry.list")
13070 );
13071 } else {
13072 println!();
13073 println!(
13074 " {}",
13075 format!("VELA · REGISTRY · LIST · {}", path_label)
13076 .to_uppercase()
13077 .dimmed()
13078 );
13079 println!(" {}", style::tick_row(60));
13080 if r.entries.is_empty() {
13081 println!(" (registry is empty)");
13082 } else {
13083 for entry in &r.entries {
13084 println!(
13085 " {} {} ({}) by {} published {}",
13086 entry.vfr_id,
13087 entry.name,
13088 entry.network_locator,
13089 entry.owner_actor_id,
13090 entry.signed_publish_at
13091 );
13092 }
13093 }
13094 }
13095 }
13096 RegistryAction::Publish {
13097 frontier,
13098 owner,
13099 key,
13100 locator,
13101 to,
13102 json,
13103 } => {
13104 let key_hex = std::fs::read_to_string(&key)
13107 .map(|s| s.trim().to_string())
13108 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
13109 let signing_key = parse_signing_key(&key_hex);
13110 let derived = hex::encode(signing_key.verifying_key().to_bytes());
13111
13112 let mut frontier_data =
13114 repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
13115
13116 let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
13117 Some(actor) => actor.public_key.clone(),
13118 None => {
13119 eprintln!(
13127 " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
13128 &derived[..16]
13129 );
13130 frontier_data.actors.push(sign::ActorRecord {
13131 id: owner.clone(),
13132 public_key: derived.clone(),
13133 algorithm: "ed25519".to_string(),
13134 created_at: chrono::Utc::now().to_rfc3339(),
13135 tier: None,
13136 orcid: None,
13137 access_clearance: None,
13138 });
13139 repo::save_to_path(&frontier, &frontier_data)
13140 .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
13141 derived.clone()
13142 }
13143 };
13144
13145 let snapshot_hash = events::snapshot_hash(&frontier_data);
13149 let event_log_hash = events::event_log_hash(&frontier_data.events);
13150 let vfr_id = frontier_data.frontier_id();
13151 let name = frontier_data.project.name.clone();
13152
13153 if derived != pubkey {
13155 fail(&format!(
13156 "private key does not match registered pubkey for owner '{owner}'"
13157 ));
13158 }
13159
13160 let to_is_remote = matches!(
13168 to.as_deref(),
13169 Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
13170 );
13171 let resolved_locator = match locator {
13172 Some(l) => l,
13173 None => {
13174 if to_is_remote {
13175 let hub = to.as_deref().unwrap().trim_end_matches('/');
13176 let hub_root = hub.trim_end_matches("/entries");
13177 format!("{hub_root}/entries/{vfr_id}/snapshot")
13178 } else {
13179 fail_return(
13180 "--locator is required for local publishes; pass e.g. \
13181 --locator file:///path/to/frontier.json or an HTTPS URL.",
13182 )
13183 }
13184 }
13185 };
13186
13187 let mut entry = registry::RegistryEntry {
13188 schema: registry::ENTRY_SCHEMA.to_string(),
13189 vfr_id: vfr_id.clone(),
13190 name: name.clone(),
13191 owner_actor_id: owner.clone(),
13192 owner_pubkey: pubkey,
13193 latest_snapshot_hash: snapshot_hash,
13194 latest_event_log_hash: event_log_hash,
13195 network_locator: resolved_locator,
13196 signed_publish_at: chrono::Utc::now().to_rfc3339(),
13197 signature: String::new(),
13198 };
13199 entry.signature =
13200 registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
13201
13202 let (registry_label, duplicate) = if to_is_remote {
13203 let hub_url = to.clone().unwrap();
13204 let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
13208 .unwrap_or_else(|e| fail_return(&e));
13209 (hub_url, resp.duplicate)
13210 } else {
13211 let registry_path = match &to {
13212 Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
13213 None => default_registry(),
13214 };
13215 registry::publish_entry(®istry_path, entry.clone())
13216 .unwrap_or_else(|e| fail_return(&e));
13217 (registry_path.display().to_string(), false)
13218 };
13219
13220 let payload = json!({
13221 "ok": true,
13222 "command": "registry.publish",
13223 "registry": registry_label,
13224 "vfr_id": vfr_id,
13225 "name": name,
13226 "owner": owner,
13227 "snapshot_hash": entry.latest_snapshot_hash,
13228 "event_log_hash": entry.latest_event_log_hash,
13229 "signed_publish_at": entry.signed_publish_at,
13230 "signature": entry.signature,
13231 "duplicate": duplicate,
13232 });
13233 if json {
13234 println!(
13235 "{}",
13236 serde_json::to_string_pretty(&payload)
13237 .expect("failed to serialize registry.publish")
13238 );
13239 } else {
13240 let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
13241 println!(
13242 "{} published {vfr_id} → {}{}",
13243 style::ok("registry"),
13244 registry_label,
13245 dup_suffix
13246 );
13247 println!(" snapshot: {}", entry.latest_snapshot_hash);
13248 println!(" event_log: {}", entry.latest_event_log_hash);
13249 println!(" signature: {}…", &entry.signature[..16]);
13250 }
13251 }
13252 RegistryAction::Pull {
13253 vfr_id,
13254 from,
13255 out,
13256 transitive,
13257 depth,
13258 json,
13259 } => {
13260 let (registry_label, registry_data) = match &from {
13264 Some(loc) if loc.starts_with("http") => (
13265 loc.clone(),
13266 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
13267 ),
13268 Some(loc) => {
13269 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
13270 (
13271 p.display().to_string(),
13272 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
13273 )
13274 }
13275 None => {
13276 let p = default_registry();
13277 (
13278 p.display().to_string(),
13279 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
13280 )
13281 }
13282 };
13283 let entry = registry::find_latest(®istry_data, &vfr_id)
13284 .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
13285
13286 if transitive {
13287 let result = registry::pull_transitive(®istry_data, &vfr_id, &out, depth)
13291 .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
13292
13293 let dep_paths_json: serde_json::Value = serde_json::Value::Object(
13294 result
13295 .deps
13296 .iter()
13297 .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
13298 .collect(),
13299 );
13300 let payload = json!({
13301 "ok": true,
13302 "command": "registry.pull",
13303 "registry": registry_label,
13304 "vfr_id": vfr_id,
13305 "transitive": true,
13306 "depth": depth,
13307 "out_dir": out.display().to_string(),
13308 "primary": result.primary_path.display().to_string(),
13309 "verified": result.verified,
13310 "deps": dep_paths_json,
13311 });
13312 if json {
13313 println!(
13314 "{}",
13315 serde_json::to_string_pretty(&payload)
13316 .expect("failed to serialize registry.pull")
13317 );
13318 } else {
13319 println!(
13320 "{} pulled {vfr_id} (transitive) → {}",
13321 style::ok("registry"),
13322 out.display()
13323 );
13324 println!(" verified {} frontier(s):", result.verified.len());
13325 for v in &result.verified {
13326 println!(" · {v}");
13327 }
13328 println!(" every cross-frontier dependency's pinned snapshot hash matched");
13329 }
13330 return;
13331 }
13332
13333 registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
13336 .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
13337 registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
13338 let _ = std::fs::remove_file(&out);
13339 fail_return(&format!("pull verification failed: {e}"))
13340 });
13341
13342 let payload = json!({
13343 "ok": true,
13344 "command": "registry.pull",
13345 "registry": registry_label,
13346 "vfr_id": vfr_id,
13347 "out": out.display().to_string(),
13348 "snapshot_hash": entry.latest_snapshot_hash,
13349 "event_log_hash": entry.latest_event_log_hash,
13350 "verified": true,
13351 });
13352 if json {
13353 println!(
13354 "{}",
13355 serde_json::to_string_pretty(&payload)
13356 .expect("failed to serialize registry.pull")
13357 );
13358 } else {
13359 println!(
13360 "{} pulled {vfr_id} → {}",
13361 style::ok("registry"),
13362 out.display()
13363 );
13364 println!(" verified snapshot+event_log hashes match registry; signature ok");
13365 }
13366 }
13367 }
13368}
13369
13370fn print_stats_json(path: &Path) {
13371 let frontier = load_frontier_or_fail(path);
13372 let source_hash = hash_path_or_fail(path);
13373 let payload = json!({
13374 "ok": true,
13375 "command": "stats",
13376 "schema_version": project::VELA_SCHEMA_VERSION,
13377 "frontier": {
13378 "name": &frontier.project.name,
13379 "description": &frontier.project.description,
13380 "source": path.display().to_string(),
13381 "hash": format!("sha256:{source_hash}"),
13382 "compiled_at": &frontier.project.compiled_at,
13383 "compiler": &frontier.project.compiler,
13384 "papers_processed": frontier.project.papers_processed,
13385 "errors": frontier.project.errors,
13386 },
13387 "stats": frontier.stats,
13388 "proposals": proposals::summary(&frontier),
13389 "proof_state": frontier.proof_state,
13390 });
13391 println!(
13392 "{}",
13393 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
13394 );
13395}
13396
/// Run the `search` command.
///
/// Dispatch order: `--all <dir>` searches every frontier in a directory and
/// returns immediately; otherwise `--source` is required (hard failure if
/// absent). With `--json` the results are re-queried and wrapped in a report
/// that pins the frontier's name and sha256; otherwise the pretty printer in
/// `search::run` handles output.
fn cmd_search(
    source: Option<&Path>,
    query: &str,
    entity: Option<&str>,
    assertion_type: Option<&str>,
    all: Option<&Path>,
    limit: usize,
    json_output: bool,
) {
    // --all takes precedence over --source; note the JSON flag is ignored here.
    if let Some(dir) = all {
        search::run_all(dir, query, entity, assertion_type, limit);
        return;
    }
    let Some(src) = source else {
        fail("Provide --source <frontier> or --all <directory>.");
    };
    if json_output {
        let results = search::search(src, query, entity, assertion_type, limit);
        // Loaded again only to stamp the report with name + content hash.
        let loaded = load_frontier_or_fail(src);
        let source_hash = hash_path_or_fail(src);
        let payload = json!({
            "ok": true,
            "command": "search",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "query": query,
            "frontier": {
                "name": &loaded.project.name,
                "source": src.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "entity": entity,
                "assertion_type": assertion_type,
                "limit": limit,
            },
            "count": results.len(),
            "results": results.iter().map(|result| json!({
                "id": &result.id,
                "score": result.score,
                "assertion": &result.assertion,
                "assertion_type": &result.assertion_type,
                "confidence": result.confidence,
                "entities": &result.entities,
                "doi": &result.doi,
            })).collect::<Vec<_>>()
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
        );
    } else {
        search::run(src, query, entity, assertion_type, limit);
    }
}
13451
/// Run the `tensions` command: analyze contradictory finding pairs in the
/// frontier at `source`, filtered by `both_high` / `cross_domain`, keeping the
/// top `top` pairs. JSON output embeds the frontier hash and both findings of
/// each tension; pretty output delegates to `tensions::print_tensions`.
fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
    let frontier = load_frontier_or_fail(source);
    let result = tensions::analyze(&frontier, both_high, cross_domain, top);
    if json_output {
        // Hash is computed only on the JSON path; pretty output omits provenance.
        let source_hash = hash_path_or_fail(source);
        let payload = json!({
            "ok": true,
            "command": "tensions",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": source.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "both_high": both_high,
                "cross_domain": cross_domain,
                "top": top,
            },
            "count": result.len(),
            "tensions": result.iter().map(|t| json!({
                "score": t.score,
                "resolved": t.resolved,
                "superseding_id": &t.superseding_id,
                "finding_a": {
                    "id": &t.finding_a.id,
                    "assertion": &t.finding_a.assertion,
                    "confidence": t.finding_a.confidence,
                    "assertion_type": &t.finding_a.assertion_type,
                    "citation_count": t.finding_a.citation_count,
                    "contradicts_count": t.finding_a.contradicts_count,
                },
                "finding_b": {
                    "id": &t.finding_b.id,
                    "assertion": &t.finding_b.assertion,
                    "confidence": t.finding_b.confidence,
                    "assertion_type": &t.finding_b.assertion_type,
                    "citation_count": t.finding_b.citation_count,
                    "contradicts_count": t.finding_b.contradicts_count,
                }
            })).collect::<Vec<_>>()
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
        );
    } else {
        tensions::print_tensions(&result);
    }
}
13502
13503fn cmd_gaps(action: GapsAction) {
13504 match action {
13505 GapsAction::Rank {
13506 frontier,
13507 top,
13508 domain,
13509 json,
13510 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
13511 }
13512}
13513
13514fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
13515 let frontier = load_frontier_or_fail(frontier_path);
13516 let mut ranked = frontier
13517 .findings
13518 .iter()
13519 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
13520 .filter(|finding| {
13521 domain.is_none_or(|domain| {
13522 finding
13523 .assertion
13524 .text
13525 .to_lowercase()
13526 .contains(&domain.to_lowercase())
13527 || finding
13528 .assertion
13529 .entities
13530 .iter()
13531 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
13532 })
13533 })
13534 .map(|finding| {
13535 let dependency_count = frontier
13536 .findings
13537 .iter()
13538 .flat_map(|candidate| candidate.links.iter())
13539 .filter(|link| link.target == finding.id)
13540 .count();
13541 let score = dependency_count as f64 + finding.confidence.score;
13542 json!({
13543 "id": &finding.id,
13544 "kind": "candidate_gap_review_lead",
13545 "assertion": &finding.assertion.text,
13546 "score": score,
13547 "dependency_count": dependency_count,
13548 "confidence": finding.confidence.score,
13549 "evidence_type": &finding.evidence.evidence_type,
13550 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
13551 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
13552 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
13553 })
13554 })
13555 .collect::<Vec<_>>();
13556 ranked.sort_by(|a, b| {
13557 b.get("score")
13558 .and_then(Value::as_f64)
13559 .partial_cmp(&a.get("score").and_then(Value::as_f64))
13560 .unwrap_or(std::cmp::Ordering::Equal)
13561 });
13562 ranked.truncate(top);
13563 if json_output {
13564 let source_hash = hash_path_or_fail(frontier_path);
13565 let payload = json!({
13566 "ok": true,
13567 "command": "gaps rank",
13568 "schema_version": project::VELA_SCHEMA_VERSION,
13569 "frontier": {
13570 "name": &frontier.project.name,
13571 "source": frontier_path.display().to_string(),
13572 "hash": format!("sha256:{source_hash}"),
13573 },
13574 "filters": {
13575 "top": top,
13576 "domain": domain,
13577 },
13578 "count": ranked.len(),
13579 "ranking_label": "candidate gap review leads",
13580 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
13581 "review_leads": ranked.clone(),
13582 "gaps": ranked,
13583 });
13584 println!(
13585 "{}",
13586 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
13587 );
13588 } else {
13589 println!();
13590 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
13591 println!(" {}", style::tick_row(60));
13592 println!(" review source scope; these are not guaranteed experiment targets.");
13593 println!();
13594 for (idx, gap) in ranked.iter().enumerate() {
13595 println!(
13596 " {}. [{}] score={} {}",
13597 idx + 1,
13598 gap["id"].as_str().unwrap_or("?"),
13599 gap["score"].as_f64().unwrap_or(0.0),
13600 gap["assertion"].as_str().unwrap_or("")
13601 );
13602 }
13603 }
13604}
13605
/// Run the `bridge` command: load ≥2 frontiers, detect cross-frontier entity
/// bridges, and optionally run a rough PubMed prior-art count for the top
/// `top_n` bridges (rate-limited with a 350 ms sleep between requests).
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        // Frontiers are labeled by file stem in the report.
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    // Borrow as (&str, &Project) pairs for the detector.
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            match bridge::check_novelty(&client, &query).await {
                Ok(count) => bridge_item.pubmed_count = Some(count),
                // A failed check is reported but does not abort the run.
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // Be polite to the NCBI rate limiter.
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
13651
/// Collected CLI arguments for the `bench` command (see `cmd_bench` for how
/// the mutually exclusive modes are resolved).
struct BenchArgs {
    // Frontier under test; cmd_bench falls back to a default path when None.
    frontier: Option<PathBuf>,
    // Gold file for finding-level benchmarking.
    gold: Option<PathBuf>,
    // Gold file for entity-level benchmarking.
    entity_gold: Option<PathBuf>,
    // Gold file for link-level benchmarking.
    link_gold: Option<PathBuf>,
    // Benchmark suite definition; takes precedence over the gold files.
    suite: Option<PathBuf>,
    // When true, only report whether the suite inputs are present/valid.
    suite_ready: bool,
    // Threshold overrides; ignored when no_thresholds is set.
    min_f1: Option<f64>,
    min_precision: Option<f64>,
    min_recall: Option<f64>,
    no_thresholds: bool,
    // Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
13665
/// Run the agent state-update benchmark: compare `candidate` against `gold`,
/// optionally write the JSON report to `report_path`, print JSON or a pretty
/// summary, and exit 1 when the benchmark fails (or cannot run).
fn cmd_agent_bench(
    gold: &Path,
    candidate: &Path,
    sources: Option<&Path>,
    threshold: Option<f64>,
    report_path: Option<&Path>,
    json_out: bool,
) {
    let input = crate::agent_bench::BenchInput {
        gold_path: gold.to_path_buf(),
        candidate_path: candidate.to_path_buf(),
        sources: sources.map(Path::to_path_buf),
        // Missing threshold means "no minimum".
        threshold: threshold.unwrap_or(0.0),
    };
    let report = match crate::agent_bench::run(input) {
        Ok(r) => r,
        Err(e) => {
            eprintln!("{} bench failed: {e}", style::err_prefix());
            std::process::exit(1);
        }
    };

    // Serialization failure degrades to an empty string rather than aborting.
    let json = serde_json::to_string_pretty(&report).unwrap_or_default();
    // A failed report write is a warning only; the run result still decides
    // the exit code below.
    if let Some(path) = report_path
        && let Err(e) = std::fs::write(path, &json)
    {
        eprintln!(
            "{} failed to write report to {}: {e}",
            style::err_prefix(),
            path.display()
        );
    }

    if json_out {
        println!("{json}");
    } else {
        println!();
        println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
        println!(" {}", style::tick_row(60));
        print!("{}", crate::agent_bench::render_pretty(&report));
        println!();
    }

    if !report.pass {
        std::process::exit(1);
    }
}
13717
/// Run the `bench` command.
///
/// Mode precedence: `--suite-ready` (readiness report only) → `--suite`
/// (full suite) → one of `--link-gold` / `--entity-gold` / `--gold` against a
/// single frontier. Every failing benchmark exits with status 1; providing
/// none of the inputs is a hard failure.
fn cmd_bench(args: BenchArgs) {
    if args.suite_ready {
        let suite_path = args
            .suite
            .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
        let payload =
            benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
        // suite-ready is always JSON, regardless of args.json.
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }
    if let Some(suite_path) = args.suite {
        let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
        if args.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&payload)
                    .expect("failed to serialize benchmark suite")
            );
        } else {
            let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
            let metrics = payload.get("metrics").unwrap_or(&Value::Null);
            println!();
            println!(" {}", "VELA · BENCH · SUITE".dimmed());
            println!(" {}", style::tick_row(60));
            println!(" suite: {}", suite_path.display());
            println!(
                " status: {}",
                if ok {
                    style::ok("pass")
                } else {
                    style::lost("fail")
                }
            );
            println!(
                " tasks: {}/{} passed",
                metrics
                    .get("tasks_passed")
                    .and_then(Value::as_u64)
                    .unwrap_or(0),
                metrics
                    .get("tasks_total")
                    .and_then(Value::as_u64)
                    .unwrap_or(0)
            );
        }
        // Exit code follows the payload's "ok" in both output modes.
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    let frontier = args
        .frontier
        .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
    // --no-thresholds clears every minimum; otherwise min_f1 defaults to 0.05
    // while precision/recall stay opt-in.
    let thresholds = benchmark::BenchmarkThresholds {
        min_f1: if args.no_thresholds {
            None
        } else {
            args.min_f1.or(Some(0.05))
        },
        min_precision: if args.no_thresholds {
            None
        } else {
            args.min_precision
        },
        min_recall: if args.no_thresholds {
            None
        } else {
            args.min_recall
        },
        ..Default::default()
    };
    if let Some(path) = args.link_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Link,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.entity_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Entity,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.gold {
        if args.json {
            print_benchmark_or_exit(benchmark::task_envelope(
                &frontier,
                None,
                benchmark::BenchmarkMode::Finding,
                Some(&path),
                &thresholds,
                None,
            ));
        } else {
            // Pretty finding-mode output goes through the legacy runner.
            benchmark::run(&frontier, &path, false);
        }
    } else {
        fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
    }
}
13831
13832fn print_benchmark_or_exit(result: Result<Value, String>) {
13833 let payload = result.unwrap_or_else(|e| fail_return(&e));
13834 println!(
13835 "{}",
13836 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13837 );
13838 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13839 std::process::exit(1);
13840 }
13841}
13842
13843fn cmd_packet(action: PacketAction) {
13844 let (result, json_output) = match action {
13845 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13846 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13847 };
13848 match result {
13849 Ok(output) if json_output => {
13850 println!(
13851 "{}",
13852 serde_json::to_string_pretty(&json!({
13853 "ok": true,
13854 "command": "packet",
13855 "result": output,
13856 }))
13857 .expect("failed to serialize packet response")
13858 );
13859 }
13860 Ok(output) => println!("{output}"),
13861 Err(e) => fail(&e),
13862 }
13863}
13864
13865fn cmd_verify(path: &Path, json_output: bool) {
13870 let result = packet::validate(path);
13871 match result {
13872 Ok(output) if json_output => {
13873 println!(
13874 "{}",
13875 serde_json::to_string_pretty(&json!({
13876 "ok": true,
13877 "command": "verify",
13878 "result": output,
13879 }))
13880 .expect("failed to serialize verify response")
13881 );
13882 }
13883 Ok(output) => {
13884 println!("{output}");
13885 println!(
13886 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13887 );
13888 }
13889 Err(e) => fail(&e),
13890 }
13891}
13892
13893fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13894 if path.join(".vela").exists() {
13895 fail(&format!(
13896 "already initialized: {} exists",
13897 path.join(".vela").display()
13898 ));
13899 }
13900 let payload = frontier_repo::initialize(
13901 path,
13902 frontier_repo::InitOptions {
13903 name,
13904 template,
13905 initialize_git,
13906 },
13907 )
13908 .unwrap_or_else(|e| fail_return(&e));
13909 if json_output {
13910 println!(
13911 "{}",
13912 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13913 );
13914 } else {
13915 println!(
13916 "{} initialized frontier repository in {}",
13917 style::ok("ok"),
13918 path.display()
13919 );
13920 }
13921}
13922
13923fn cmd_quickstart(
13930 path: &Path,
13931 name: &str,
13932 reviewer: &str,
13933 assertion: Option<&str>,
13934 keys_out: Option<&Path>,
13935 json_output: bool,
13936) {
13937 use std::process::Command;
13938
13939 if path.join(".vela").exists() {
13940 fail(&format!(
13941 "already initialized: {} exists",
13942 path.join(".vela").display()
13943 ));
13944 }
13945
13946 let exe = std::env::current_exe()
13947 .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
13948 let keys_dir = keys_out
13949 .map(Path::to_path_buf)
13950 .unwrap_or_else(|| path.join("keys"));
13951 let assertion_text =
13952 assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");
13953
13954 let run_step = |label: &str, args: &[&str]| -> std::process::Output {
13955 let out = Command::new(&exe)
13956 .args(args)
13957 .output()
13958 .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
13959 if !out.status.success() {
13960 let stderr = String::from_utf8_lossy(&out.stderr);
13961 fail(&format!("{label} failed:\n{stderr}"));
13962 }
13963 out
13964 };
13965
13966 run_step(
13968 "init",
13969 &[
13970 "init",
13971 path.to_string_lossy().as_ref(),
13972 "--name",
13973 name,
13974 "--no-git",
13975 "--json",
13976 ],
13977 );
13978
13979 let keys_out_str = keys_dir.to_string_lossy().into_owned();
13981 let keypair_out = run_step(
13982 "sign.generate-keypair",
13983 &[
13984 "sign",
13985 "generate-keypair",
13986 "--out",
13987 keys_out_str.as_ref(),
13988 "--json",
13989 ],
13990 );
13991 let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
13992 .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
13993 let public_key = keypair_json
13994 .get("public_key")
13995 .and_then(|v| v.as_str())
13996 .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
13997 .to_string();
13998
13999 run_step(
14001 "actor.add",
14002 &[
14003 "actor",
14004 "add",
14005 path.to_string_lossy().as_ref(),
14006 reviewer,
14007 "--pubkey",
14008 public_key.as_str(),
14009 "--json",
14010 ],
14011 );
14012
14013 let finding_out = run_step(
14015 "finding.add",
14016 &[
14017 "finding",
14018 "add",
14019 path.to_string_lossy().as_ref(),
14020 "--assertion",
14021 assertion_text,
14022 "--author",
14023 reviewer,
14024 "--apply",
14025 "--json",
14026 ],
14027 );
14028 let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
14029 .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
14030 let finding_id = finding_json
14031 .get("finding_id")
14032 .and_then(|v| v.as_str())
14033 .map(str::to_string);
14034
14035 if json_output {
14036 let payload = json!({
14037 "ok": true,
14038 "command": "quickstart",
14039 "frontier": path.display().to_string(),
14040 "name": name,
14041 "reviewer": reviewer,
14042 "public_key": public_key,
14043 "keys_dir": keys_dir.display().to_string(),
14044 "finding_id": finding_id,
14045 "next_steps": [
14046 format!("vela serve {}", path.display()),
14047 format!(
14048 "vela ingest <paper.pdf|doi:...> --frontier {}",
14049 path.display()
14050 ),
14051 format!("vela log {}", path.display()),
14052 ],
14053 });
14054 println!(
14055 "{}",
14056 serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
14057 );
14058 return;
14059 }
14060
14061 println!();
14062 println!(
14063 " {}",
14064 format!("VELA · QUICKSTART · {}", path.display())
14065 .to_uppercase()
14066 .dimmed()
14067 );
14068 println!(" {}", style::tick_row(60));
14069 println!(" frontier: {}", path.display());
14070 println!(" name: {name}");
14071 println!(" reviewer: {reviewer}");
14072 println!(" keys: {}", keys_dir.display());
14073 println!(" pubkey: {}…", &public_key[..16]);
14074 if let Some(id) = finding_id.as_deref() {
14075 println!(" finding: {id}");
14076 }
14077 println!();
14078 println!(" {}", style::ok("done"));
14079 println!(" next:");
14080 println!(" vela serve {}", path.display());
14081 println!(
14082 " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
14083 path.display()
14084 );
14085 println!(" vela log {}", path.display());
14086 println!();
14087}
14088
/// Run the `lock` command: with `--check` delegate to drift detection,
/// otherwise materialize the repo state (writing vela.lock — per the
/// materialize call; TODO confirm against `frontier_repo::materialize`) and
/// print the resulting snapshot / event-log / proposal-state hashes.
fn cmd_lock(path: &Path, check: bool, json_output: bool) {
    if check {
        cmd_lock_check(path, json_output);
        return;
    }
    let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "lock",
                "path": path.display().to_string(),
                "snapshot_hash": payload.get("snapshot_hash"),
                "event_log_hash": payload.get("event_log_hash"),
                "proposal_state_hash": payload.get("proposal_state_hash"),
            }))
            .expect("failed to serialize lock report")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · LOCK · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    // Missing hashes render as "?" rather than failing the command.
    println!(
        " snapshot_hash: {}",
        payload
            .get("snapshot_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        " event_log_hash: {}",
        payload
            .get("event_log_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        " proposal_state_hash: {}",
        payload
            .get("proposal_state_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!();
    println!(" {}", style::ok("locked"));
}
14147
/// Run `lock --check`: recompute the repo's snapshot and event-log hashes and
/// compare them against vela.lock. Reports any drift (JSON or pretty) and
/// exits 1 when the hashes differ; missing vela.lock is a hard failure.
fn cmd_lock_check(path: &Path, json_output: bool) {
    use crate::frontier_repo::read_lock;
    let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
    let Some(lock) = lock else {
        fail("lock --check: no vela.lock found at path");
    };
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
    // Recompute both hashes from the on-disk state in the same "sha256:" form
    // the lock file stores.
    let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
    let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
    let mut drift: Vec<String> = Vec::new();
    if lock.snapshot_hash != current_snapshot {
        drift.push(format!(
            "snapshot_hash: lock={} current={}",
            lock.snapshot_hash, current_snapshot
        ));
    }
    if lock.event_log_hash != current_event_log {
        drift.push(format!(
            "event_log_hash: lock={} current={}",
            lock.event_log_hash, current_event_log
        ));
    }
    let ok = drift.is_empty();
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": ok,
                "command": "lock.check",
                "path": path.display().to_string(),
                "drift": drift,
                "lock_snapshot_hash": lock.snapshot_hash,
                "current_snapshot_hash": current_snapshot,
                "lock_event_log_hash": lock.event_log_hash,
                "current_event_log_hash": current_event_log,
                "dependency_count": lock.dependencies.len(),
            }))
            .expect("failed to serialize lock check report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · LOCK · CHECK · {}", path.display())
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        if ok {
            println!(" snapshot_hash: {}", lock.snapshot_hash);
            println!(" event_log_hash: {}", lock.event_log_hash);
            println!(" dependencies pinned: {}", lock.dependencies.len());
            println!();
            println!(" {} on-disk state matches vela.lock", style::ok("ok"));
        } else {
            println!(" {} drift detected:", style::err_prefix());
            for d in &drift {
                println!(" - {d}");
            }
        }
    }
    // Exit code reflects drift in both output modes.
    if !ok {
        std::process::exit(1);
    }
}
14213
/// Run the `doc` command: render a static documentation site for the repo at
/// `path` into `out` (default `<path>/doc`) and summarize what was written.
fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
    let out_dir = out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("doc"));
    let report =
        crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DOC · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" frontier_id: {}", report.frontier_id);
    println!(" out: {}", report.out);
    println!(" files written: {}", report.files_written);
    println!(" findings: {}", report.findings_documented);
    println!(" events: {}", report.events_documented);
    println!();
    println!(
        " {} open {}/index.html in a browser",
        style::ok("ok"),
        report.out
    );
}
14252
14253fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
14254 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
14255 let target = into
14256 .map(Path::to_path_buf)
14257 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
14258 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
14259 println!(
14260 "{} {} findings · {}",
14261 style::ok("imported"),
14262 frontier.findings.len(),
14263 target.display()
14264 );
14265}
14266
14267fn cmd_locator_repair(
14268 path: &Path,
14269 atom_id: &str,
14270 locator_override: Option<&str>,
14271 reviewer: &str,
14272 reason: &str,
14273 apply: bool,
14274 json_output: bool,
14275) {
14276 let report = state::repair_evidence_atom_locator(
14277 path,
14278 atom_id,
14279 locator_override,
14280 reviewer,
14281 reason,
14282 apply,
14283 )
14284 .unwrap_or_else(|e| fail_return(&e));
14285 print_state_report(&report, json_output);
14286}
14287
/// Run the `source-fetch` command: normalize the identifier, serve a cached
/// record when one exists under `cache_root` (keyed by sha256 of the
/// normalized id) unless `refresh` is set, otherwise fetch fresh metadata,
/// cache it, and emit it to `out_path` or stdout.
async fn cmd_source_fetch(
    identifier: &str,
    cache_root: Option<&Path>,
    out_path: Option<&Path>,
    refresh: bool,
    _json_output: bool,
) {
    // Shadows the file-level sha2 import; kept local for clarity of use.
    use sha2::{Digest, Sha256};

    let normalized = normalize_source_identifier(identifier);
    // Cache file: <root>/sources/cache/<sha256(normalized)>.json
    let cache_path = cache_root.map(|root| {
        let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
        root.join("sources")
            .join("cache")
            .join(format!("{hash}.json"))
    });

    // Cache hit: emit the stored body verbatim, no network.
    if !refresh
        && let Some(p) = cache_path.as_ref()
        && p.is_file()
    {
        let body = std::fs::read_to_string(p)
            .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
        emit_source_fetch_result(&body, out_path);
        return;
    }

    let result = fetch_source_metadata(&normalized).await;
    let json = match result {
        Ok(value) => serde_json::to_string_pretty(&value)
            .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
        Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
    };

    // Best-effort cache write happens before emission; failures here abort.
    if let Some(p) = cache_path.as_ref() {
        if let Some(parent) = p.parent() {
            std::fs::create_dir_all(parent)
                .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
        }
        std::fs::write(p, &json)
            .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
    }
    emit_source_fetch_result(&json, out_path);
}
14336
14337fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
14338 if let Some(p) = out_path {
14339 if let Some(parent) = p.parent() {
14340 let _ = std::fs::create_dir_all(parent);
14341 }
14342 std::fs::write(p, body)
14343 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
14344 } else {
14345 println!("{body}");
14346 }
14347}
14348
/// Canonicalize a user-supplied source identifier into a prefixed form.
///
/// Already-prefixed ids (`doi:`, `pmid:`, `nct:`, `pmc:`) pass through
/// trimmed. Bare DOIs (`10.…`) gain a `doi:` prefix, bare `NCT…`/`nct…`
/// registry ids become `nct:<suffix>` (uppercased, `NCT` stripped), and
/// all-digit strings are treated as PubMed ids. Anything else is returned
/// trimmed but otherwise untouched.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous implementation ended with `.split_at(0).0`,
        // which always yields the empty prefix, so every bare NCT id
        // normalized to the literal "nct:" with the registry number dropped.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    if trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
14377
/// Fetch metadata for a normalized identifier (`doi:`, `pmid:`, or `nct:`).
///
/// DOIs go to Crossref first; when Crossref has no abstract, the DOI is
/// resolved to a PMID and the PubMed abstract is spliced in (with an
/// `abstract_source` marker). NOTE(review): `normalize_source_identifier`
/// also accepts `pmc:` ids, but there is no `pmc:` branch here — those fall
/// through to the error below; confirm whether that is intentional.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Fallback chain: only if Crossref lacked an abstract AND the DOI
        // maps to exactly one PMID AND PubMed returned a record.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
14427
/// Resolve a DOI to a PubMed id via the NCBI esearch API.
///
/// Returns `None` on any transport/HTTP/JSON failure, and also when the
/// search matches anything other than exactly one PMID — an ambiguous match
/// is treated as no match rather than guessing.
async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
        urlencoding::encode(doi)
    );
    let resp = client.get(&url).send().await.ok()?;
    if !resp.status().is_success() {
        return None;
    }
    let body: Value = resp.json().await.ok()?;
    let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
    // Require an unambiguous single hit.
    if id_list.len() != 1 {
        return None;
    }
    id_list.first()?.as_str().map(|s| s.to_string())
}
14449
/// Fetch work metadata for a DOI from the Crossref REST API and flatten it
/// into the `vela.source_fetch.v0.1` record shape. Missing fields degrade to
/// empty strings / `null` rather than erroring; only transport, HTTP status,
/// and JSON decode failures return `Err`.
async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
    let url = format!("https://api.crossref.org/works/{doi}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("crossref get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("crossref returned {}", resp.status()));
    }
    let body: Value = resp
        .json()
        .await
        .map_err(|e| format!("crossref json: {e}"))?;
    // Crossref wraps the work under "message".
    let work = body.get("message").cloned().unwrap_or(Value::Null);
    // "title" is an array; take the first entry.
    let title = work
        .get("title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Crossref abstracts arrive as JATS XML; strip the tags to plain text.
    let abstract_html = work
        .get("abstract")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let abstract_text = strip_jats_tags(&abstract_html);
    // Publication year = first element of the first "date-parts" entry.
    let year = work
        .get("issued")
        .and_then(|v| v.get("date-parts"))
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_i64());
    let journal = work
        .get("container-title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Authors as "Given Family"; entries with neither name are dropped.
    let authors = work
        .get("author")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|a| {
                    let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
                    let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
                    let combined = format!("{given} {family}").trim().to_string();
                    if combined.is_empty() {
                        None
                    } else {
                        Some(combined)
                    }
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("doi:{doi}"),
        "source": "crossref",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": authors,
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14523
14524async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
14525 let url = format!(
14526 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
14527 );
14528 let resp = client
14529 .get(&url)
14530 .send()
14531 .await
14532 .map_err(|e| format!("pubmed get: {e}"))?;
14533 if !resp.status().is_success() {
14534 return Err(format!("pubmed returned {}", resp.status()));
14535 }
14536 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
14537 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
14538 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
14539 let year = extract_xml_text(&xml, "<Year>", "</Year>")
14540 .parse::<i64>()
14541 .ok();
14542 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
14543 Ok(json!({
14544 "schema": "vela.source_fetch.v0.1",
14545 "identifier": format!("pmid:{pmid}"),
14546 "source": "pubmed",
14547 "title": title,
14548 "abstract": abstract_text,
14549 "year": year,
14550 "journal": journal,
14551 "authors": Vec::<String>::new(),
14552 "retrieved_at": chrono::Utc::now().to_rfc3339(),
14553 }))
14554}
14555
14556async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
14557 let nct_clean = nct.trim();
14558 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
14559 nct_clean.to_uppercase()
14560 } else {
14561 format!("NCT{nct_clean}")
14562 };
14563 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
14564 let resp = client
14565 .get(&url)
14566 .send()
14567 .await
14568 .map_err(|e| format!("ctgov get: {e}"))?;
14569 if !resp.status().is_success() {
14570 return Err(format!("ctgov returned {}", resp.status()));
14571 }
14572 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
14573 let title = body
14574 .pointer("/protocolSection/identificationModule/briefTitle")
14575 .and_then(|v| v.as_str())
14576 .unwrap_or("")
14577 .to_string();
14578 let abstract_text = body
14579 .pointer("/protocolSection/descriptionModule/briefSummary")
14580 .and_then(|v| v.as_str())
14581 .unwrap_or("")
14582 .to_string();
14583 let phase = body
14584 .pointer("/protocolSection/designModule/phases")
14585 .and_then(|v| v.as_array())
14586 .and_then(|a| a.first())
14587 .and_then(|v| v.as_str())
14588 .unwrap_or("")
14589 .to_string();
14590 Ok(json!({
14591 "schema": "vela.source_fetch.v0.1",
14592 "identifier": format!("nct:{nct_id}"),
14593 "source": "clinicaltrials.gov",
14594 "title": title,
14595 "abstract": abstract_text,
14596 "year": Value::Null,
14597 "journal": phase,
14598 "authors": Vec::<String>::new(),
14599 "retrieved_at": chrono::Utc::now().to_rfc3339(),
14600 }))
14601}
14602
/// Return the trimmed text between the first occurrence of `open` and the
/// next `close` in `xml`, or an empty string when either tag is absent.
///
/// `open` is an exact opening tag such as `"<AbstractText>"`. When the
/// exact form is not present, a fallback also matches the same tag name
/// followed by attributes (e.g. `<AbstractText Label="BACKGROUND">`),
/// which PubMed emits for structured abstracts. The exact match is tried
/// first, so existing behavior is preserved.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    let body_start = match xml.find(open) {
        Some(at) => Some(at + open.len()),
        None => open.strip_suffix('>').and_then(|name| {
            // `"<Tag "` (tag name then a space) ⇒ opening tag with attributes.
            let at = xml.find(&format!("{name} "))?;
            let tag_end = xml[at..].find('>')?;
            Some(at + tag_end + 1)
        }),
    };
    if let Some(start) = body_start {
        let rest = &xml[start..];
        if let Some(end) = rest.find(close) {
            return rest[..end].trim().to_string();
        }
    }
    String::new()
}
14612
/// Drop all `<...>` tag markup from a JATS/HTML fragment and collapse the
/// surviving text to single-space-separated words.
///
/// The scan is purely character-based: `<` enters tag state, `>` leaves
/// it, and neither bracket ever reaches the output.
fn strip_jats_tags(html: &str) -> String {
    let mut inside_tag = false;
    let text: String = html
        .chars()
        .filter(|&ch| match ch {
            '<' => {
                inside_tag = true;
                false
            }
            '>' => {
                inside_tag = false;
                false
            }
            _ => !inside_tag,
        })
        .collect();
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
14626
/// CLI handler for `vela span-repair`: mechanically repair a finding's
/// missing evidence span via `state::repair_finding_span`.
///
/// `apply` distinguishes a dry run from a persisted repair; on error the
/// command aborts through `fail_return` instead of returning. The
/// resulting report prints as pretty JSON when `json_output` is set,
/// otherwise as the plain summary.
fn cmd_span_repair(
    path: &Path,
    finding_id: &str,
    section: &str,
    text: &str,
    reviewer: &str,
    reason: &str,
    apply: bool,
    json_output: bool,
) {
    let report =
        state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
            .unwrap_or_else(|e| fail_return(&e));
    print_state_report(&report, json_output);
}
14642
/// CLI handler for `vela entity-resolve`: resolve a named entity on a
/// finding to a canonical `source`/`id` pair, recording the match
/// confidence, an optionally differing matched name, and the resolution
/// method, under reviewer authority.
///
/// `apply` distinguishes a dry run from a persisted change; on error the
/// command aborts through `fail_return`. The report prints as pretty JSON
/// when `json_output` is set, otherwise as the plain summary.
#[allow(clippy::too_many_arguments)]
fn cmd_entity_resolve(
    path: &Path,
    finding_id: &str,
    entity_name: &str,
    source: &str,
    id: &str,
    confidence: f64,
    matched_name: Option<&str>,
    resolution_method: &str,
    reviewer: &str,
    reason: &str,
    apply: bool,
    json_output: bool,
) {
    let report = state::resolve_finding_entity(
        path,
        finding_id,
        entity_name,
        source,
        id,
        confidence,
        matched_name,
        resolution_method,
        reviewer,
        reason,
        apply,
    )
    .unwrap_or_else(|e| fail_return(&e));
    print_state_report(&report, json_output);
}
14674
/// CLI handler for `vela propagate`: simulate a retraction or a
/// confidence reduction for one finding and propagate the correction over
/// declared dependency links.
///
/// Exactly one of `retract` / `reduce_confidence` must carry a finding
/// id; a confidence reduction additionally requires `--to` within
/// [0.0, 1.0]. Generated review events are appended to the frontier,
/// stats are recomputed, and the result is saved to `output` (or back to
/// `path` when no override is given).
fn cmd_propagate(
    path: &Path,
    retract: Option<String>,
    reduce_confidence: Option<String>,
    to: Option<f64>,
    output: Option<&Path>,
) {
    let mut frontier = load_frontier_or_fail(path);
    // Pick the correction to run; `fail`/`fail_return` abort on bad flags,
    // so each branch either yields a triple or never returns.
    let (finding_id, action, label) = if let Some(id) = retract {
        (id, propagate::PropagationAction::Retracted, "retraction")
    } else if let Some(id) = reduce_confidence {
        let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
        if !(0.0..=1.0).contains(&score) {
            fail("--to must be between 0.0 and 1.0");
        }
        (
            id,
            propagate::PropagationAction::ConfidenceReduced { new_score: score },
            "confidence reduction",
        )
    } else {
        fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
    };
    if !frontier.findings.iter().any(|f| f.id == finding_id) {
        fail(&format!("finding not found: {finding_id}"));
    }
    let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
    // Persist the generated events on the frontier before saving.
    frontier.review_events.extend(result.events.clone());
    project::recompute_stats(&mut frontier);
    propagate::print_result(&result, label, &finding_id);
    let out = output.unwrap_or(path);
    repo::save_to_path(out, &frontier).expect("Failed to save frontier");
    println!(" output: {}", out.display());
}
14713
/// CLI handler for `vela mcp setup`: print the MCP server JSON snippet a
/// client needs to launch `vela serve` against either a single frontier
/// file (`source`) or a directory of frontiers (`frontiers`), defaulting
/// to `frontier.json` when neither is given. Prints only; writes nothing.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    // `source` wins over `frontiers`, matching the args construction below.
    let source_desc = source
        .or(frontiers)
        .map(|p| p.display().to_string())
        .unwrap_or_else(|| "frontier.json".to_string());
    let args = match (source, frontiers) {
        (Some(path), _) => format!(r#""serve", "{}""#, path.display()),
        (None, Some(path)) => format!(r#""serve", "--frontiers", "{}""#, path.display()),
        (None, None) => r#""serve", "frontier.json""#.to_string(),
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
14741
14742fn parse_entities(input: &str) -> Vec<(String, String)> {
14743 if input.trim().is_empty() {
14744 return Vec::new();
14745 }
14746 input
14747 .split(',')
14748 .filter_map(|pair| {
14749 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
14750 if parts.len() == 2 {
14751 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
14752 } else {
14753 eprintln!(
14754 "{} skipping malformed entity '{}'",
14755 style::warn("warn"),
14756 pair.trim()
14757 );
14758 None
14759 }
14760 })
14761 .collect()
14762}
14763
14764fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
14765 inputs
14766 .iter()
14767 .filter_map(|input| {
14768 let trimmed = input.trim();
14769 if trimmed.is_empty() {
14770 return None;
14771 }
14772 if trimmed.starts_with('{') {
14773 match serde_json::from_str::<Value>(trimmed) {
14774 Ok(value @ Value::Object(_)) => return Some(value),
14775 Ok(_) | Err(_) => {
14776 eprintln!(
14777 "{} evidence span JSON should be an object; storing as text",
14778 style::warn("warn")
14779 );
14780 }
14781 }
14782 }
14783 Some(json!({
14784 "section": "curator_source",
14785 "text": trimmed,
14786 }))
14787 })
14788 .collect()
14789}
14790
14791fn hash_path(path: &Path) -> Result<String, String> {
14792 let mut hasher = Sha256::new();
14793 if path.is_file() {
14794 let bytes = std::fs::read(path)
14795 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
14796 hasher.update(&bytes);
14797 } else if path.is_dir() {
14798 let mut files = Vec::new();
14799 collect_hash_files(path, path, &mut files)?;
14800 files.sort();
14801 for rel in files {
14802 hasher.update(rel.to_string_lossy().as_bytes());
14803 let bytes = std::fs::read(path.join(&rel))
14804 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
14805 hasher.update(bytes);
14806 }
14807 } else {
14808 return Err(format!("Cannot hash missing path {}", path.display()));
14809 }
14810 Ok(format!("{:x}", hasher.finalize()))
14811}
14812
/// Load a frontier project from `path`, aborting through `fail_return`
/// (with the underlying load error in the message) when the file cannot
/// be read or parsed.
fn load_frontier_or_fail(path: &Path) -> project::Project {
    repo::load_from_path(path).unwrap_or_else(|e| {
        fail_return(&format!(
            "Failed to load frontier '{}': {e}",
            path.display()
        ))
    })
}
14821
/// Hash `path` via `hash_path`, aborting through `fail_return` (with the
/// underlying error in the message) when hashing fails.
fn hash_path_or_fail(path: &Path) -> String {
    hash_path(path).unwrap_or_else(|e| {
        fail_return(&format!(
            "Failed to hash frontier '{}': {e}",
            path.display()
        ))
    })
}
14830
/// Recursively gather every file under `dir`, pushing each path relative
/// to `root` onto `files`.
///
/// Subdirectories are descended depth-first; entries that are neither
/// files nor directories are skipped. Traversal order is whatever the OS
/// yields, so callers sort the result before hashing.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let relative = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(relative.to_path_buf());
        }
    }
    Ok(())
}
14849
14850fn schema_error_suggestion(error: &str) -> &'static str {
14851 if schema_error_action(error).is_some() {
14852 "Run `vela normalize` to repair deterministic frontier state."
14853 } else {
14854 "Inspect and correct the referenced frontier field."
14855 }
14856}
14857
/// Whether a schema validation error is mechanically repairable, i.e. it
/// maps to a known normalize action.
fn schema_error_fix(error: &str) -> bool {
    schema_error_action(error).is_some()
}
14861
/// Map a schema validation error message to the `vela normalize` action
/// that repairs it, or `None` when no deterministic repair exists.
///
/// Metadata/stats problems map to `normalize_metadata_and_stats`;
/// content-address mismatches map to `rewrite_ids`.
fn schema_error_action(error: &str) -> Option<&'static str> {
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
14876
14877fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14878 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14879 for diagnostic in diagnostics {
14880 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14881 *actions.entry(action.to_string()).or_default() += 1;
14882 }
14883 }
14884 actions
14885 .into_iter()
14886 .map(|(action, count)| {
14887 let command = if action == "rewrite_ids" {
14888 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14889 } else {
14890 "vela normalize <frontier> --write"
14891 };
14892 json!({
14893 "action": action,
14894 "count": count,
14895 "command": command,
14896 })
14897 })
14898 .collect()
14899}
14900
14901fn cmd_integrity(frontier: &Path, json: bool) {
14902 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14903 if json {
14904 println!(
14905 "{}",
14906 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14907 );
14908 } else {
14909 println!("vela integrity");
14910 println!(" frontier: {}", frontier.display());
14911 println!(" status: {}", report.status);
14912 println!(" proof freshness: {}", report.proof_freshness);
14913 println!(" structural errors: {}", report.structural_errors.len());
14914 for error in report.structural_errors.iter().take(8) {
14915 println!(" - {}: {}", error.rule_id, error.message);
14916 }
14917 }
14918}
14919
14920fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
14921 let report =
14922 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
14923 if json {
14924 println!(
14925 "{}",
14926 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
14927 );
14928 } else {
14929 println!("vela impact");
14930 println!(" finding: {}", report.target.id);
14931 println!(" frontier: {}", report.frontier.vfr_id);
14932 println!(" direct dependents: {}", report.summary.direct_dependents);
14933 println!(" downstream: {}", report.summary.total_downstream);
14934 println!(" open proposals: {}", report.summary.open_proposals);
14935 println!(" accepted events: {}", report.summary.accepted_events);
14936 println!(" proof: {}", report.summary.proof_status);
14937 }
14938}
14939
/// CLI handler for `vela discord`: report per-finding discord kinds as
/// computed by `discord_compute`, optionally filtered to findings that
/// carry `kind_filter`. Output is either a JSON report or a human summary
/// with a kind histogram and up to 50 affected findings.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // Findings carrying at least one discord kind.
    let support = assignment.frontier_support();

    // (finding id, its discord kinds); rows that miss the filter are dropped.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // kind name -> number of findings carrying it. Computed over the full
    // (unfiltered) assignment; zero counts are omitted.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    if !rows.is_empty() {
        println!();
        println!(" findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
15037
/// Build the placeholder `SignalReport` used when the frontier cannot be
/// loaded: empty signal and review-queue lists, and an "unavailable"
/// proof readiness carrying a single explanatory caveat.
fn empty_signal_report() -> signals::SignalReport {
    signals::SignalReport {
        // Same schema tag as regular signal reports.
        schema: "vela.signals.v0".to_string(),
        frontier: "unavailable".to_string(),
        signals: Vec::new(),
        review_queue: Vec::new(),
        proof_readiness: signals::ProofReadiness {
            status: "unavailable".to_string(),
            blockers: 0,
            warnings: 0,
            caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
        },
    }
}
15052
/// Print the trailing SIGNALS section of a report: signal count, proof
/// readiness, the review-queue size (only when non-empty), and — in
/// strict mode — an explicit failure line when readiness is not "ready".
/// This function only prints; any exit-code handling happens elsewhere.
fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
    println!();
    println!(" {}", "SIGNALS".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" total signals: {}", report.signals.len());
    println!(" proof readiness: {}", report.proof_readiness.status);
    if !report.review_queue.is_empty() {
        println!(" review queue: {} items", report.review_queue.len());
    }
    // Strict mode treats any non-"ready" status as a failed check.
    if strict && report.proof_readiness.status != "ready" {
        println!(
            " {} proof readiness has blocking signals.",
            style::lost("strict check failed")
        );
    }
}
15069
/// Write `value` as pretty JSON at `relative_path` inside an existing
/// proof packet and re-register the file in the packet's tracking
/// documents.
///
/// Steps, in order:
/// 1. Serialize and write the new file (creating parent directories).
/// 2. Upsert its path/sha256/size entry into both `manifest.json`
///    (`included_files` array) and `packet.lock.json` (`files` array).
/// 3. Because step 2 rewrote `packet.lock.json`, re-hash the lock file
///    and refresh the lock file's own entry in `manifest.json` so the
///    manifest matches the bytes now on disk.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Entry describing the newly written file.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Upsert the entry into both tracking documents; they use different
    // keys for their file arrays.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any stale entry for this path before appending the new one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // The loop above rewrote packet.lock.json, so the lock file's hash
    // recorded in manifest.json is stale; recompute and replace it.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
15150
/// Pretty-print a `serve check-tools` report (a JSON `Value`): frontier
/// name and finding count, pass/fail totals, the advertised tool list,
/// and one ok/lost status line per check. Absent fields fall back to
/// "unknown" or zero instead of failing.
fn print_tool_check_report(report: &Value) {
    let summary = report.get("summary").unwrap_or(&Value::Null);
    let frontier = report.get("frontier").unwrap_or(&Value::Null);
    println!();
    println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
    println!(" {}", style::tick_row(60));
    println!(
        "frontier: {}",
        frontier
            .get("name")
            .and_then(Value::as_str)
            .unwrap_or("unknown")
    );
    println!(
        "findings: {}",
        frontier
            .get("findings")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    println!(
        "checks: {} passed, {} failed",
        summary
            .get("passed")
            .and_then(Value::as_u64)
            .unwrap_or_default(),
        summary
            .get("failed")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    if let Some(tools) = report.get("tools").and_then(Value::as_array) {
        let names = tools
            .iter()
            .filter_map(Value::as_str)
            .collect::<Vec<_>>()
            .join(", ");
        println!("tools: {names}");
    }
    if let Some(checks) = report.get("checks").and_then(Value::as_array) {
        for check in checks {
            // A check passes only when its `ok` field is literally true.
            let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
                style::ok("ok")
            } else {
                style::lost("lost")
            };
            println!(
                " {} {}",
                status,
                check
                    .get("tool")
                    .and_then(Value::as_str)
                    .unwrap_or("unknown")
            );
        }
    }
}
15208
15209fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
15210 if json_output {
15211 println!(
15212 "{}",
15213 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
15214 );
15215 } else {
15216 println!("{}", report.message);
15217 println!(" frontier: {}", report.frontier);
15218 println!(" finding: {}", report.finding_id);
15219 println!(" proposal: {}", report.proposal_id);
15220 println!(" status: {}", report.proposal_status);
15221 if let Some(event_id) = &report.applied_event_id {
15222 println!(" event: {}", event_id);
15223 }
15224 println!(" wrote: {}", report.wrote_to);
15225 }
15226}
15227
15228fn print_history(payload: &Value) {
15229 let finding = payload.get("finding").unwrap_or(&Value::Null);
15230 println!("vela history");
15231 println!(
15232 " finding: {}",
15233 finding
15234 .get("id")
15235 .and_then(Value::as_str)
15236 .unwrap_or("unknown")
15237 );
15238 println!(
15239 " assertion: {}",
15240 finding
15241 .get("assertion")
15242 .and_then(Value::as_str)
15243 .unwrap_or("")
15244 );
15245 println!(
15246 " confidence: {:.3}",
15247 finding
15248 .get("confidence")
15249 .and_then(Value::as_f64)
15250 .unwrap_or_default()
15251 );
15252 let reviews = payload
15253 .get("review_events")
15254 .and_then(Value::as_array)
15255 .map_or(0, Vec::len);
15256 let updates = payload
15257 .get("confidence_updates")
15258 .and_then(Value::as_array)
15259 .map_or(0, Vec::len);
15260 let annotations = finding
15261 .get("annotations")
15262 .and_then(Value::as_array)
15263 .map_or(0, Vec::len);
15264 let sources = payload
15265 .get("sources")
15266 .and_then(Value::as_array)
15267 .map_or(0, Vec::len);
15268 let atoms = payload
15269 .get("evidence_atoms")
15270 .and_then(Value::as_array)
15271 .map_or(0, Vec::len);
15272 let conditions = payload
15273 .get("condition_records")
15274 .and_then(Value::as_array)
15275 .map_or(0, Vec::len);
15276 let proposals = payload
15277 .get("proposals")
15278 .and_then(Value::as_array)
15279 .map_or(0, Vec::len);
15280 let events = payload
15281 .get("events")
15282 .and_then(Value::as_array)
15283 .map_or(0, Vec::len);
15284 println!(" review events: {reviews}");
15285 println!(" confidence updates: {updates}");
15286 println!(" annotations: {annotations}");
15287 println!(" sources: {sources}");
15288 println!(" evidence atoms: {atoms}");
15289 println!(" condition records: {conditions}");
15290 println!(" proposals: {proposals}");
15291 println!(" canonical events: {events}");
15292 if let Some(status) = payload
15293 .get("proof_state")
15294 .and_then(|value| value.get("latest_packet"))
15295 .and_then(|value| value.get("status"))
15296 .and_then(Value::as_str)
15297 {
15298 println!(" proof state: {status}");
15299 }
15300 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
15301 for event in events.iter().take(8) {
15302 println!(
15303 " - {} {} {}",
15304 event
15305 .get("reviewed_at")
15306 .and_then(Value::as_str)
15307 .unwrap_or(""),
15308 event.get("id").and_then(Value::as_str).unwrap_or(""),
15309 event.get("reason").and_then(Value::as_str).unwrap_or("")
15310 );
15311 }
15312 }
15313}
15314
/// Serializable record of a proof-verification run.
///
/// NOTE(review): field meanings below are inferred from names; confirm
/// against the proof command that populates this struct.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag of this trace format.
    pub trace_version: String,
    /// CLI argument vector that produced the trace.
    pub command: Vec<String>,
    /// The verified source (path or identifier).
    pub source: String,
    /// Hash of the source at verification time.
    pub source_hash: String,
    /// Schema version of the verified artifact.
    pub schema_version: String,
    /// Artifacts inspected during the run.
    pub checked_artifacts: Vec<String>,
    /// Benchmark results, when benchmarks were run.
    pub benchmark: Option<Value>,
    /// Result/summary for the packet manifest check.
    pub packet_manifest: String,
    /// Result/summary for the packet validation check.
    pub packet_validation: String,
    /// Caveats surfaced during verification.
    pub caveats: Vec<String>,
    /// Overall status of the run.
    pub status: String,
    /// Where this trace was written.
    pub trace_path: String,
}
15330
/// Names of the CLI subcommands belonging to the science workflow;
/// queried by `is_science_subcommand`.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "doc",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "proof-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
15468
15469pub fn is_science_subcommand(name: &str) -> bool {
15470 SCIENCE_SUBCOMMANDS.contains(&name)
15471}
15472
15473fn print_strict_help() {
15474 println!(
15475 r#"Vela {}
15476Version control for scientific state.
15477
15478Usage:
15479 vela <COMMAND>
15480
15481Core flow (v0.74):
15482 init Initialize a split frontier repo
15483 ingest Ingest a paper, dataset, or Carina packet (dispatches by file type)
15484 propose Create a finding.review proposal
15485 diff Preview a `vpr_*` proposal, or compare two frontier files
15486 accept Apply a proposal under reviewer authority
15487 attest Sign findings under your private key
15488 log Recent canonical state events
15489 lineage State-transition replay for one finding
15490 serve Local Workbench (findings, evidence, diff, lineage)
15491
15492Read-only inspection:
15493 check Validate a frontier, repo, or proof packet
15494 integrity Check accepted frontier state integrity
15495 impact Report downstream finding impact
15496 normalize Apply deterministic frontier-state repairs
15497 proof Export and validate a proof packet
15498 repo Inspect split frontier repository status and shape
15499 stats Show frontier statistics
15500 search Search findings
15501 tensions List candidate contradictions and tensions
15502 gaps Inspect and rank candidate gap review leads
15503 bridge Find candidate cross-domain connections
15504
15505Advanced (proposal-creation, agent inboxes, federation):
15506 scout Run Literature Scout against a folder of PDFs (writes proposals)
15507 compile-notes Run Notes Compiler against a Markdown vault (writes proposals)
15508 compile-code Run Code & Notebook Analyst against a research repo (writes proposals)
15509 compile-data Run Datasets agent against a folder of CSV/TSV data (writes proposals)
15510 review-pending Run Reviewer Agent: score every pending proposal (writes notes)
15511 find-tensions Run Contradiction Finder: surface real contradictions among findings
15512 plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
15513 export Export frontier artifacts
15514 packet Inspect or validate proof packets
15515 bench Run deterministic benchmark gates
15516 conformance Run protocol conformance vectors
15517 sign Optional signing and signature verification
15518 runtime-adapter
15519 Normalize external runtime exports into reviewable proposals
15520 version Show version information
15521 import Import frontier.json into a .vela repo
15522 proposals Inspect, validate, export, import, accept, or reject write proposals
15523 artifact-to-state
15524 Import a Carina artifact packet as reviewable proposals
15525 bridge-kit
15526 Validate Carina artifact packets before importing runtime output
15527 source-adapter
15528 Run reviewed source adapters into artifact-to-state proposals
15529 finding Add or manage finding bundles as frontier state
15530 link Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
15531 entity Resolve unresolved entities against a bundled common-entity table (v0.19)
15532 frontier Scaffold (`new`), materialize, and manage frontier metadata + deps
15533 actor Register Ed25519 publisher identities in a frontier
15534 registry Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
15535 review Create a review proposal or review interactively
15536 note Add a lightweight note to a finding
15537 caveat Create an explicit caveat proposal
15538 revise Create a confidence revision proposal
15539 reject Create a rejection proposal
15540 history Show state-transition history for one finding (v0.74 alias: `lineage`)
15541 import-events Import review/state events from a packet or JSON file
15542 retract Create a retraction proposal
15543 propagate Simulate impact over declared dependency links
15544 artifact-add Register a content-addressed artifact
15545 artifacts List content-addressed artifacts
15546 artifact-audit Audit artifact locators, hashes, references, and profiles
15547 decision-brief Show the validated decision brief projection
15548 trial-summary Show the validated trial outcome projection
15549 source-verification Show the validated source verification projection
15550 source-ingest-plan Show the validated source ingest plan
15551 clinical-trial-import Import a ClinicalTrials.gov record as an artifact
15552 locator-repair Mechanically repair an evidence atom's missing source locator
15553 span-repair Mechanically repair a finding's missing evidence span
15554 entity-resolve Resolve a finding entity to a canonical id
15555 source-fetch Fetch metadata + abstract for a doi:/pmid:/nct: source
15556 atlas Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
15557 constellation Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)
15558
15559Quick start (the demo):
15560 vela init demo --name "Your bounded question"
15561 vela ingest paper.pdf --frontier demo
15562 vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
15563 vela diff <vpr_id> --frontier demo
15564 vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
15565 vela serve --path demo
15566
15567Substrate health:
15568 vela frontier materialize my-frontier --json
15569 vela repo status my-frontier --json
15570 vela proof verify my-frontier --json
15571 vela check my-frontier --strict --json
15572
15573Monolithic frontier file:
15574 vela frontier new frontier.json --name "Your bounded question"
15575 vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
15576 vela check frontier.json --json
15577 FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
15578 vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply
15579
15580Publish your own frontier (see docs/PUBLISHING.md):
15581 vela frontier new ./frontier.json --name "Your bounded question"
15582 vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
15583 vela sign generate-keypair --out keys
15584 vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
15585 vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
15586 --to https://vela-hub.fly.dev
15587"#,
15588 env!("CARGO_PKG_VERSION")
15589 );
15590}
15591
/// Entry-point signature for the Literature Scout agent: scan `folder` for
/// PDFs and write proposals into the `frontier` repo (see the `Scout`
/// subcommand flags above). Returned as a boxed `Send` future so the async
/// implementation can be supplied by another crate at startup.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `scout`.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Install the Scout handler. First registration wins: `OnceLock::set` fails
/// on repeat calls and that failure is deliberately discarded.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
15616
/// Entry-point signature for creating an Atlas (a composition of frontiers,
/// `vat_*`): root directory for atlases, atlas name/domain, optional scope
/// note, member frontier paths, and a JSON-output flag.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the atlas-init flow.
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Install the atlas-init handler; first registration wins (later `set`
/// errors are discarded).
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}
15634
/// Entry-point signature for materializing the named Atlas under
/// `atlases_root` (JSON output optional).
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the atlas-materialize flow.
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Install the atlas-materialize handler; first registration wins.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}
15644
/// Entry-point signature for serving the named Atlas over HTTP on `port`,
/// optionally opening a browser (presumably mirrors `vela serve` for a single
/// frontier — confirm at the registration site).
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the atlas-serve flow.
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Install the atlas-serve handler; first registration wins.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}
15661
/// Entry-point signature for editing Atlas membership: add frontiers by path
/// and/or remove members by their `vfr` id.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the atlas-update flow.
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Install the atlas-update handler; first registration wins.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
15679
/// Entry-point signature for creating a Constellation (a cross-domain
/// composition of Atlases, `vco_*`): root directory, name, optional scope
/// note, and member Atlas paths.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the constellation-init flow.
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Install the constellation-init handler; first registration wins.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}
15696
/// Entry-point signature for materializing the named Constellation under
/// `constellations_root` (JSON output optional).
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the constellation-materialize flow.
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Install the constellation-materialize handler; first registration wins.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}
15709
/// Entry-point signature for serving the named Constellation over HTTP on
/// `port`, optionally opening a browser.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot for the constellation-serve flow.
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Install the constellation-serve handler; first registration wins.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
15722
/// Entry-point signature for the Notes Compiler agent: walk a Markdown
/// `vault` and write proposals into `frontier`, bounded by `max_files` and
/// `max_items_per_category` (see the `CompileNotes` subcommand flags above).
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `compile-notes`.
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Install the Notes Compiler handler; first registration wins.
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}
15742
/// Entry-point signature for the Code & Notebook Analyst agent: analyze the
/// research repo at `root` and write proposals into `frontier` (see the
/// `CompileCode` subcommand flags above).
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `compile-code`.
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Install the Code & Notebook Analyst handler; first registration wins.
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}
15759
/// Entry-point signature for the Datasets agent: scan CSV/TSV data under
/// `root` (sampling up to `sample_rows` rows per file — confirm semantics at
/// the implementation) and write proposals into `frontier`.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `compile-data`.
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Install the Datasets handler; first registration wins.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}
15776
/// Entry-point signature for the Reviewer agent: score pending proposals in
/// `frontier`, processing up to `max_proposals` in batches of `batch_size`
/// (see the `ReviewPending` subcommand flags above).
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `review-pending`.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Install the Reviewer handler; first registration wins.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}
15793
/// Entry-point signature for the Contradiction Finder agent: surface
/// contradictions among up to `max_findings` findings in `frontier`.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `find-tensions`.
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Install the Contradiction Finder handler; first registration wins.
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}
15809
/// Entry-point signature for the Experiment Planner agent: propose
/// experiments for open questions/hypotheses over up to `max_findings`
/// findings in `frontier`.
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once registration slot, read when the CLI dispatches `plan-experiments`.
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Install the Experiment Planner handler; first registration wins.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
15825
15826fn find_vela_repo() -> Option<PathBuf> {
15842 let mut cur = std::env::current_dir().ok()?;
15843 loop {
15844 if cur.join(".vela").is_dir() {
15845 return Some(cur);
15846 }
15847 if !cur.pop() {
15848 return None;
15849 }
15850 }
15851}
15852
/// Print the compact, human-oriented help screen shown by bare `vela help`.
///
/// This is the short "session" help; the exhaustive subcommand list lives in
/// `print_strict_help` (reached via `vela help advanced`). Pure output: the
/// only dynamic value is the crate version baked in at compile time.
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
15899
/// Render the one-screen session dashboard for a loaded frontier repo:
/// header, counts (findings/events/pending proposals), causal-audit summary,
/// bridge inventory, replication summary, and the quick-verb legend.
///
/// Read-only: gathers everything from `project` plus the on-disk
/// `.vela/bridges` directory under `repo_path`, then prints to stdout.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Truncated id for the header line; full id is available elsewhere.
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Tally pending proposals, grouped by proposal kind for the inbox line.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Causal identifiability audit over the whole frontier.
    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Bridges live as individual JSON files under .vela/bridges; count them
    // by status. Unreadable or unparsable files still count toward the total
    // but contribute to neither status bucket.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replications: count distinct findings with at least one success, and
    // the total number of failed attempts.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Header shows the bare version number (compiler version minus the
    // "vela/" prefix, when present).
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        " {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " vfr_id {}… repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        " findings {:>4} events {} proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // Conditional sections: only print rows that carry signal.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!(" {} · {}", style::warn("inbox"), parts.join(" "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            " {} · {} underidentified · {} conditional",
            // Underidentified findings escalate the label from warn to lost.
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            " {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            " {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    println!();
    // Legend for the single-letter verbs handled by run_session_verb.
    println!(" type a verb or ask anything:");
    println!(" a audit problems i inbox (pending) b bridges");
    println!(" g causal graph l log (recent) c counterfactuals");
    println!(" s refresh status h help (more verbs) q quit");
    println!();
}
16019
/// Dispatch a single session verb (one-letter shortcut or full word) against
/// the repo at `repo_path`.
///
/// Returns `true` when the input was recognized and handled (even if the
/// handling itself printed an error), and `false` when the input is not a
/// known verb — the caller then treats it as a free-text question.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        // Causal audit, restricted to problem findings only.
        "a" | "audit" => {
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Pending-review proposals ("inbox").
        "i" | "inbox" => {
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        // All bridges, regardless of status.
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        // Whole causal graph (no node filter).
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Last 10 canonical events.
        "l" | "log" => {
            cmd_log(repo_path, 10, None, false);
            true
        }
        // List counterfactual-ready edges: depends/supports links that carry
        // a mechanism annotation. This verb only surveys; it does not run the
        // counterfactual itself.
        "c" | "counterfactual" | "counterfactuals" => {
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    eprintln!("{} {e}", style::err_prefix());
                    // Still `true`: the verb was recognized, it just failed.
                    return true;
                }
            };
            println!();
            println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!(" {}", style::tick_row(60));
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Drop any "prefix:" from the link target to get the
                    // parent id; targets without a colon are used verbatim.
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    // Cap the printed listing at 10 pairs; keep counting.
                    if pairs <= 10 {
                        println!(" · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!(" no mechanism-annotated edges found.");
                println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!(" {pairs} live pair(s). Run with:");
                println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        // Reload the repo and redraw the dashboard.
        "s" | "status" | "refresh" => {
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        // Not a verb: let the caller treat the input as a question.
        _ => false,
    }
}
16124
16125fn run_session() {
16126 let repo_path = match find_vela_repo() {
16127 Some(p) => p,
16128 None => {
16129 println!();
16130 println!(
16131 " {}",
16132 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
16133 );
16134 println!(" {}", style::tick_row(60));
16135 println!(" Run `vela init` here to create a frontier, or cd into one.");
16136 println!(" Or run `vela help` for the command list.");
16137 println!();
16138 return;
16139 }
16140 };
16141
16142 let project = match repo::load_from_path(&repo_path) {
16143 Ok(p) => p,
16144 Err(e) => {
16145 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
16146 std::process::exit(1);
16147 }
16148 };
16149
16150 print_session_dashboard(&project, &repo_path);
16151
16152 use std::io::{BufRead, Write};
16153 let stdin = std::io::stdin();
16154 let mut stdout = std::io::stdout();
16155 loop {
16156 print!(" > ");
16157 stdout.flush().ok();
16158 let mut line = String::new();
16159 if stdin.lock().read_line(&mut line).is_err() {
16160 break;
16161 }
16162 let input = line.trim();
16163 if input.is_empty() {
16164 continue;
16165 }
16166 if matches!(input, "q" | "quit" | "exit") {
16167 break;
16168 }
16169 if run_session_verb(input, &repo_path) {
16170 continue;
16171 }
16172 let project = match repo::load_from_path(&repo_path) {
16174 Ok(p) => p,
16175 Err(e) => {
16176 eprintln!("{} {e}", style::err_prefix());
16177 continue;
16178 }
16179 };
16180 answer(&project, input, false);
16181 }
16182}
16183
16184pub fn run_from_args() {
16185 style::init();
16186 let args = std::env::args().collect::<Vec<_>>();
16187 match args.get(1).map(String::as_str) {
16188 None => {
16192 run_session();
16193 return;
16194 }
16195 Some("-h" | "--help" | "help") => {
16196 if args.get(2).map(String::as_str) == Some("advanced") {
16199 print_strict_help();
16200 } else {
16201 print_session_help();
16202 }
16203 return;
16204 }
16205 Some("-V" | "--version" | "version") => {
16206 println!("vela {}", env!("CARGO_PKG_VERSION"));
16207 return;
16208 }
16209 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
16210 let json = args.iter().any(|arg| arg == "--json");
16211 let frontier = args
16212 .iter()
16213 .skip(3)
16214 .find(|arg| !arg.starts_with('-'))
16215 .map(PathBuf::from)
16216 .unwrap_or_else(|| {
16217 eprintln!(
16218 "{} proof verify requires a frontier repo",
16219 style::err_prefix()
16220 );
16221 std::process::exit(2);
16222 });
16223 cmd_proof_verify(&frontier, json);
16224 return;
16225 }
16226 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
16227 let frontier = args
16228 .iter()
16229 .skip(3)
16230 .find(|arg| !arg.starts_with('-'))
16231 .map(PathBuf::from)
16232 .unwrap_or_else(|| {
16233 eprintln!(
16234 "{} proof explain requires a frontier repo",
16235 style::err_prefix()
16236 );
16237 std::process::exit(2);
16238 });
16239 cmd_proof_explain(&frontier);
16240 return;
16241 }
16242 Some(cmd) if !is_science_subcommand(cmd) => {
16243 eprintln!(
16244 "{} unknown or non-release command: {cmd}",
16245 style::err_prefix()
16246 );
16247 eprintln!("run `vela --help` for the strict v0 command surface.");
16248 std::process::exit(2);
16249 }
16250 Some(_) => {}
16251 }
16252 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
16253 runtime.block_on(run_command());
16254}
16255
16256fn fail(message: &str) -> ! {
16257 eprintln!("{} {message}", style::err_prefix());
16258 std::process::exit(1);
16259}
16260
16261fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
16266 if !valid.contains(&value) {
16267 fail(&format!(
16268 "invalid {flag} '{value}'. Valid: {}",
16269 valid.join(", ")
16270 ));
16271 }
16272}
16273
16274fn fail_return<T>(message: &str) -> T {
16275 fail(message)
16276}