1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
22#[derive(Parser)]
23#[command(name = "vela", version)]
24#[command(about = "Portable frontier state for science")]
25struct Cli {
26 #[command(subcommand)]
27 command: Commands,
28}
29
30#[derive(Subcommand)]
31enum Commands {
32 Scout {
39 folder: PathBuf,
41 #[arg(long)]
43 frontier: PathBuf,
44 #[arg(short, long)]
46 backend: Option<String>,
47 #[arg(long)]
49 dry_run: bool,
50 #[arg(long)]
52 json: bool,
53 },
54 CompileNotes {
61 vault: PathBuf,
63 #[arg(long)]
65 frontier: PathBuf,
66 #[arg(short, long)]
68 backend: Option<String>,
69 #[arg(long)]
71 max_files: Option<usize>,
72 #[arg(long)]
77 max_items_per_category: Option<usize>,
78 #[arg(long)]
80 dry_run: bool,
81 #[arg(long)]
83 json: bool,
84 },
85 CompileCode {
91 root: PathBuf,
93 #[arg(long)]
95 frontier: PathBuf,
96 #[arg(short, long)]
98 backend: Option<String>,
99 #[arg(long)]
101 max_files: Option<usize>,
102 #[arg(long)]
104 dry_run: bool,
105 #[arg(long)]
107 json: bool,
108 },
109 ReviewPending {
115 #[arg(long)]
116 frontier: PathBuf,
117 #[arg(short, long)]
118 backend: Option<String>,
119 #[arg(long)]
120 max_proposals: Option<usize>,
121 #[arg(long, default_value = "1")]
126 batch_size: usize,
127 #[arg(long)]
128 dry_run: bool,
129 #[arg(long)]
130 json: bool,
131 },
132 FindTensions {
136 #[arg(long)]
137 frontier: PathBuf,
138 #[arg(short, long)]
139 backend: Option<String>,
140 #[arg(long)]
141 max_findings: Option<usize>,
142 #[arg(long)]
143 dry_run: bool,
144 #[arg(long)]
145 json: bool,
146 },
147 PlanExperiments {
151 #[arg(long)]
152 frontier: PathBuf,
153 #[arg(short, long)]
154 backend: Option<String>,
155 #[arg(long)]
156 max_findings: Option<usize>,
157 #[arg(long)]
158 dry_run: bool,
159 #[arg(long)]
160 json: bool,
161 },
162 CompileData {
167 root: PathBuf,
169 #[arg(long)]
171 frontier: PathBuf,
172 #[arg(short, long)]
174 backend: Option<String>,
175 #[arg(long)]
177 sample_rows: Option<usize>,
178 #[arg(long)]
180 dry_run: bool,
181 #[arg(long)]
183 json: bool,
184 },
185 Check {
187 source: Option<PathBuf>,
189 #[arg(long)]
191 schema: bool,
192 #[arg(long)]
194 stats: bool,
195 #[arg(long)]
197 conformance: bool,
198 #[arg(long, default_value = "tests/conformance")]
200 conformance_dir: PathBuf,
201 #[arg(long)]
203 all: bool,
204 #[arg(long)]
206 schema_only: bool,
207 #[arg(long)]
209 strict: bool,
210 #[arg(long)]
212 fix: bool,
213 #[arg(long)]
215 json: bool,
216 },
217 Integrity {
219 frontier: PathBuf,
221 #[arg(long)]
223 json: bool,
224 },
225 Impact {
227 frontier: PathBuf,
229 finding_id: String,
231 #[arg(long)]
233 depth: Option<usize>,
234 #[arg(long)]
236 json: bool,
237 },
238 Discord {
245 frontier: PathBuf,
247 #[arg(long)]
249 json: bool,
250 #[arg(long)]
254 kind: Option<String>,
255 },
256 Normalize {
258 source: PathBuf,
260 #[arg(short, long)]
262 out: Option<PathBuf>,
263 #[arg(long)]
265 write: bool,
266 #[arg(long)]
268 dry_run: bool,
269 #[arg(long)]
271 rewrite_ids: bool,
272 #[arg(long)]
274 id_map: Option<PathBuf>,
275 #[arg(long)]
279 resync_provenance: bool,
280 #[arg(long)]
282 json: bool,
283 },
284 Proof {
286 frontier: PathBuf,
288 #[arg(long, short = 'o', default_value = "proof-packet")]
290 out: PathBuf,
291 #[arg(long, default_value = "bbb-alzheimer")]
293 template: String,
294 #[arg(long)]
296 gold: Option<PathBuf>,
297 #[arg(long)]
299 record_proof_state: bool,
300 #[arg(long)]
302 json: bool,
303 },
304 Repo {
306 #[command(subcommand)]
307 action: RepoAction,
308 },
309 Serve {
311 #[arg(required_unless_present_any = ["frontiers", "setup"])]
313 frontier: Option<PathBuf>,
314 #[arg(long)]
316 frontiers: Option<PathBuf>,
317 #[arg(short, long)]
319 backend: Option<String>,
320 #[arg(long)]
322 http: Option<u16>,
323 #[arg(long)]
325 setup: bool,
326 #[arg(long)]
328 check_tools: bool,
329 #[arg(long)]
331 json: bool,
332 #[arg(long)]
336 workbench: bool,
337 },
338 Status {
342 frontier: PathBuf,
343 #[arg(long)]
345 json: bool,
346 },
347 Log {
350 frontier: PathBuf,
351 #[arg(long, default_value = "20")]
353 limit: usize,
354 #[arg(long)]
356 kind: Option<String>,
357 #[arg(long)]
359 json: bool,
360 },
361 Inbox {
365 frontier: PathBuf,
366 #[arg(long)]
368 kind: Option<String>,
369 #[arg(long, default_value = "30")]
371 limit: usize,
372 #[arg(long)]
374 json: bool,
375 },
376 Ask {
381 frontier: PathBuf,
382 #[arg(trailing_var_arg = true)]
384 question: Vec<String>,
385 #[arg(long)]
387 json: bool,
388 },
389 Stats {
391 frontier: PathBuf,
393 #[arg(long)]
395 json: bool,
396 },
397 Search {
399 query: String,
401 #[arg(long)]
403 source: Option<PathBuf>,
404 #[arg(long)]
406 entity: Option<String>,
407 #[arg(long)]
409 r#type: Option<String>,
410 #[arg(long)]
412 all: Option<PathBuf>,
413 #[arg(long, default_value = "20")]
415 limit: usize,
416 #[arg(long)]
418 json: bool,
419 },
420 Tensions {
422 source: PathBuf,
423 #[arg(long)]
424 both_high: bool,
425 #[arg(long)]
426 cross_domain: bool,
427 #[arg(long, default_value = "20")]
428 top: usize,
429 #[arg(long)]
430 json: bool,
431 },
432 Gaps {
434 #[command(subcommand)]
435 action: GapsAction,
436 },
437 Bridge {
439 #[arg(required = true)]
441 inputs: Vec<PathBuf>,
442 #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
444 novelty: bool,
445 #[arg(long, default_value = "30")]
447 top: usize,
448 },
449 Export {
451 frontier: PathBuf,
452 #[arg(short, long, default_value = "csv")]
453 format: String,
454 #[arg(short, long)]
455 output: Option<PathBuf>,
456 },
457 Packet {
459 #[command(subcommand)]
460 action: PacketAction,
461 },
462 Verify {
469 path: PathBuf,
471 #[arg(long)]
472 json: bool,
473 },
474 Bench {
486 frontier: Option<PathBuf>,
488 #[arg(long)]
490 gold: Option<PathBuf>,
491 #[arg(long)]
495 candidate: Option<PathBuf>,
496 #[arg(long)]
500 sources: Option<PathBuf>,
501 #[arg(long)]
504 threshold: Option<f64>,
505 #[arg(long)]
508 report: Option<PathBuf>,
509 #[arg(long)]
510 entity_gold: Option<PathBuf>,
511 #[arg(long)]
512 link_gold: Option<PathBuf>,
513 #[arg(long)]
514 suite: Option<PathBuf>,
515 #[arg(long)]
516 suite_ready: bool,
517 #[arg(long)]
518 min_f1: Option<f64>,
519 #[arg(long)]
520 min_precision: Option<f64>,
521 #[arg(long)]
522 min_recall: Option<f64>,
523 #[arg(long)]
524 no_thresholds: bool,
525 #[arg(long)]
526 json: bool,
527 },
528 Conformance {
530 #[arg(default_value = "tests/conformance")]
531 dir: PathBuf,
532 },
533 Version,
535 Sign {
537 #[command(subcommand)]
538 action: SignAction,
539 },
540 Actor {
542 #[command(subcommand)]
543 action: ActorAction,
544 },
545 Federation {
550 #[command(subcommand)]
551 action: FederationAction,
552 },
553 Causal {
560 #[command(subcommand)]
561 action: CausalAction,
562 },
563 Frontier {
567 #[command(subcommand)]
568 action: FrontierAction,
569 },
570 Queue {
573 #[command(subcommand)]
574 action: QueueAction,
575 },
576 Registry {
579 #[command(subcommand)]
580 action: RegistryAction,
581 },
582 Init {
584 #[arg(default_value = ".")]
585 path: PathBuf,
586 #[arg(long, default_value = "unnamed")]
587 name: String,
588 #[arg(long, default_value = "default")]
589 template: String,
590 #[arg(long)]
591 no_git: bool,
592 #[arg(long)]
593 json: bool,
594 },
595 Quickstart {
601 #[arg(default_value = "demo")]
603 path: PathBuf,
604 #[arg(long, default_value = "Quickstart frontier")]
606 name: String,
607 #[arg(long, default_value = "reviewer:you")]
611 reviewer: String,
612 #[arg(long)]
615 assertion: Option<String>,
616 #[arg(long)]
619 keys_out: Option<PathBuf>,
620 #[arg(long)]
622 json: bool,
623 },
624 Lock {
631 path: PathBuf,
633 #[arg(long)]
636 check: bool,
637 #[arg(long)]
639 json: bool,
640 },
641 Doc {
648 path: PathBuf,
650 #[arg(long)]
652 out: Option<PathBuf>,
653 #[arg(long)]
656 json: bool,
657 },
658 Import {
660 frontier: PathBuf,
661 #[arg(long)]
662 into: Option<PathBuf>,
663 },
664 Diff {
674 target: String,
677 frontier_b: Option<PathBuf>,
680 #[arg(long)]
684 frontier: Option<PathBuf>,
685 #[arg(long, default_value = "reviewer:preview")]
687 reviewer: String,
688 #[arg(long)]
689 json: bool,
690 #[arg(long)]
691 quiet: bool,
692 },
693 Proposals {
695 #[command(subcommand)]
696 action: ProposalAction,
697 },
698 ArtifactToState {
700 frontier: PathBuf,
702 packet: PathBuf,
704 #[arg(long)]
706 actor: String,
707 #[arg(long)]
709 apply_artifacts: bool,
710 #[arg(long)]
711 json: bool,
712 },
713 BridgeKit {
715 #[command(subcommand)]
716 action: BridgeKitAction,
717 },
718 SourceAdapter {
720 #[command(subcommand)]
721 action: SourceAdapterAction,
722 },
723 RuntimeAdapter {
725 #[command(subcommand)]
726 action: RuntimeAdapterAction,
727 },
728 Finding {
730 #[command(subcommand)]
731 command: FindingCommands,
732 },
733 Link {
737 #[command(subcommand)]
738 action: LinkAction,
739 },
740 Workbench {
745 #[arg(default_value = ".")]
747 path: PathBuf,
748 #[arg(long, default_value_t = 3850)]
750 port: u16,
751 #[arg(long)]
753 no_open: bool,
754 },
755 Bridges {
761 #[command(subcommand)]
762 action: BridgesAction,
763 },
764 Entity {
769 #[command(subcommand)]
770 action: EntityAction,
771 },
772 Review {
774 frontier: PathBuf,
776 finding_id: String,
778 #[arg(long)]
780 status: Option<String>,
781 #[arg(long)]
783 reason: Option<String>,
784 #[arg(long)]
786 reviewer: String,
787 #[arg(long)]
789 apply: bool,
790 #[arg(long)]
792 json: bool,
793 },
794 Note {
796 frontier: PathBuf,
797 finding_id: String,
798 #[arg(long)]
799 text: String,
800 #[arg(long)]
801 author: String,
802 #[arg(long)]
804 apply: bool,
805 #[arg(long)]
806 json: bool,
807 },
808 Caveat {
810 frontier: PathBuf,
811 finding_id: String,
812 #[arg(long)]
813 text: String,
814 #[arg(long)]
815 author: String,
816 #[arg(long)]
817 apply: bool,
818 #[arg(long)]
819 json: bool,
820 },
821 Revise {
823 frontier: PathBuf,
824 finding_id: String,
825 #[arg(long)]
827 confidence: f64,
828 #[arg(long)]
830 reason: String,
831 #[arg(long)]
833 reviewer: String,
834 #[arg(long)]
835 apply: bool,
836 #[arg(long)]
837 json: bool,
838 },
839 Reject {
841 frontier: PathBuf,
842 finding_id: String,
843 #[arg(long)]
844 reason: String,
845 #[arg(long)]
846 reviewer: String,
847 #[arg(long)]
848 apply: bool,
849 #[arg(long)]
850 json: bool,
851 },
852 History {
854 frontier: PathBuf,
855 finding_id: String,
856 #[arg(long)]
857 json: bool,
858 #[arg(long, value_name = "RFC3339_TIMESTAMP")]
862 as_of: Option<String>,
863 },
864 ImportEvents {
866 source: PathBuf,
867 #[arg(long)]
868 into: PathBuf,
869 #[arg(long)]
870 json: bool,
871 },
872 Retract {
874 source: PathBuf,
875 finding_id: String,
876 #[arg(long)]
877 reason: String,
878 #[arg(long)]
879 reviewer: String,
880 #[arg(long)]
881 apply: bool,
882 #[arg(long)]
883 json: bool,
884 },
885 EntityAdd {
890 frontier: PathBuf,
891 finding_id: String,
892 #[arg(long)]
893 entity: String,
894 #[arg(long)]
898 entity_type: String,
899 #[arg(long)]
900 reviewer: String,
901 #[arg(long)]
902 reason: String,
903 #[arg(long)]
904 apply: bool,
905 #[arg(long)]
906 json: bool,
907 },
908 EntityResolve {
912 frontier: PathBuf,
913 finding_id: String,
914 #[arg(long)]
915 entity: String,
916 #[arg(long)]
917 source: String,
918 #[arg(long)]
919 id: String,
920 #[arg(long)]
921 confidence: f64,
922 #[arg(long)]
923 matched_name: Option<String>,
924 #[arg(long, default_value = "manual")]
925 resolution_method: String,
926 #[arg(long)]
927 reviewer: String,
928 #[arg(long)]
929 reason: String,
930 #[arg(long)]
931 apply: bool,
932 #[arg(long)]
933 json: bool,
934 },
935 SourceFetch {
943 identifier: String,
946 #[arg(long)]
950 cache: Option<PathBuf>,
951 #[arg(long)]
953 out: Option<PathBuf>,
954 #[arg(long)]
956 refresh: bool,
957 #[arg(long)]
958 json: bool,
959 },
960 SpanRepair {
963 frontier: PathBuf,
964 finding_id: String,
965 #[arg(long)]
966 section: String,
967 #[arg(long)]
968 text: String,
969 #[arg(long)]
970 reviewer: String,
971 #[arg(long)]
972 reason: String,
973 #[arg(long)]
974 apply: bool,
975 #[arg(long)]
976 json: bool,
977 },
978 LocatorRepair {
983 frontier: PathBuf,
984 atom_id: String,
985 #[arg(long)]
988 locator: Option<String>,
989 #[arg(long)]
992 reviewer: String,
993 #[arg(long)]
995 reason: String,
996 #[arg(long)]
998 apply: bool,
999 #[arg(long)]
1000 json: bool,
1001 },
1002 Propagate {
1004 frontier: PathBuf,
1005 #[arg(long)]
1006 retract: Option<String>,
1007 #[arg(long)]
1008 reduce_confidence: Option<String>,
1009 #[arg(long)]
1010 to: Option<f64>,
1011 #[arg(short, long)]
1012 output: Option<PathBuf>,
1013 },
1014 Replicate {
1023 frontier: PathBuf,
1025 target: String,
1027 #[arg(long)]
1029 outcome: String,
1030 #[arg(long)]
1032 by: String,
1033 #[arg(long)]
1037 conditions: String,
1038 #[arg(long)]
1040 source_title: String,
1041 #[arg(long)]
1043 doi: Option<String>,
1044 #[arg(long)]
1046 pmid: Option<String>,
1047 #[arg(long)]
1049 sample_size: Option<String>,
1050 #[arg(long, default_value = "")]
1053 note: String,
1054 #[arg(long)]
1056 previous_attempt: Option<String>,
1057 #[arg(long, default_value_t = false)]
1064 no_cascade: bool,
1065 #[arg(long)]
1067 json: bool,
1068 },
1069 Replications {
1072 frontier: PathBuf,
1074 #[arg(long)]
1076 target: Option<String>,
1077 #[arg(long)]
1079 json: bool,
1080 },
1081 DatasetAdd {
1088 frontier: PathBuf,
1090 #[arg(long)]
1092 name: String,
1093 #[arg(long)]
1095 version: Option<String>,
1096 #[arg(long)]
1100 content_hash: String,
1101 #[arg(long)]
1103 url: Option<String>,
1104 #[arg(long)]
1106 license: Option<String>,
1107 #[arg(long)]
1109 source_title: String,
1110 #[arg(long)]
1112 doi: Option<String>,
1113 #[arg(long)]
1115 row_count: Option<u64>,
1116 #[arg(long)]
1118 json: bool,
1119 },
1120 Datasets {
1122 frontier: PathBuf,
1123 #[arg(long)]
1124 json: bool,
1125 },
1126 CodeAdd {
1130 frontier: PathBuf,
1132 #[arg(long)]
1134 language: String,
1135 #[arg(long)]
1137 repo_url: Option<String>,
1138 #[arg(long)]
1141 commit: Option<String>,
1142 #[arg(long)]
1144 path: String,
1145 #[arg(long)]
1147 content_hash: String,
1148 #[arg(long)]
1150 line_start: Option<u32>,
1151 #[arg(long)]
1153 line_end: Option<u32>,
1154 #[arg(long)]
1156 entry_point: Option<String>,
1157 #[arg(long)]
1159 json: bool,
1160 },
1161 CodeArtifacts {
1163 frontier: PathBuf,
1164 #[arg(long)]
1165 json: bool,
1166 },
1167 ArtifactAdd {
1172 frontier: PathBuf,
1174 #[arg(long)]
1177 kind: String,
1178 #[arg(long)]
1180 name: String,
1181 #[arg(long)]
1184 file: Option<PathBuf>,
1185 #[arg(long)]
1187 url: Option<String>,
1188 #[arg(long)]
1190 content_hash: Option<String>,
1191 #[arg(long)]
1193 media_type: Option<String>,
1194 #[arg(long)]
1196 license: Option<String>,
1197 #[arg(long)]
1199 source_title: Option<String>,
1200 #[arg(long)]
1202 source_url: Option<String>,
1203 #[arg(long)]
1205 doi: Option<String>,
1206 #[arg(long)]
1208 target: Vec<String>,
1209 #[arg(long)]
1211 metadata: Vec<String>,
1212 #[arg(long, default_value = "public")]
1214 access_tier: String,
1215 #[arg(long, default_value = "reviewer:manual")]
1217 deposited_by: String,
1218 #[arg(long, default_value = "artifact deposit")]
1220 reason: String,
1221 #[arg(long)]
1223 json: bool,
1224 },
1225 Artifacts {
1227 frontier: PathBuf,
1228 #[arg(long)]
1230 target: Option<String>,
1231 #[arg(long)]
1232 json: bool,
1233 },
1234 ArtifactAudit {
1236 frontier: PathBuf,
1237 #[arg(long)]
1239 json: bool,
1240 },
1241 DecisionBrief {
1243 frontier: PathBuf,
1244 #[arg(long)]
1246 json: bool,
1247 },
1248 TrialSummary {
1250 frontier: PathBuf,
1251 #[arg(long)]
1253 json: bool,
1254 },
1255 SourceVerification {
1257 frontier: PathBuf,
1258 #[arg(long)]
1260 json: bool,
1261 },
1262 SourceIngestPlan {
1264 frontier: PathBuf,
1265 #[arg(long)]
1267 json: bool,
1268 },
1269 ClinicalTrialImport {
1272 frontier: PathBuf,
1274 nct_id: String,
1276 #[arg(long)]
1279 input_json: Option<PathBuf>,
1280 #[arg(long)]
1282 target: Vec<String>,
1283 #[arg(long, default_value = "reviewer:manual")]
1285 deposited_by: String,
1286 #[arg(long, default_value = "clinical trial record import")]
1288 reason: String,
1289 #[arg(long, default_value = "ClinicalTrials.gov public record")]
1291 license: String,
1292 #[arg(long)]
1294 json: bool,
1295 },
1296 NegativeResultAdd {
1304 frontier: PathBuf,
1306 #[arg(long)]
1308 kind: String,
1309 #[arg(long)]
1311 deposited_by: String,
1312 #[arg(long)]
1314 reason: String,
1315 #[arg(long)]
1318 conditions_text: String,
1319 #[arg(long, default_value = "")]
1321 notes: String,
1322 #[arg(long)]
1325 target: Vec<String>,
1326 #[arg(long)]
1330 endpoint: Option<String>,
1331 #[arg(long)]
1333 intervention: Option<String>,
1334 #[arg(long)]
1336 comparator: Option<String>,
1337 #[arg(long)]
1339 population: Option<String>,
1340 #[arg(long)]
1342 n_enrolled: Option<u32>,
1343 #[arg(long)]
1345 power: Option<f64>,
1346 #[arg(long)]
1348 ci_lower: Option<f64>,
1349 #[arg(long)]
1351 ci_upper: Option<f64>,
1352 #[arg(long)]
1354 effect_size_threshold: Option<f64>,
1355 #[arg(long)]
1357 registry_id: Option<String>,
1358 #[arg(long)]
1361 reagent: Option<String>,
1362 #[arg(long)]
1364 observation: Option<String>,
1365 #[arg(long)]
1367 attempts: Option<u32>,
1368 #[arg(long)]
1371 source_title: String,
1372 #[arg(long)]
1374 doi: Option<String>,
1375 #[arg(long)]
1377 url: Option<String>,
1378 #[arg(long)]
1380 year: Option<i32>,
1381 #[arg(long)]
1383 json: bool,
1384 },
1385 NegativeResults {
1387 frontier: PathBuf,
1388 #[arg(long)]
1390 target: Option<String>,
1391 #[arg(long)]
1392 json: bool,
1393 },
1394 TrajectoryCreate {
1399 frontier: PathBuf,
1401 #[arg(long)]
1403 deposited_by: String,
1404 #[arg(long)]
1406 reason: String,
1407 #[arg(long)]
1412 target: Vec<String>,
1413 #[arg(long, default_value = "")]
1415 notes: String,
1416 #[arg(long)]
1417 json: bool,
1418 },
1419 TrajectoryStep {
1422 frontier: PathBuf,
1424 trajectory_id: String,
1426 #[arg(long)]
1428 kind: String,
1429 #[arg(long)]
1433 description: String,
1434 #[arg(long)]
1436 actor: String,
1437 #[arg(long)]
1439 reason: String,
1440 #[arg(long)]
1443 reference: Vec<String>,
1444 #[arg(long)]
1445 json: bool,
1446 },
1447 Trajectories {
1449 frontier: PathBuf,
1450 #[arg(long)]
1452 target: Option<String>,
1453 #[arg(long)]
1454 json: bool,
1455 },
1456 TierSet {
1462 frontier: PathBuf,
1464 #[arg(long)]
1466 object_type: String,
1467 #[arg(long)]
1469 object_id: String,
1470 #[arg(long)]
1472 tier: String,
1473 #[arg(long)]
1476 actor: String,
1477 #[arg(long)]
1480 reason: String,
1481 #[arg(long)]
1482 json: bool,
1483 },
1484 Predict {
1491 frontier: PathBuf,
1493 #[arg(long)]
1495 by: String,
1496 #[arg(long)]
1499 claim: String,
1500 #[arg(long)]
1502 criterion: String,
1503 #[arg(long)]
1505 resolves_by: Option<String>,
1506 #[arg(long)]
1508 confidence: f64,
1509 #[arg(long, default_value = "")]
1511 target: String,
1512 #[arg(long, default_value = "affirmed")]
1514 outcome: String,
1515 #[arg(long, default_value = "")]
1517 conditions: String,
1518 #[arg(long)]
1520 json: bool,
1521 },
1522 Resolve {
1527 frontier: PathBuf,
1529 prediction: String,
1531 #[arg(long)]
1533 outcome: String,
1534 #[arg(long)]
1536 matched: bool,
1537 #[arg(long)]
1540 by: String,
1541 #[arg(long, default_value = "1.0")]
1543 confidence: f64,
1544 #[arg(long, default_value = "")]
1546 source_title: String,
1547 #[arg(long)]
1549 doi: Option<String>,
1550 #[arg(long)]
1552 json: bool,
1553 },
1554 Predictions {
1556 frontier: PathBuf,
1557 #[arg(long)]
1559 by: Option<String>,
1560 #[arg(long)]
1562 open: bool,
1563 #[arg(long)]
1565 json: bool,
1566 },
1567 Calibration {
1570 frontier: PathBuf,
1571 #[arg(long)]
1573 actor: Option<String>,
1574 #[arg(long)]
1576 json: bool,
1577 },
1578 PredictionsExpire {
1586 frontier: PathBuf,
1587 #[arg(long)]
1590 now: Option<String>,
1591 #[arg(long)]
1594 dry_run: bool,
1595 #[arg(long)]
1596 json: bool,
1597 },
1598 Consensus {
1607 frontier: PathBuf,
1609 target: String,
1611 #[arg(long, default_value = "composite")]
1614 weighting: String,
1615 #[arg(long)]
1620 causal_claim: Option<String>,
1621 #[arg(long)]
1626 causal_grade_min: Option<String>,
1627 #[arg(long)]
1629 json: bool,
1630 },
1631
1632 Ingest {
1648 path: String,
1651 #[arg(long)]
1654 frontier: PathBuf,
1655 #[arg(short, long)]
1659 backend: Option<String>,
1660 #[arg(long)]
1664 actor: Option<String>,
1665 #[arg(long)]
1667 dry_run: bool,
1668 #[arg(long)]
1669 json: bool,
1670 },
1671
1672 Propose {
1678 frontier: PathBuf,
1679 finding_id: String,
1680 #[arg(long)]
1682 status: String,
1683 #[arg(long)]
1684 reason: String,
1685 #[arg(long)]
1686 reviewer: String,
1687 #[arg(long)]
1690 apply: bool,
1691 #[arg(long)]
1692 json: bool,
1693 },
1694
1695 Accept {
1699 frontier: PathBuf,
1700 proposal_id: String,
1701 #[arg(long)]
1702 reviewer: String,
1703 #[arg(long)]
1704 reason: String,
1705 #[arg(long)]
1706 json: bool,
1707 },
1708
1709 Attest {
1721 frontier: PathBuf,
1723 #[arg(long)]
1727 event: Option<String>,
1728 #[arg(long)]
1731 attester: Option<String>,
1732 #[arg(long)]
1735 scope_note: Option<String>,
1736 #[arg(long)]
1739 proof_id: Option<String>,
1740 #[arg(long)]
1745 signature: Option<String>,
1746 #[arg(long)]
1749 key: Option<PathBuf>,
1750 #[arg(long)]
1751 json: bool,
1752 },
1753
1754 Lineage {
1757 frontier: PathBuf,
1758 finding_id: String,
1759 #[arg(long, value_name = "RFC3339_TIMESTAMP")]
1760 as_of: Option<String>,
1761 #[arg(long)]
1762 json: bool,
1763 },
1764
1765 Carina {
1768 #[command(subcommand)]
1769 action: CarinaAction,
1770 },
1771
1772 Atlas {
1777 #[command(subcommand)]
1778 action: AtlasAction,
1779 },
1780
1781 Constellation {
1787 #[command(subcommand)]
1788 action: ConstellationAction,
1789 },
1790}
1791
1792#[derive(Subcommand)]
1797enum AtlasAction {
1798 Init {
1803 name: String,
1806 #[arg(long, value_delimiter = ',', num_args = 1..)]
1808 frontiers: Vec<PathBuf>,
1809 #[arg(long, default_value = "general")]
1812 domain: String,
1813 #[arg(long)]
1815 scope_note: Option<String>,
1816 #[arg(long, default_value = "atlases")]
1818 atlases_root: PathBuf,
1819 #[arg(long)]
1820 json: bool,
1821 },
1822 Materialize {
1826 name: String,
1828 #[arg(long, default_value = "atlases")]
1829 atlases_root: PathBuf,
1830 #[arg(long)]
1831 json: bool,
1832 },
1833 Serve {
1838 name: String,
1839 #[arg(long, default_value = "atlases")]
1840 atlases_root: PathBuf,
1841 #[arg(long, default_value_t = 3848)]
1842 port: u16,
1843 #[arg(long)]
1844 no_open: bool,
1845 },
1846 Update {
1853 name: String,
1854 #[arg(long, value_delimiter = ',')]
1857 add_frontier: Vec<PathBuf>,
1858 #[arg(long, value_delimiter = ',')]
1861 remove_vfr_id: Vec<String>,
1862 #[arg(long, default_value = "atlases")]
1863 atlases_root: PathBuf,
1864 #[arg(long)]
1865 json: bool,
1866 },
1867}
1868
1869#[derive(Subcommand)]
1873enum ConstellationAction {
1874 Init {
1878 name: String,
1879 #[arg(long, value_delimiter = ',', num_args = 1..)]
1881 atlases: Vec<PathBuf>,
1882 #[arg(long)]
1883 scope_note: Option<String>,
1884 #[arg(long, default_value = "constellations")]
1885 constellations_root: PathBuf,
1886 #[arg(long)]
1887 json: bool,
1888 },
1889 Materialize {
1894 name: String,
1895 #[arg(long, default_value = "constellations")]
1896 constellations_root: PathBuf,
1897 #[arg(long)]
1898 json: bool,
1899 },
1900 Serve {
1904 name: String,
1905 #[arg(long, default_value = "constellations")]
1906 constellations_root: PathBuf,
1907 #[arg(long, default_value_t = 3849)]
1908 port: u16,
1909 #[arg(long)]
1910 no_open: bool,
1911 },
1912}
1913
1914#[derive(Subcommand)]
1918enum CarinaAction {
1919 Validate {
1924 path: PathBuf,
1928 #[arg(long)]
1931 primitive: Option<String>,
1932 #[arg(long)]
1933 json: bool,
1934 },
1935 List {
1937 #[arg(long)]
1938 json: bool,
1939 },
1940 Schema { primitive: String },
1942}
1943
1944#[derive(Subcommand)]
1945enum PacketAction {
1946 Inspect {
1948 path: PathBuf,
1949 #[arg(long)]
1950 json: bool,
1951 },
1952 Validate {
1954 path: PathBuf,
1955 #[arg(long)]
1956 json: bool,
1957 },
1958}
1959
1960#[derive(Subcommand)]
1961enum SignAction {
1962 GenerateKeypair {
1964 #[arg(long, default_value = ".vela/keys")]
1965 out: PathBuf,
1966 #[arg(long)]
1967 json: bool,
1968 },
1969 Apply {
1971 frontier: PathBuf,
1972 #[arg(long)]
1973 private_key: PathBuf,
1974 #[arg(long)]
1975 json: bool,
1976 },
1977 Verify {
1979 frontier: PathBuf,
1980 #[arg(long)]
1981 public_key: Option<PathBuf>,
1982 #[arg(long)]
1983 json: bool,
1984 },
1985 ThresholdSet {
1990 frontier: PathBuf,
1991 finding_id: String,
1993 #[arg(long)]
1995 to: u32,
1996 #[arg(long)]
1997 json: bool,
1998 },
1999}
2000
2001#[derive(Subcommand)]
2002enum ActorAction {
2003 Add {
2005 frontier: PathBuf,
2006 id: String,
2008 #[arg(long)]
2010 pubkey: String,
2011 #[arg(long)]
2015 tier: Option<String>,
2016 #[arg(long)]
2020 orcid: Option<String>,
2021 #[arg(long)]
2026 clearance: Option<String>,
2027 #[arg(long)]
2028 json: bool,
2029 },
2030 List {
2032 frontier: PathBuf,
2033 #[arg(long)]
2034 json: bool,
2035 },
2036}
2037
2038#[derive(Subcommand)]
2039enum CausalAction {
2040 Audit {
2044 frontier: PathBuf,
2045 #[arg(long)]
2048 problems_only: bool,
2049 #[arg(long)]
2050 json: bool,
2051 },
2052 Effect {
2065 frontier: PathBuf,
2066 source: String,
2068 #[arg(long)]
2070 on: String,
2071 #[arg(long)]
2072 json: bool,
2073 },
2074 Graph {
2077 frontier: PathBuf,
2078 #[arg(long)]
2080 node: Option<String>,
2081 #[arg(long)]
2082 json: bool,
2083 },
2084 Counterfactual {
2091 frontier: PathBuf,
2092 intervene_on: String,
2094 #[arg(long)]
2096 set_to: f64,
2097 #[arg(long)]
2099 target: String,
2100 #[arg(long)]
2101 json: bool,
2102 },
2103}
2104
2105#[derive(Subcommand)]
2106enum BridgesAction {
2107 Derive {
2111 frontier_a: PathBuf,
2114 #[arg(long, default_value = "a")]
2116 label_a: String,
2117 frontier_b: PathBuf,
2119 #[arg(long, default_value = "b")]
2121 label_b: String,
2122 #[arg(long)]
2123 json: bool,
2124 },
2125 List {
2127 frontier: PathBuf,
2129 #[arg(long)]
2131 status: Option<String>,
2132 #[arg(long)]
2133 json: bool,
2134 },
2135 Show {
2137 frontier: PathBuf,
2138 bridge_id: String,
2139 #[arg(long)]
2140 json: bool,
2141 },
2142 Confirm {
2147 frontier: PathBuf,
2148 bridge_id: String,
2149 #[arg(long)]
2152 reviewer: Option<String>,
2153 #[arg(long)]
2155 note: Option<String>,
2156 #[arg(long)]
2157 json: bool,
2158 },
2159 Refute {
2162 frontier: PathBuf,
2163 bridge_id: String,
2164 #[arg(long)]
2165 reviewer: Option<String>,
2166 #[arg(long)]
2167 note: Option<String>,
2168 #[arg(long)]
2169 json: bool,
2170 },
2171}
2172
2173#[derive(Subcommand)]
2174enum FederationAction {
2175 PeerAdd {
2179 frontier: PathBuf,
2180 id: String,
2182 #[arg(long)]
2184 url: String,
2185 #[arg(long)]
2187 pubkey: String,
2188 #[arg(long, default_value = "")]
2190 note: String,
2191 #[arg(long)]
2192 json: bool,
2193 },
2194 PeerList {
2196 frontier: PathBuf,
2197 #[arg(long)]
2198 json: bool,
2199 },
2200 PeerRemove {
2204 frontier: PathBuf,
2205 id: String,
2206 #[arg(long)]
2207 json: bool,
2208 },
2209 Sync {
2226 frontier: PathBuf,
2227 peer_id: String,
2229 #[arg(long)]
2231 url: Option<String>,
2232 #[arg(long)]
2236 via_hub: bool,
2237 #[arg(long)]
2240 vfr_id: Option<String>,
2241 #[arg(long)]
2248 allow_cross_vfr: bool,
2249 #[arg(long)]
2251 dry_run: bool,
2252 #[arg(long)]
2253 json: bool,
2254 },
2255 PushResolution {
2268 frontier: PathBuf,
2269 conflict_event_id: String,
2273 #[arg(long = "to")]
2275 to: String,
2276 #[arg(long)]
2280 key: Option<PathBuf>,
2281 #[arg(long)]
2284 vfr_id: Option<String>,
2285 #[arg(long)]
2286 json: bool,
2287 },
2288}
2289
2290#[derive(Subcommand)]
2291enum FrontierAction {
2292 New {
2299 path: PathBuf,
2301 #[arg(long)]
2303 name: String,
2304 #[arg(long, default_value = "")]
2306 description: String,
2307 #[arg(long)]
2309 force: bool,
2310 #[arg(long)]
2311 json: bool,
2312 },
2313 Materialize {
2315 frontier: PathBuf,
2317 #[arg(long)]
2318 json: bool,
2319 },
2320 AddDep {
2324 frontier: PathBuf,
2326 vfr_id: String,
2328 #[arg(long)]
2331 locator: String,
2332 #[arg(long)]
2335 snapshot: String,
2336 #[arg(long)]
2338 name: Option<String>,
2339 #[arg(long)]
2340 json: bool,
2341 },
2342 ListDeps {
2344 frontier: PathBuf,
2345 #[arg(long)]
2346 json: bool,
2347 },
2348 RemoveDep {
2351 frontier: PathBuf,
2352 vfr_id: String,
2353 #[arg(long)]
2354 json: bool,
2355 },
2356 RefreshDeps {
2363 frontier: PathBuf,
2364 #[arg(long, default_value = "https://vela-hub.fly.dev")]
2366 from: String,
2367 #[arg(long)]
2369 dry_run: bool,
2370 #[arg(long)]
2371 json: bool,
2372 },
2373 Diff {
2385 frontier: PathBuf,
2387 #[arg(long)]
2390 since: Option<String>,
2391 #[arg(long)]
2394 week: Option<String>,
2395 #[arg(long)]
2397 json: bool,
2398 },
2399}
2400
2401#[derive(Subcommand)]
2402enum RepoAction {
2403 Status {
2405 frontier: PathBuf,
2407 #[arg(long)]
2409 json: bool,
2410 },
2411 Doctor {
2413 frontier: PathBuf,
2415 #[arg(long)]
2417 json: bool,
2418 },
2419}
2420
2421#[derive(Subcommand)]
2422enum QueueAction {
2423 List {
2425 #[arg(long)]
2426 queue_file: Option<PathBuf>,
2427 #[arg(long)]
2428 json: bool,
2429 },
2430 Sign {
2433 #[arg(long)]
2435 actor: String,
2436 #[arg(long)]
2438 key: PathBuf,
2439 #[arg(long)]
2441 queue_file: Option<PathBuf>,
2442 #[arg(long, alias = "all")]
2448 yes_to_all: bool,
2449 #[arg(long)]
2450 json: bool,
2451 },
2452 Clear {
2454 #[arg(long)]
2455 queue_file: Option<PathBuf>,
2456 #[arg(long)]
2457 json: bool,
2458 },
2459}
2460
2461#[derive(Subcommand)]
2462enum RegistryAction {
2463 List {
2465 #[arg(long)]
2467 from: Option<String>,
2468 #[arg(long)]
2469 json: bool,
2470 },
2471 Publish {
2473 frontier: PathBuf,
2475 #[arg(long)]
2477 owner: String,
2478 #[arg(long)]
2480 key: PathBuf,
2481 #[arg(long)]
2488 locator: Option<String>,
2489 #[arg(long)]
2491 to: Option<String>,
2492 #[arg(long)]
2493 json: bool,
2494 },
2495 DependsOn {
2502 vfr_id: String,
2504 #[arg(long, default_value = "https://vela-hub.fly.dev")]
2506 from: String,
2507 #[arg(long)]
2508 json: bool,
2509 },
2510 Mirror {
2518 vfr_id: String,
2520 #[arg(long)]
2522 from: String,
2523 #[arg(long)]
2525 to: String,
2526 #[arg(long)]
2527 json: bool,
2528 },
2529 Pull {
2531 vfr_id: String,
2533 #[arg(long)]
2535 from: Option<String>,
2536 #[arg(long)]
2540 out: PathBuf,
2541 #[arg(long)]
2544 transitive: bool,
2545 #[arg(long, default_value = "4")]
2548 depth: usize,
2549 #[arg(long)]
2550 json: bool,
2551 },
2552}
2553
2554#[derive(Subcommand)]
2555enum GapsAction {
2556 Rank {
2558 frontier: PathBuf,
2559 #[arg(long, default_value = "10")]
2560 top: usize,
2561 #[arg(long)]
2562 domain: Option<String>,
2563 #[arg(long)]
2564 json: bool,
2565 },
2566}
2567
2568#[derive(Subcommand)]
2569enum LinkAction {
2570 Add {
2575 frontier: PathBuf,
2577 #[arg(long)]
2579 from: String,
2580 #[arg(long)]
2582 to: String,
2583 #[arg(long, default_value = "supports")]
2585 r#type: String,
2586 #[arg(long, default_value = "")]
2588 note: String,
2589 #[arg(long, default_value = "reviewer")]
2591 inferred_by: String,
2592 #[arg(long)]
2601 no_check_target: bool,
2602 #[arg(long)]
2603 json: bool,
2604 },
2605}
2606
// Subcommands of `vela entity` (dispatched through `cmd_entity`).
// Plain `//` comments on purpose: `///` would become clap help text.
#[derive(Subcommand)]
enum EntityAction {
    // Resolve entities in a frontier (resolution strategy lives in the
    // handler — not visible here).
    Resolve {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Force re-resolution (presumably overwriting existing
        // resolutions — confirm in handler).
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    // List entities. NOTE(review): unlike `Resolve`, this variant takes
    // no frontier path — the handler decides the source.
    List {
        #[arg(long)]
        json: bool,
    },
}
2629
// Subcommands of `vela finding`. The match arms in `run_command` validate
// the enum-like string args against `bundle::VALID_*` lists, parse the
// list-valued args, and then delegate to `state::add_finding`,
// `state::supersede_finding`, or `state::set_causal`.
// Plain `//` comments on purpose: `///` would become clap help text.
#[derive(Subcommand)]
enum FindingCommands {
    // Add a new finding to a frontier.
    Add {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Assertion text of the finding.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Actor recording the finding.
        #[arg(long)]
        author: String,
        // Initial confidence. Assumed to be in [0, 1] — the CLI does not
        // enforce a range here; confirm in state::add_finding.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entity list, parsed by `parse_entities`; each entity type is
        // then validated against bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Evidence spans (repeatable flag); parsed by `parse_evidence_spans`.
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata, passed through to the draft.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Author list; the handler splits on ';' and trims each entry.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Species list; the handler splits on ';' and trims each entry.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags, passed through to the draft.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        #[arg(long)]
        json: bool,
        // Forwarded to state::add_finding; presumably applies the change
        // rather than leaving it pending — confirm in `state`.
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new draft, recording a reason.
    // Shares most draft fields with `Add`, but note the different
    // defaults (confidence 0.5, evidence_type "experimental"); the
    // handler hard-codes entities_reviewed=false, no evidence spans,
    // gap=false, negative_space=false for the replacement draft.
    Supersede {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Id of the finding being superseded (positional).
        old_id: String,
        // Assertion text of the replacement finding.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Actor recording the replacement.
        #[arg(long)]
        author: String,
        // Why the old finding is being superseded.
        #[arg(long)]
        reason: String,
        // Confidence of the replacement draft.
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        // Entity list, parsed by `parse_entities` and validated against
        // bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        // Optional bibliographic metadata.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Author list; the handler splits on ';' and trims each entry.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Species list; the handler splits on ';' and trims each entry.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        #[arg(long)]
        json: bool,
        // Forwarded to state::supersede_finding.
        #[arg(long)]
        apply: bool,
    },
    // Set or update the causal-claim annotation on a finding.
    CausalSet {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Id of the finding to annotate (positional).
        finding_id: String,
        // Causal claim label; validated against bundle::VALID_CAUSAL_CLAIMS.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; validated against
        // bundle::VALID_CAUSAL_EVIDENCE_GRADES when present.
        #[arg(long)]
        grade: Option<String>,
        // Actor making the change.
        #[arg(long)]
        actor: String,
        // Reason recorded with the change.
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2820
// Subcommands of `vela proposals` (dispatched through `cmd_proposals`):
// inspect, validate, import/export, and decide on proposals.
// Plain `//` comments on purpose: `///` would become clap help text.
#[derive(Subcommand)]
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Optional status filter (valid values defined by the handler).
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal by id.
    Show {
        frontier: PathBuf,
        // Proposal id (positional).
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of a proposal without applying it.
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external source into a frontier.
    Import {
        frontier: PathBuf,
        // Path to the proposal source to import (positional).
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposal source without importing it; note this variant
    // takes no frontier path.
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals to a file, optionally filtered by status.
    Export {
        frontier: PathBuf,
        // Output path for the export (positional).
        output: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal; both `--reviewer` and `--reason` are required.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal; both `--reviewer` and `--reason` are required.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2892
// Subcommands of `vela source-adapter` (dispatched through
// `cmd_source_adapter`, async): run a named source adapter against a
// frontier. Plain `//` comments on purpose: `///` becomes clap help text.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // Run one adapter over the frontier's source entries.
    Run {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Name of the adapter to run (positional).
        adapter: String,
        // Actor recorded for provenance.
        #[arg(long)]
        actor: String,
        // Restrict the run to specific entries; repeat `--entry` to pass
        // several (flag renamed from the `entries` field).
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority filter (semantics defined by the handler).
        #[arg(long)]
        priority: Option<String>,
        // Also process entries normally excluded — confirm exact
        // exclusion semantics in the handler.
        #[arg(long)]
        include_excluded: bool,
        // Tolerate partial results instead of failing outright
        // (handler-defined behavior).
        #[arg(long)]
        allow_partial: bool,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched input files.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Also apply produced artifacts (handler-defined).
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
}
2930
// Subcommands of `vela runtime-adapter` (dispatched through
// `cmd_runtime_adapter`): run a named runtime adapter on a single input.
// Plain `//` comments on purpose: `///` becomes clap help text.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // Run one adapter against one input file.
    Run {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Name of the adapter to run (positional).
        adapter: String,
        // Required input file for the adapter.
        #[arg(long)]
        input: PathBuf,
        // Actor recorded for provenance.
        #[arg(long)]
        actor: String,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Also apply produced artifacts (handler-defined).
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
}
2956
// Subcommands of `vela bridge-kit` (dispatched through `cmd_bridge_kit`,
// async). Plain `//` comments on purpose: `///` becomes clap help text.
#[derive(Subcommand)]
enum BridgeKitAction {
    // Validate a bridge-kit source file.
    Validate {
        // Path to the source to validate (positional).
        source: PathBuf,
        // Emit machine-readable JSON instead of human-readable output.
        #[arg(long)]
        json: bool,
    },
    // Verify the provenance of a packet file.
    VerifyProvenance {
        // Path to the packet to verify (positional).
        packet: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2981
2982pub async fn run_command() {
2983 dotenvy::dotenv().ok();
2984
2985 match Cli::parse().command {
2986 Commands::Scout {
2987 folder,
2988 frontier,
2989 backend,
2990 dry_run,
2991 json,
2992 } => {
2993 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
2994 }
2995 Commands::CompileNotes {
2996 vault,
2997 frontier,
2998 backend,
2999 max_files,
3000 max_items_per_category,
3001 dry_run,
3002 json,
3003 } => {
3004 cmd_compile_notes(
3005 &vault,
3006 &frontier,
3007 backend.as_deref(),
3008 max_files,
3009 max_items_per_category,
3010 dry_run,
3011 json,
3012 )
3013 .await;
3014 }
3015 Commands::CompileCode {
3016 root,
3017 frontier,
3018 backend,
3019 max_files,
3020 dry_run,
3021 json,
3022 } => {
3023 cmd_compile_code(
3024 &root,
3025 &frontier,
3026 backend.as_deref(),
3027 max_files,
3028 dry_run,
3029 json,
3030 )
3031 .await;
3032 }
3033 Commands::CompileData {
3034 root,
3035 frontier,
3036 backend,
3037 sample_rows,
3038 dry_run,
3039 json,
3040 } => {
3041 cmd_compile_data(
3042 &root,
3043 &frontier,
3044 backend.as_deref(),
3045 sample_rows,
3046 dry_run,
3047 json,
3048 )
3049 .await;
3050 }
3051 Commands::ReviewPending {
3052 frontier,
3053 backend,
3054 max_proposals,
3055 batch_size,
3056 dry_run,
3057 json,
3058 } => {
3059 cmd_review_pending(
3060 &frontier,
3061 backend.as_deref(),
3062 max_proposals,
3063 batch_size,
3064 dry_run,
3065 json,
3066 )
3067 .await;
3068 }
3069 Commands::FindTensions {
3070 frontier,
3071 backend,
3072 max_findings,
3073 dry_run,
3074 json,
3075 } => {
3076 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3077 }
3078 Commands::PlanExperiments {
3079 frontier,
3080 backend,
3081 max_findings,
3082 dry_run,
3083 json,
3084 } => {
3085 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3086 }
3087 Commands::Check {
3088 source,
3089 schema,
3090 stats,
3091 conformance,
3092 conformance_dir,
3093 all,
3094 schema_only,
3095 strict,
3096 fix,
3097 json,
3098 } => cmd_check(
3099 source.as_deref(),
3100 schema,
3101 stats,
3102 conformance,
3103 &conformance_dir,
3104 all,
3105 schema_only,
3106 strict,
3107 fix,
3108 json,
3109 ),
3110 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3111 Commands::Impact {
3112 frontier,
3113 finding_id,
3114 depth,
3115 json,
3116 } => cmd_impact(&frontier, &finding_id, depth, json),
3117 Commands::Discord {
3118 frontier,
3119 json,
3120 kind,
3121 } => cmd_discord(&frontier, json, kind.as_deref()),
3122 Commands::Normalize {
3123 source,
3124 out,
3125 write,
3126 dry_run,
3127 rewrite_ids,
3128 id_map,
3129 resync_provenance,
3130 json,
3131 } => cmd_normalize(
3132 &source,
3133 out.as_deref(),
3134 write,
3135 dry_run,
3136 rewrite_ids,
3137 id_map.as_deref(),
3138 resync_provenance,
3139 json,
3140 ),
3141 Commands::Proof {
3142 frontier,
3143 out,
3144 template,
3145 gold,
3146 record_proof_state,
3147 json,
3148 } => cmd_proof(
3149 &frontier,
3150 &out,
3151 &template,
3152 gold.as_deref(),
3153 record_proof_state,
3154 json,
3155 ),
3156 Commands::Repo { action } => cmd_repo(action),
3157 Commands::Serve {
3158 frontier,
3159 frontiers,
3160 backend,
3161 http,
3162 setup,
3163 check_tools,
3164 json,
3165 workbench,
3166 } => {
3167 if setup {
3168 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3169 } else if check_tools {
3170 let source =
3171 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3172 match serve::check_tools(source) {
3173 Ok(report) => {
3174 if json {
3175 println!(
3176 "{}",
3177 serde_json::to_string_pretty(&report)
3178 .expect("failed to serialize tool check report")
3179 );
3180 } else {
3181 print_tool_check_report(&report);
3182 }
3183 }
3184 Err(e) => fail(&format!("Tool check failed: {e}")),
3185 }
3186 } else {
3187 let source =
3188 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3189 let resolved_port = if workbench {
3191 Some(http.unwrap_or(3848))
3192 } else {
3193 http
3194 };
3195 if let Some(port) = resolved_port {
3196 serve::run_http(source, backend.as_deref(), port, workbench).await;
3197 } else {
3198 serve::run(source, backend.as_deref()).await;
3199 }
3200 }
3201 }
3202 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3203 Commands::Log {
3204 frontier,
3205 limit,
3206 kind,
3207 json,
3208 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3209 Commands::Inbox {
3210 frontier,
3211 kind,
3212 limit,
3213 json,
3214 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3215 Commands::Ask {
3216 frontier,
3217 question,
3218 json,
3219 } => cmd_ask(&frontier, &question.join(" "), json),
3220 Commands::Stats { frontier, json } => {
3221 if json {
3222 print_stats_json(&frontier);
3223 } else {
3224 cmd_stats(&frontier);
3225 }
3226 }
3227 Commands::Search {
3228 source,
3229 query,
3230 entity,
3231 r#type,
3232 all,
3233 limit,
3234 json,
3235 } => cmd_search(
3236 source.as_deref(),
3237 &query,
3238 entity.as_deref(),
3239 r#type.as_deref(),
3240 all.as_deref(),
3241 limit,
3242 json,
3243 ),
3244 Commands::Tensions {
3245 source,
3246 both_high,
3247 cross_domain,
3248 top,
3249 json,
3250 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3251 Commands::Gaps { action } => cmd_gaps(action),
3252 Commands::Bridge {
3253 inputs,
3254 novelty,
3255 top,
3256 } => cmd_bridge(&inputs, novelty, top).await,
3257 Commands::Export {
3258 frontier,
3259 format,
3260 output,
3261 } => export::run(&frontier, &format, output.as_deref()),
3262 Commands::Packet { action } => cmd_packet(action),
3263 Commands::Verify { path, json } => cmd_verify(&path, json),
3264 Commands::Bench {
3265 frontier,
3266 gold,
3267 candidate,
3268 sources,
3269 threshold,
3270 report,
3271 entity_gold,
3272 link_gold,
3273 suite,
3274 suite_ready,
3275 min_f1,
3276 min_precision,
3277 min_recall,
3278 no_thresholds,
3279 json,
3280 } => {
3281 if let Some(cand) = candidate.clone() {
3286 let Some(g) = gold.clone() else {
3287 eprintln!(
3288 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3289 style::err_prefix()
3290 );
3291 std::process::exit(2);
3292 };
3293 cmd_agent_bench(
3294 &g,
3295 &cand,
3296 sources.as_deref(),
3297 threshold,
3298 report.as_deref(),
3299 json,
3300 );
3301 } else {
3302 cmd_bench(BenchArgs {
3303 frontier,
3304 gold,
3305 entity_gold,
3306 link_gold,
3307 suite,
3308 suite_ready,
3309 min_f1,
3310 min_precision,
3311 min_recall,
3312 no_thresholds,
3313 json,
3314 });
3315 }
3316 }
3317 Commands::Conformance { dir } => {
3318 let _ = conformance::run(&dir);
3319 }
3320 Commands::Version => println!("vela 0.36.0"),
3321 Commands::Sign { action } => cmd_sign(action),
3322 Commands::Actor { action } => cmd_actor(action),
3323 Commands::Federation { action } => cmd_federation(action),
3324 Commands::Causal { action } => cmd_causal(action),
3325 Commands::Frontier { action } => cmd_frontier(action),
3326 Commands::Queue { action } => cmd_queue(action),
3327 Commands::Registry { action } => cmd_registry(action),
3328 Commands::Init {
3329 path,
3330 name,
3331 template,
3332 no_git,
3333 json,
3334 } => cmd_init(&path, &name, &template, !no_git, json),
3335 Commands::Quickstart {
3336 path,
3337 name,
3338 reviewer,
3339 assertion,
3340 keys_out,
3341 json,
3342 } => cmd_quickstart(
3343 &path,
3344 &name,
3345 &reviewer,
3346 assertion.as_deref(),
3347 keys_out.as_deref(),
3348 json,
3349 ),
3350 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3351 Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
3352 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3353 Commands::Diff {
3354 target,
3355 frontier_b,
3356 frontier,
3357 reviewer,
3358 json,
3359 quiet,
3360 } => {
3361 if target.starts_with("vpr_") {
3366 let frontier_root = frontier
3367 .clone()
3368 .or_else(|| frontier_b.clone())
3369 .unwrap_or_else(|| std::path::PathBuf::from("."));
3370 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3371 .unwrap_or_else(|e| fail_return(&e));
3372 let payload = json!({
3373 "ok": true,
3374 "command": "diff.proposal",
3375 "frontier": frontier_root.display().to_string(),
3376 "proposal_id": target,
3377 "preview": preview,
3378 });
3379 if json {
3380 println!(
3381 "{}",
3382 serde_json::to_string_pretty(&payload)
3383 .expect("failed to serialize diff preview")
3384 );
3385 } else {
3386 println!("vela diff · proposal preview");
3387 println!(" proposal: {}", target);
3388 println!(" kind: {}", preview.kind);
3389 println!(
3390 " findings: {} -> {}",
3391 preview.findings_before, preview.findings_after
3392 );
3393 println!(
3394 " artifacts: {} -> {}",
3395 preview.artifacts_before, preview.artifacts_after
3396 );
3397 println!(
3398 " events: {} -> {}",
3399 preview.events_before, preview.events_after
3400 );
3401 if !preview.changed_findings.is_empty() {
3402 println!(
3403 " findings changed: {}",
3404 preview.changed_findings.join(", ")
3405 );
3406 }
3407 }
3408 } else {
3409 let frontier_a = std::path::PathBuf::from(&target);
3410 let b = frontier_b.unwrap_or_else(|| {
3411 fail_return(
3412 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3413 )
3414 });
3415 diff::run(&frontier_a, &b, json, quiet);
3416 }
3417 }
3418 Commands::Proposals { action } => cmd_proposals(action),
3419 Commands::ArtifactToState {
3420 frontier,
3421 packet,
3422 actor,
3423 apply_artifacts,
3424 json,
3425 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3426 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
3427 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3428 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3429 Commands::Link { action } => cmd_link(action),
3430 Commands::Workbench {
3431 path,
3432 port,
3433 no_open,
3434 } => {
3435 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3436 fail(&e);
3437 }
3438 }
3439 Commands::Bridges { action } => cmd_bridges(action),
3440 Commands::Entity { action } => cmd_entity(action),
3441 Commands::Finding { command } => match command {
3442 FindingCommands::Add {
3443 frontier,
3444 assertion,
3445 r#type,
3446 source,
3447 source_type,
3448 author,
3449 confidence,
3450 evidence_type,
3451 entities,
3452 entities_reviewed,
3453 evidence_span,
3454 gap,
3455 negative_space,
3456 doi,
3457 pmid,
3458 year,
3459 journal,
3460 url,
3461 source_authors,
3462 conditions_text,
3463 species,
3464 in_vivo,
3465 in_vitro,
3466 human_data,
3467 clinical_trial,
3468 json,
3469 apply,
3470 } => {
3471 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3472 validate_enum_arg(
3473 "--evidence-type",
3474 &evidence_type,
3475 bundle::VALID_EVIDENCE_TYPES,
3476 );
3477 validate_enum_arg(
3478 "--source-type",
3479 &source_type,
3480 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3481 );
3482 let parsed_entities = parse_entities(&entities);
3483 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3484 for (name, etype) in &parsed_entities {
3485 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3486 fail(&format!(
3487 "invalid entity type '{}' for '{}'. Valid: {}",
3488 etype,
3489 name,
3490 bundle::VALID_ENTITY_TYPES.join(", "),
3491 ));
3492 }
3493 }
3494 let parsed_source_authors = source_authors
3495 .map(|s| {
3496 s.split(';')
3497 .map(|a| a.trim().to_string())
3498 .filter(|a| !a.is_empty())
3499 .collect()
3500 })
3501 .unwrap_or_default();
3502 let parsed_species = species
3503 .map(|s| {
3504 s.split(';')
3505 .map(|a| a.trim().to_string())
3506 .filter(|a| !a.is_empty())
3507 .collect()
3508 })
3509 .unwrap_or_default();
3510 let report = state::add_finding(
3511 &frontier,
3512 state::FindingDraftOptions {
3513 text: assertion,
3514 assertion_type: r#type,
3515 source,
3516 source_type,
3517 author,
3518 confidence,
3519 evidence_type,
3520 entities: parsed_entities,
3521 doi,
3522 pmid,
3523 year,
3524 journal,
3525 url,
3526 source_authors: parsed_source_authors,
3527 conditions_text,
3528 species: parsed_species,
3529 in_vivo,
3530 in_vitro,
3531 human_data,
3532 clinical_trial,
3533 entities_reviewed,
3534 evidence_spans: parsed_evidence_spans,
3535 gap,
3536 negative_space,
3537 },
3538 apply,
3539 )
3540 .unwrap_or_else(|e| fail_return(&e));
3541 print_state_report(&report, json);
3542 }
3543 FindingCommands::Supersede {
3544 frontier,
3545 old_id,
3546 assertion,
3547 r#type,
3548 source,
3549 source_type,
3550 author,
3551 reason,
3552 confidence,
3553 evidence_type,
3554 entities,
3555 doi,
3556 pmid,
3557 year,
3558 journal,
3559 url,
3560 source_authors,
3561 conditions_text,
3562 species,
3563 in_vivo,
3564 in_vitro,
3565 human_data,
3566 clinical_trial,
3567 json,
3568 apply,
3569 } => {
3570 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3571 validate_enum_arg(
3572 "--evidence-type",
3573 &evidence_type,
3574 bundle::VALID_EVIDENCE_TYPES,
3575 );
3576 validate_enum_arg(
3577 "--source-type",
3578 &source_type,
3579 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3580 );
3581 let parsed_entities = parse_entities(&entities);
3582 for (name, etype) in &parsed_entities {
3583 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3584 fail(&format!(
3585 "invalid entity type '{}' for '{}'. Valid: {}",
3586 etype,
3587 name,
3588 bundle::VALID_ENTITY_TYPES.join(", "),
3589 ));
3590 }
3591 }
3592 let parsed_source_authors = source_authors
3593 .map(|s| {
3594 s.split(';')
3595 .map(|a| a.trim().to_string())
3596 .filter(|a| !a.is_empty())
3597 .collect()
3598 })
3599 .unwrap_or_default();
3600 let parsed_species = species
3601 .map(|s| {
3602 s.split(';')
3603 .map(|a| a.trim().to_string())
3604 .filter(|a| !a.is_empty())
3605 .collect()
3606 })
3607 .unwrap_or_default();
3608 let report = state::supersede_finding(
3609 &frontier,
3610 &old_id,
3611 &reason,
3612 state::FindingDraftOptions {
3613 text: assertion,
3614 assertion_type: r#type,
3615 source,
3616 source_type,
3617 author,
3618 confidence,
3619 evidence_type,
3620 entities: parsed_entities,
3621 doi,
3622 pmid,
3623 year,
3624 journal,
3625 url,
3626 source_authors: parsed_source_authors,
3627 conditions_text,
3628 species: parsed_species,
3629 in_vivo,
3630 in_vitro,
3631 human_data,
3632 clinical_trial,
3633 entities_reviewed: false,
3634 evidence_spans: Vec::new(),
3635 gap: false,
3636 negative_space: false,
3637 },
3638 apply,
3639 )
3640 .unwrap_or_else(|e| fail_return(&e));
3641 print_state_report(&report, json);
3642 }
3643 FindingCommands::CausalSet {
3644 frontier,
3645 finding_id,
3646 claim,
3647 grade,
3648 actor,
3649 reason,
3650 json,
3651 } => {
3652 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3653 fail(&format!(
3654 "invalid --claim '{claim}'; valid: {:?}",
3655 bundle::VALID_CAUSAL_CLAIMS
3656 ));
3657 }
3658 if let Some(g) = grade.as_deref()
3659 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3660 {
3661 fail(&format!(
3662 "invalid --grade '{g}'; valid: {:?}",
3663 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3664 ));
3665 }
3666 let report = state::set_causal(
3667 &frontier,
3668 &finding_id,
3669 &claim,
3670 grade.as_deref(),
3671 &actor,
3672 &reason,
3673 )
3674 .unwrap_or_else(|e| fail_return(&e));
3675 print_state_report(&report, json);
3676 }
3677 },
3678 Commands::Review {
3679 frontier,
3680 finding_id,
3681 status,
3682 reason,
3683 reviewer,
3684 apply,
3685 json,
3686 } => {
3687 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3688 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3689 let report = state::review_finding(
3690 &frontier,
3691 &finding_id,
3692 state::ReviewOptions {
3693 status,
3694 reason,
3695 reviewer,
3696 },
3697 apply,
3698 )
3699 .unwrap_or_else(|e| fail_return(&e));
3700 print_state_report(&report, json);
3701 }
3702 Commands::Note {
3703 frontier,
3704 finding_id,
3705 text,
3706 author,
3707 apply,
3708 json,
3709 } => {
3710 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3711 .unwrap_or_else(|e| fail_return(&e));
3712 print_state_report(&report, json);
3713 }
3714 Commands::Caveat {
3715 frontier,
3716 finding_id,
3717 text,
3718 author,
3719 apply,
3720 json,
3721 } => {
3722 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3723 .unwrap_or_else(|e| fail_return(&e));
3724 print_state_report(&report, json);
3725 }
3726 Commands::Revise {
3727 frontier,
3728 finding_id,
3729 confidence,
3730 reason,
3731 reviewer,
3732 apply,
3733 json,
3734 } => {
3735 let report = state::revise_confidence(
3736 &frontier,
3737 &finding_id,
3738 state::ReviseOptions {
3739 confidence,
3740 reason,
3741 reviewer,
3742 },
3743 apply,
3744 )
3745 .unwrap_or_else(|e| fail_return(&e));
3746 print_state_report(&report, json);
3747 }
3748 Commands::Reject {
3749 frontier,
3750 finding_id,
3751 reason,
3752 reviewer,
3753 apply,
3754 json,
3755 } => {
3756 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3757 .unwrap_or_else(|e| fail_return(&e));
3758 print_state_report(&report, json);
3759 }
3760 Commands::History {
3761 frontier,
3762 finding_id,
3763 json,
3764 as_of,
3765 } => {
3766 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3767 .unwrap_or_else(|e| fail_return(&e));
3768 if json {
3769 println!(
3770 "{}",
3771 serde_json::to_string_pretty(&payload)
3772 .expect("failed to serialize history response")
3773 );
3774 } else {
3775 print_history(&payload);
3776 }
3777 }
3778 Commands::ImportEvents { source, into, json } => {
3779 let report =
3780 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3781 if json {
3782 println!(
3783 "{}",
3784 serde_json::to_string_pretty(&json!({
3785 "ok": true,
3786 "command": "import-events",
3787 "source": report.source,
3788 "target": into.display().to_string(),
3789 "summary": {
3790 "imported": report.imported,
3791 "new": report.new,
3792 "duplicate": report.duplicate,
3793 "canonical_events_imported": report.events_imported,
3794 "canonical_events_new": report.events_new,
3795 "canonical_events_duplicate": report.events_duplicate,
3796 }
3797 }))
3798 .expect("failed to serialize import-events response")
3799 );
3800 } else {
3801 println!("{report}");
3802 }
3803 }
3804 Commands::Retract {
3805 source,
3806 finding_id,
3807 reason,
3808 reviewer,
3809 apply,
3810 json,
3811 } => {
3812 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3813 .unwrap_or_else(|e| fail_return(&e));
3814 print_state_report(&report, json);
3815 }
3816 Commands::LocatorRepair {
3817 frontier,
3818 atom_id,
3819 locator,
3820 reviewer,
3821 reason,
3822 apply,
3823 json,
3824 } => {
3825 cmd_locator_repair(
3826 &frontier,
3827 &atom_id,
3828 locator.as_deref(),
3829 &reviewer,
3830 &reason,
3831 apply,
3832 json,
3833 );
3834 }
3835 Commands::SourceFetch {
3836 identifier,
3837 cache,
3838 out,
3839 refresh,
3840 json,
3841 } => {
3842 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3843 }
3844 Commands::SpanRepair {
3845 frontier,
3846 finding_id,
3847 section,
3848 text,
3849 reviewer,
3850 reason,
3851 apply,
3852 json,
3853 } => {
3854 cmd_span_repair(
3855 &frontier,
3856 &finding_id,
3857 §ion,
3858 &text,
3859 &reviewer,
3860 &reason,
3861 apply,
3862 json,
3863 );
3864 }
3865 Commands::EntityAdd {
3866 frontier,
3867 finding_id,
3868 entity,
3869 entity_type,
3870 reviewer,
3871 reason,
3872 apply,
3873 json,
3874 } => {
3875 let report = state::add_finding_entity(
3876 &frontier,
3877 &finding_id,
3878 &entity,
3879 &entity_type,
3880 &reviewer,
3881 &reason,
3882 apply,
3883 )
3884 .unwrap_or_else(|e| fail_return(&e));
3885 print_state_report(&report, json);
3886 }
3887 Commands::EntityResolve {
3888 frontier,
3889 finding_id,
3890 entity,
3891 source,
3892 id,
3893 confidence,
3894 matched_name,
3895 resolution_method,
3896 reviewer,
3897 reason,
3898 apply,
3899 json,
3900 } => {
3901 cmd_entity_resolve(
3902 &frontier,
3903 &finding_id,
3904 &entity,
3905 &source,
3906 &id,
3907 confidence,
3908 matched_name.as_deref(),
3909 &resolution_method,
3910 &reviewer,
3911 &reason,
3912 apply,
3913 json,
3914 );
3915 }
3916 Commands::Propagate {
3917 frontier,
3918 retract,
3919 reduce_confidence,
3920 to,
3921 output,
3922 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3923 Commands::Replicate {
3924 frontier,
3925 target,
3926 outcome,
3927 by,
3928 conditions,
3929 source_title,
3930 doi,
3931 pmid,
3932 sample_size,
3933 note,
3934 previous_attempt,
3935 no_cascade,
3936 json,
3937 } => cmd_replicate(
3938 &frontier,
3939 &target,
3940 &outcome,
3941 &by,
3942 &conditions,
3943 &source_title,
3944 doi.as_deref(),
3945 pmid.as_deref(),
3946 sample_size.as_deref(),
3947 ¬e,
3948 previous_attempt.as_deref(),
3949 no_cascade,
3950 json,
3951 ),
3952 Commands::Replications {
3953 frontier,
3954 target,
3955 json,
3956 } => cmd_replications(&frontier, target.as_deref(), json),
3957 Commands::DatasetAdd {
3958 frontier,
3959 name,
3960 version,
3961 content_hash,
3962 url,
3963 license,
3964 source_title,
3965 doi,
3966 row_count,
3967 json,
3968 } => cmd_dataset_add(
3969 &frontier,
3970 &name,
3971 version.as_deref(),
3972 &content_hash,
3973 url.as_deref(),
3974 license.as_deref(),
3975 &source_title,
3976 doi.as_deref(),
3977 row_count,
3978 json,
3979 ),
3980 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
3981 Commands::CodeAdd {
3982 frontier,
3983 language,
3984 repo_url,
3985 commit,
3986 path,
3987 content_hash,
3988 line_start,
3989 line_end,
3990 entry_point,
3991 json,
3992 } => cmd_code_add(
3993 &frontier,
3994 &language,
3995 repo_url.as_deref(),
3996 commit.as_deref(),
3997 &path,
3998 &content_hash,
3999 line_start,
4000 line_end,
4001 entry_point.as_deref(),
4002 json,
4003 ),
4004 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
4005 Commands::ArtifactAdd {
4006 frontier,
4007 kind,
4008 name,
4009 file,
4010 url,
4011 content_hash,
4012 media_type,
4013 license,
4014 source_title,
4015 source_url,
4016 doi,
4017 target,
4018 metadata,
4019 access_tier,
4020 deposited_by,
4021 reason,
4022 json,
4023 } => cmd_artifact_add(
4024 &frontier,
4025 &kind,
4026 &name,
4027 file.as_deref(),
4028 url.as_deref(),
4029 content_hash.as_deref(),
4030 media_type.as_deref(),
4031 license.as_deref(),
4032 source_title.as_deref(),
4033 source_url.as_deref(),
4034 doi.as_deref(),
4035 target,
4036 metadata,
4037 &access_tier,
4038 &deposited_by,
4039 &reason,
4040 json,
4041 ),
4042 Commands::Artifacts {
4043 frontier,
4044 target,
4045 json,
4046 } => cmd_artifacts(&frontier, target.as_deref(), json),
4047 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4048 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4049 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4050 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4051 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4052 Commands::ClinicalTrialImport {
4053 frontier,
4054 nct_id,
4055 input_json,
4056 target,
4057 deposited_by,
4058 reason,
4059 license,
4060 json,
4061 } => {
4062 cmd_clinical_trial_import(
4063 &frontier,
4064 &nct_id,
4065 input_json.as_deref(),
4066 target,
4067 &deposited_by,
4068 &reason,
4069 &license,
4070 json,
4071 )
4072 .await
4073 }
4074 Commands::NegativeResultAdd {
4075 frontier,
4076 kind,
4077 deposited_by,
4078 reason,
4079 conditions_text,
4080 notes,
4081 target,
4082 endpoint,
4083 intervention,
4084 comparator,
4085 population,
4086 n_enrolled,
4087 power,
4088 ci_lower,
4089 ci_upper,
4090 effect_size_threshold,
4091 registry_id,
4092 reagent,
4093 observation,
4094 attempts,
4095 source_title,
4096 doi,
4097 url,
4098 year,
4099 json,
4100 } => cmd_negative_result_add(
4101 &frontier,
4102 &kind,
4103 &deposited_by,
4104 &reason,
4105 &conditions_text,
4106 ¬es,
4107 target,
4108 endpoint.as_deref(),
4109 intervention.as_deref(),
4110 comparator.as_deref(),
4111 population.as_deref(),
4112 n_enrolled,
4113 power,
4114 ci_lower,
4115 ci_upper,
4116 effect_size_threshold,
4117 registry_id.as_deref(),
4118 reagent.as_deref(),
4119 observation.as_deref(),
4120 attempts,
4121 &source_title,
4122 doi.as_deref(),
4123 url.as_deref(),
4124 year,
4125 json,
4126 ),
4127 Commands::NegativeResults {
4128 frontier,
4129 target,
4130 json,
4131 } => cmd_negative_results(&frontier, target.as_deref(), json),
4132 Commands::TrajectoryCreate {
4133 frontier,
4134 deposited_by,
4135 reason,
4136 target,
4137 notes,
4138 json,
4139 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4140 Commands::TrajectoryStep {
4141 frontier,
4142 trajectory_id,
4143 kind,
4144 description,
4145 actor,
4146 reason,
4147 reference,
4148 json,
4149 } => cmd_trajectory_step(
4150 &frontier,
4151 &trajectory_id,
4152 &kind,
4153 &description,
4154 &actor,
4155 &reason,
4156 reference,
4157 json,
4158 ),
4159 Commands::Trajectories {
4160 frontier,
4161 target,
4162 json,
4163 } => cmd_trajectories(&frontier, target.as_deref(), json),
4164 Commands::TierSet {
4165 frontier,
4166 object_type,
4167 object_id,
4168 tier,
4169 actor,
4170 reason,
4171 json,
4172 } => cmd_tier_set(
4173 &frontier,
4174 &object_type,
4175 &object_id,
4176 &tier,
4177 &actor,
4178 &reason,
4179 json,
4180 ),
4181 Commands::Predict {
4182 frontier,
4183 by,
4184 claim,
4185 criterion,
4186 resolves_by,
4187 confidence,
4188 target,
4189 outcome,
4190 conditions,
4191 json,
4192 } => cmd_predict(
4193 &frontier,
4194 &by,
4195 &claim,
4196 &criterion,
4197 resolves_by.as_deref(),
4198 confidence,
4199 &target,
4200 &outcome,
4201 &conditions,
4202 json,
4203 ),
4204 Commands::Resolve {
4205 frontier,
4206 prediction,
4207 outcome,
4208 matched,
4209 by,
4210 confidence,
4211 source_title,
4212 doi,
4213 json,
4214 } => cmd_resolve(
4215 &frontier,
4216 &prediction,
4217 &outcome,
4218 matched,
4219 &by,
4220 confidence,
4221 &source_title,
4222 doi.as_deref(),
4223 json,
4224 ),
4225 Commands::Predictions {
4226 frontier,
4227 by,
4228 open,
4229 json,
4230 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4231 Commands::Calibration {
4232 frontier,
4233 actor,
4234 json,
4235 } => cmd_calibration(&frontier, actor.as_deref(), json),
4236 Commands::PredictionsExpire {
4237 frontier,
4238 now,
4239 dry_run,
4240 json,
4241 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4242 Commands::Consensus {
4243 frontier,
4244 target,
4245 weighting,
4246 causal_claim,
4247 causal_grade_min,
4248 json,
4249 } => cmd_consensus(
4250 &frontier,
4251 &target,
4252 &weighting,
4253 causal_claim.as_deref(),
4254 causal_grade_min.as_deref(),
4255 json,
4256 ),
4257
4258 Commands::Ingest {
4261 path,
4262 frontier,
4263 backend,
4264 actor,
4265 dry_run,
4266 json,
4267 } => {
4268 cmd_ingest(
4269 &path,
4270 &frontier,
4271 backend.as_deref(),
4272 actor.as_deref(),
4273 dry_run,
4274 json,
4275 )
4276 .await
4277 }
4278
4279 Commands::Propose {
4280 frontier,
4281 finding_id,
4282 status,
4283 reason,
4284 reviewer,
4285 apply,
4286 json,
4287 } => {
4288 let options = state::ReviewOptions {
4291 status: status.clone(),
4292 reason: reason.clone(),
4293 reviewer: reviewer.clone(),
4294 };
4295 let report = state::review_finding(&frontier, &finding_id, options, apply)
4296 .unwrap_or_else(|e| fail_return(&e));
4297 print_state_report(&report, json);
4298 }
4299
4300 Commands::Accept {
4301 frontier,
4302 proposal_id,
4303 reviewer,
4304 reason,
4305 json,
4306 } => {
4307 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4309 .unwrap_or_else(|e| fail_return(&e));
4310 let payload = json!({
4311 "ok": true,
4312 "command": "accept",
4313 "frontier": frontier.display().to_string(),
4314 "proposal_id": proposal_id,
4315 "reviewer": reviewer,
4316 "applied_event_id": event_id,
4317 });
4318 if json {
4319 println!(
4320 "{}",
4321 serde_json::to_string_pretty(&payload)
4322 .expect("failed to serialize accept response")
4323 );
4324 } else {
4325 println!(
4326 "{} accepted and applied proposal {}",
4327 style::ok("ok"),
4328 proposal_id
4329 );
4330 println!(" event: {}", event_id);
4331 }
4332 }
4333
4334 Commands::Attest {
4335 frontier,
4336 event,
4337 attester,
4338 scope_note,
4339 proof_id,
4340 signature,
4341 key,
4342 json,
4343 } => {
4344 if let Some(target_event_id) = event {
4348 let attester_id = attester.unwrap_or_else(|| {
4349 fail_return("attest: --attester is required in per-event mode")
4350 });
4351 let scope = scope_note.unwrap_or_else(|| {
4352 fail_return("attest: --scope-note is required in per-event mode")
4353 });
4354 let attestation_event_id = state::record_attestation(
4355 &frontier,
4356 &target_event_id,
4357 &attester_id,
4358 &scope,
4359 proof_id.as_deref(),
4360 signature.as_deref(),
4361 )
4362 .unwrap_or_else(|e| fail_return(&e));
4363 if json {
4364 let payload = json!({
4365 "ok": true,
4366 "command": "attest.event",
4367 "frontier": frontier.display().to_string(),
4368 "target_event_id": target_event_id,
4369 "attestation_event_id": attestation_event_id,
4370 "attester_id": attester_id,
4371 });
4372 println!(
4373 "{}",
4374 serde_json::to_string_pretty(&payload)
4375 .expect("failed to serialize attest.event response")
4376 );
4377 } else {
4378 println!(
4379 "{} attested {} by {} ({})",
4380 style::ok("ok"),
4381 target_event_id,
4382 attester_id,
4383 attestation_event_id
4384 );
4385 }
4386 return;
4387 }
4388 let key_path = key.unwrap_or_else(|| {
4390 fail_return(
4391 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4392 )
4393 });
4394 let count =
4395 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4396 let payload = json!({
4397 "ok": true,
4398 "command": "attest",
4399 "frontier": frontier.display().to_string(),
4400 "private_key": key_path.display().to_string(),
4401 "signed": count,
4402 });
4403 if json {
4404 println!(
4405 "{}",
4406 serde_json::to_string_pretty(&payload)
4407 .expect("failed to serialize attest response")
4408 );
4409 } else {
4410 println!(
4411 "{} {count} findings in {}",
4412 style::ok("attested"),
4413 frontier.display()
4414 );
4415 }
4416 }
4417
4418 Commands::Lineage {
4419 frontier,
4420 finding_id,
4421 as_of,
4422 json,
4423 } => {
4424 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4426 .unwrap_or_else(|e| fail_return(&e));
4427 if json {
4428 println!(
4429 "{}",
4430 serde_json::to_string_pretty(&payload)
4431 .expect("failed to serialize lineage response")
4432 );
4433 } else {
4434 print_history(&payload);
4435 }
4436 }
4437
4438 Commands::Carina { action } => cmd_carina(action),
4439
4440 Commands::Atlas { action } => cmd_atlas(action).await,
4441
4442 Commands::Constellation { action } => cmd_constellation(action).await,
4443 }
4444}
4445
4446async fn cmd_atlas(action: AtlasAction) {
4451 match action {
4452 AtlasAction::Init {
4453 name,
4454 frontiers,
4455 domain,
4456 scope_note,
4457 atlases_root,
4458 json,
4459 } => match ATLAS_INIT_HANDLER.get() {
4460 Some(handler) => {
4461 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4462 }
4463 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4464 },
4465 AtlasAction::Materialize {
4466 name,
4467 atlases_root,
4468 json,
4469 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4470 Some(handler) => handler(atlases_root, name, json).await,
4471 None => fail("vela atlas materialize: handler not registered"),
4472 },
4473 AtlasAction::Serve {
4474 name,
4475 atlases_root,
4476 port,
4477 no_open,
4478 } => {
4479 match ATLAS_SERVE_HANDLER.get() {
4483 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4484 None => fail("vela atlas serve: handler not registered"),
4485 }
4486 }
4487 AtlasAction::Update {
4488 name,
4489 add_frontier,
4490 remove_vfr_id,
4491 atlases_root,
4492 json,
4493 } => match ATLAS_UPDATE_HANDLER.get() {
4494 Some(handler) => {
4495 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4496 }
4497 None => fail("vela atlas update: handler not registered"),
4498 },
4499 }
4500}
4501
4502async fn cmd_constellation(action: ConstellationAction) {
4506 match action {
4507 ConstellationAction::Init {
4508 name,
4509 atlases,
4510 scope_note,
4511 constellations_root,
4512 json,
4513 } => match CONSTELLATION_INIT_HANDLER.get() {
4514 Some(handler) => {
4515 handler(constellations_root, name, scope_note, atlases, json).await;
4516 }
4517 None => fail(
4518 "vela constellation init: handler not registered (built without vela-constellation)",
4519 ),
4520 },
4521 ConstellationAction::Materialize {
4522 name,
4523 constellations_root,
4524 json,
4525 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4526 Some(handler) => handler(constellations_root, name, json).await,
4527 None => fail("vela constellation materialize: handler not registered"),
4528 },
4529 ConstellationAction::Serve {
4530 name,
4531 constellations_root,
4532 port,
4533 no_open,
4534 } => match CONSTELLATION_SERVE_HANDLER.get() {
4535 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4536 None => fail("vela constellation serve: handler not registered"),
4537 },
4538 }
4539}
4540
4541fn cmd_carina(action: CarinaAction) {
4544 match action {
4545 CarinaAction::List { json } => {
4546 if json {
4547 println!(
4548 "{}",
4549 serde_json::to_string_pretty(&json!({
4550 "ok": true,
4551 "command": "carina.list",
4552 "primitives": carina_validate::PRIMITIVE_NAMES,
4553 }))
4554 .expect("failed to serialize carina.list")
4555 );
4556 } else {
4557 println!("Carina primitives bundled with this build:");
4558 for name in carina_validate::PRIMITIVE_NAMES {
4559 println!(" · {name}");
4560 }
4561 }
4562 }
4563 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4564 Some(text) => print!("{text}"),
4565 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4566 },
4567 CarinaAction::Validate {
4568 path,
4569 primitive,
4570 json,
4571 } => {
4572 let text = std::fs::read_to_string(&path)
4573 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4574 let value: Value = serde_json::from_str(&text)
4575 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4576 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4582 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4583 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4584 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4585 for (key, child) in primitives {
4586 let outcome = carina_validate::validate(key, child)
4587 .map(|()| carina_validate::detect_primitive(child));
4588 report.push((key.clone(), outcome));
4589 }
4590 } else {
4591 let outcome = match primitive.as_deref() {
4592 Some(name) => carina_validate::validate(name, &value).map(|()| {
4593 carina_validate::PRIMITIVE_NAMES
4594 .iter()
4595 .copied()
4596 .find(|p| *p == name)
4597 }),
4598 None => carina_validate::validate_auto(&value).map(Some),
4599 };
4600 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4601 report.push((label, outcome));
4602 }
4603
4604 let total = report.len();
4605 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4606 let fail = total - pass;
4607
4608 if json {
4609 let entries: Vec<Value> = report
4610 .iter()
4611 .map(|(label, r)| match r {
4612 Ok(name) => json!({
4613 "key": label,
4614 "primitive": name,
4615 "ok": true,
4616 }),
4617 Err(errs) => json!({
4618 "key": label,
4619 "ok": false,
4620 "errors": errs,
4621 }),
4622 })
4623 .collect();
4624 println!(
4625 "{}",
4626 serde_json::to_string_pretty(&json!({
4627 "ok": fail == 0,
4628 "command": "carina.validate",
4629 "file": path.display().to_string(),
4630 "total": total,
4631 "passed": pass,
4632 "failed": fail,
4633 "entries": entries,
4634 }))
4635 .expect("failed to serialize carina.validate")
4636 );
4637 } else {
4638 for (label, r) in &report {
4639 match r {
4640 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4641 Ok(None) => println!(" {} {label}", style::ok("ok")),
4642 Err(errs) => {
4643 println!(" {} {label}", style::lost("fail"));
4644 for e in errs {
4645 println!(" {e}");
4646 }
4647 }
4648 }
4649 }
4650 println!();
4651 if fail == 0 {
4652 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4653 } else {
4654 println!(
4655 "{} {pass}/{total} valid · {fail} failed",
4656 style::lost("carina.validate")
4657 );
4658 }
4659 }
4660
4661 if fail > 0 {
4662 std::process::exit(1);
4663 }
4664 }
4665 }
4666}
4667
/// Compute and print the weighted consensus for one `vf_` finding.
///
/// `weighting_str` is parsed into an aggregation `WeightingScheme`;
/// `causal_claim` / `causal_grade_min` optionally restrict which similar
/// findings contribute. Invalid arguments or a target missing from the
/// frontier abort the process via `fail` / `fail_return`.
fn cmd_consensus(
    frontier: &Path,
    target: &str,
    weighting_str: &str,
    causal_claim: Option<&str>,
    causal_grade_min: Option<&str>,
    json: bool,
) {
    use crate::bundle::{CausalClaim, CausalEvidenceGrade};

    // Consensus is only defined over finding ids ("vf_…").
    if !target.starts_with("vf_") {
        fail(&format!("target `{target}` is not a vf_ finding id"));
    }
    let scheme =
        crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));

    // Translate the CLI strings into bundle enums; unknown spellings abort
    // with a message listing the accepted values.
    let parsed_claim = match causal_claim {
        None => None,
        Some("correlation") => Some(CausalClaim::Correlation),
        Some("mediation") => Some(CausalClaim::Mediation),
        Some("intervention") => Some(CausalClaim::Intervention),
        Some(other) => fail_return(&format!(
            "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
        )),
    };
    let parsed_grade = match causal_grade_min {
        None => None,
        Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
        Some("observational") => Some(CausalEvidenceGrade::Observational),
        Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
        Some("rct") => Some(CausalEvidenceGrade::Rct),
        Some(other) => fail_return(&format!(
            "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
        )),
    };
    let filter = crate::aggregate::AggregateFilter {
        causal_claim: parsed_claim,
        causal_grade_min: parsed_grade,
    };
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
        .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));

    // Machine-readable output is the serialized result verbatim.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&result).expect("serialize consensus")
        );
        return;
    }

    // Human-readable report.
    println!();
    println!(
        " {}",
        format!(
            "VELA · CONSENSUS · {} ({})",
            result.target, result.weighting
        )
        .to_uppercase()
        .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " target: {}",
        truncate(&result.target_assertion, 80)
    );
    println!(" similar findings: {}", result.n_findings);
    println!(
        " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
        result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
    );
    println!();
    println!(" constituents (sorted by weight):");
    // Heaviest first; NaN weights compare as Equal so the sort can't panic
    // on the partial order of f64.
    let mut sorted = result.constituents.clone();
    sorted.sort_by(|a, b| {
        b.weight
            .partial_cmp(&a.weight)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    // Itemize only the top 10 constituents.
    for c in sorted.iter().take(10) {
        let repls = if c.n_replications > 0 {
            format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
        } else {
            String::new()
        };
        println!(
            " · w={:.2} raw={:.2} adj={:.2}{}",
            c.weight, c.raw_score, c.adjusted_score, repls
        );
        println!(" {}", truncate(&c.assertion_text, 88));
    }
    // Summarize whatever didn't make the top 10.
    if result.constituents.len() > 10 {
        println!(" ... ({} more)", result.constituents.len() - 10);
    }
}
4766
4767fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4773 let trimmed = s.trim();
4774 if trimmed.eq_ignore_ascii_case("affirmed") {
4775 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4776 }
4777 if trimmed.eq_ignore_ascii_case("falsified") {
4778 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4779 }
4780 if let Some(rest) = trimmed.strip_prefix("cat:") {
4781 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4782 value: rest.to_string(),
4783 });
4784 }
4785 if let Some(rest) = trimmed.strip_prefix("quant:") {
4786 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4787 let (val_s, tol_s) = vt
4788 .split_once('±')
4789 .or_else(|| vt.split_once("+/-"))
4790 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4791 let value: f64 = val_s
4792 .parse()
4793 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4794 let tolerance: f64 = tol_s
4795 .parse()
4796 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4797 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4798 value,
4799 tolerance,
4800 units: units.to_string(),
4801 });
4802 }
4803 Err(format!(
4804 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4805 ))
4806}
4807
/// Record a new prediction against one or more `vf_` findings.
///
/// Validates `confidence` (must lie in [0, 1]), parses `outcome` via
/// `parse_expected_outcome`, checks every comma-separated id in
/// `target_csv` exists in the frontier, then builds and persists a
/// `Prediction`. A prediction whose derived id already exists is skipped
/// (reported, not an error). Aborts via `fail`/`fail_return` on invalid
/// input or I/O failure.
#[allow(clippy::too_many_arguments)]
fn cmd_predict(
    frontier: &Path,
    by: &str,
    claim: &str,
    criterion: &str,
    resolves_by: Option<&str>,
    confidence: f64,
    target_csv: &str,
    outcome: &str,
    conditions_text: &str,
    json: bool,
) {
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Targets arrive as a comma-separated list; blanks are ignored.
    let targets: Vec<String> = target_csv
        .split(',')
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
        .collect();
    // Every target must be a finding id that is already in the frontier.
    for t in &targets {
        if !t.starts_with("vf_") {
            fail(&format!("target `{t}` is not a vf_ id"));
        }
        if !project.findings.iter().any(|f| f.id == *t) {
            fail(&format!("target `{t}` not present in frontier"));
        }
    }

    // Keyword heuristics over the free-text conditions populate the boolean
    // context flags; the structured fields stay empty/None here.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro"),
        in_vivo: lower.contains("in vivo"),
        human_data: lower.contains("human") || lower.contains("clinical"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let prediction = crate::bundle::Prediction::new(
        claim.to_string(),
        targets,
        None,
        resolves_by.map(|s| s.to_string()),
        criterion.to_string(),
        expected,
        by.to_string(),
        confidence,
        conditions,
    );

    // Idempotency: an existing prediction with the same id is reported and
    // the frontier is left untouched.
    if project.predictions.iter().any(|p| p.id == prediction.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "predict",
                    "reason": "prediction_already_exists",
                    "id": prediction.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} prediction {} already exists in {}; skipping.",
                style::warn("predict"),
                prediction.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = prediction.id.clone();
    project.predictions.push(prediction);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "predict",
                "id": new_id,
                "made_by": by,
                "confidence": confidence,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize predict result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · PREDICT · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" by: {by}");
        println!(" confidence: {confidence:.3}");
        if let Some(d) = resolves_by {
            println!(" resolves by: {d}");
        }
        println!(" outcome: {outcome}");
        println!(" claim: {}", truncate(claim, 88));
        println!();
        println!(
            " {} prediction recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
4934
/// Record the resolution of an existing `vpred_` prediction.
///
/// Validates the prediction id's prefix and `confidence` (must lie in
/// [0, 1]), requires the prediction to exist in the frontier, wraps the
/// optional `source_title` in a minimal evidence record, then persists a
/// `Resolution`. A resolution whose derived id already exists is skipped.
/// Aborts via `fail`/`fail_return` on invalid input or I/O failure.
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    // Minimal evidence record: only the source title is captured (as an
    // evidence span) when provided; all quantitative fields stay None.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // NOTE(review): `doi` is accepted by the CLI but deliberately discarded
    // here — confirm whether it should be attached to the evidence record.
    let _ = doi; let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Idempotency: an existing resolution with the same id is reported and
    // the frontier is left untouched.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" prediction: {prediction_id}");
        println!(
            " matched: {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!(" by: {by}");
        println!(" outcome: {}", truncate(actual_outcome, 80));
        println!();
        println!(
            " {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5061
5062fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5064 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5065
5066 let resolved_ids: std::collections::HashSet<&str> = project
5067 .resolutions
5068 .iter()
5069 .map(|r| r.prediction_id.as_str())
5070 .collect();
5071
5072 let mut filtered: Vec<&crate::bundle::Prediction> = project
5073 .predictions
5074 .iter()
5075 .filter(|p| by.is_none_or(|b| p.made_by == b))
5076 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5077 .collect();
5078 filtered.sort_by(|a, b| {
5079 a.resolves_by
5080 .as_deref()
5081 .unwrap_or("9999")
5082 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5083 });
5084
5085 if json {
5086 let payload: Vec<serde_json::Value> = filtered
5087 .iter()
5088 .map(|p| {
5089 json!({
5090 "id": p.id,
5091 "claim_text": p.claim_text,
5092 "made_by": p.made_by,
5093 "confidence": p.confidence,
5094 "predicted_at": p.predicted_at,
5095 "resolves_by": p.resolves_by,
5096 "expected_outcome": p.expected_outcome,
5097 "resolved": resolved_ids.contains(p.id.as_str()),
5098 })
5099 })
5100 .collect();
5101 println!(
5102 "{}",
5103 serde_json::to_string_pretty(&json!({
5104 "ok": true,
5105 "command": "predictions",
5106 "frontier": frontier.display().to_string(),
5107 "count": payload.len(),
5108 "predictions": payload,
5109 }))
5110 .expect("serialize predictions")
5111 );
5112 return;
5113 }
5114
5115 println!();
5116 println!(
5117 " {}",
5118 format!("VELA · PREDICTIONS · {}", frontier.display())
5119 .to_uppercase()
5120 .dimmed()
5121 );
5122 println!(" {}", style::tick_row(60));
5123 if filtered.is_empty() {
5124 println!(" (no predictions matching filters)");
5125 return;
5126 }
5127 for p in &filtered {
5128 let resolved = resolved_ids.contains(p.id.as_str());
5129 let chip = if resolved {
5130 style::ok("resolved")
5131 } else {
5132 style::warn("open")
5133 };
5134 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5135 println!(
5136 " · {} {} by {} → {}",
5137 p.id.dimmed(),
5138 chip,
5139 p.made_by,
5140 deadline,
5141 );
5142 println!(" claim: {}", truncate(&p.claim_text, 90));
5143 println!(" confidence: {:.2}", p.confidence);
5144 }
5145}
5146
5147fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5152 use chrono::DateTime;
5153
5154 let now_dt = match now_override {
5155 Some(s) => DateTime::parse_from_rfc3339(s)
5156 .map(|dt| dt.with_timezone(&chrono::Utc))
5157 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5158 None => chrono::Utc::now(),
5159 };
5160
5161 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5162 if dry_run {
5163 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5165 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5166 if json {
5167 println!(
5168 "{}",
5169 serde_json::to_string_pretty(&json!({
5170 "ok": true,
5171 "command": "predictions.expire",
5172 "dry_run": true,
5173 "report": report,
5174 }))
5175 .expect("serialize predictions.expire (dry-run)")
5176 );
5177 } else {
5178 println!(
5179 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5180 style::ok("ok"),
5181 report.now,
5182 report.newly_expired.len(),
5183 report.already_expired.len(),
5184 report.already_resolved.len(),
5185 report.still_open.len(),
5186 );
5187 for id in &report.newly_expired {
5188 println!(" · {id}");
5189 }
5190 }
5191 return;
5192 }
5193
5194 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5195 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5196
5197 if json {
5198 println!(
5199 "{}",
5200 serde_json::to_string_pretty(&json!({
5201 "ok": true,
5202 "command": "predictions.expire",
5203 "report": report,
5204 }))
5205 .expect("serialize predictions.expire")
5206 );
5207 } else {
5208 println!(
5209 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5210 style::ok("expired"),
5211 report.now,
5212 report.newly_expired.len(),
5213 report.already_expired.len(),
5214 report.already_resolved.len(),
5215 report.still_open.len(),
5216 );
5217 for id in &report.newly_expired {
5218 println!(" · {id}");
5219 }
5220 }
5221}
5222
5223fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5224 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5225 let records = match actor {
5226 Some(a) => {
5227 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5228 .map(|r| vec![r])
5229 .unwrap_or_default()
5230 }
5231 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5232 };
5233
5234 if json {
5235 println!(
5236 "{}",
5237 serde_json::to_string_pretty(&json!({
5238 "ok": true,
5239 "command": "calibration",
5240 "frontier": frontier.display().to_string(),
5241 "filter_actor": actor,
5242 "records": records,
5243 }))
5244 .expect("serialize calibration")
5245 );
5246 return;
5247 }
5248
5249 println!();
5250 println!(
5251 " {}",
5252 format!("VELA · CALIBRATION · {}", frontier.display())
5253 .to_uppercase()
5254 .dimmed()
5255 );
5256 println!(" {}", style::tick_row(60));
5257 if records.is_empty() {
5258 println!(" (no calibration records)");
5259 return;
5260 }
5261 for r in &records {
5262 println!(" · {}", r.actor);
5263 println!(
5264 " predictions: {} resolved: {} hits: {}",
5265 r.n_predictions, r.n_resolved, r.n_hit
5266 );
5267 match r.hit_rate {
5268 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5269 None => println!(" hit rate: n/a"),
5270 }
5271 match r.brier_score {
5272 Some(b) => println!(
5273 " brier: {:.4} (lower is better; 0.25 = chance)",
5274 b
5275 ),
5276 None => println!(" brier: n/a"),
5277 }
5278 match r.log_score {
5279 Some(l) => println!(
5280 " log score: {:.4} (higher is better; 0 = perfect)",
5281 l
5282 ),
5283 None => println!(" log score: n/a"),
5284 }
5285 }
5286}
5287
/// Register a dataset (name, version, content hash, provenance) in the
/// frontier.
///
/// Builds a `data_release` provenance record with a `manual_curation`
/// extraction stamp, constructs the `Dataset`, and persists it. A dataset
/// whose derived id already exists is skipped (reported, not an error).
/// Aborts via `fail_return` on load/save failure.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance: hand-curated data release stamped with this binary's
    // version and the current time.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // `row_count` is not a constructor argument; set it afterwards.
    dataset.row_count = row_count;

    // Idempotency: an existing dataset with the same id is reported and the
    // frontier is left untouched.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" name: {name}");
        if let Some(v) = version {
            println!(" version: {v}");
        }
        println!(" content_hash: {content_hash}");
        if let Some(u) = url {
            println!(" url: {u}");
        }
        println!(" source: {source_title}");
        println!();
        println!(
            " {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5405
/// Deposit a negative result into the frontier and print a receipt.
///
/// Two shapes are supported, selected by `kind`:
/// - `"registered_trial"`: requires --endpoint, --intervention,
///   --comparator, --population, --n-enrolled, --power, --ci-lower and
///   --ci-upper; --effect-size-threshold and --registry-id are optional.
/// - `"exploratory"`: requires --reagent, --observation and --attempts.
///
/// Any missing required flag, or an unrecognized `kind`, aborts via
/// `fail_return` before any frontier state is written.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Validate and assemble the kind-specific payload first, so bad
    // input fails before anything else happens.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Conditions: only the free-text description and the clinical_trial
    // flag are populated for a manual deposit; the structured fields
    // stay at their empty/false defaults.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance: source_type mirrors the deposit kind; extraction is
    // stamped as manual curation by this CLI build.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    // Persist through the state layer (which also emits the event).
    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind:          {kind}");
        println!("  deposited_by:  {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!("  event:         {ev}");
        }
        println!(
            "  {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5568
5569fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
5572 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5573 let filtered: Vec<&crate::bundle::NegativeResult> = project
5574 .negative_results
5575 .iter()
5576 .filter(|nr| {
5577 target
5578 .map(|t| nr.target_findings.iter().any(|f| f == t))
5579 .unwrap_or(true)
5580 })
5581 .collect();
5582
5583 if json {
5584 println!(
5585 "{}",
5586 serde_json::to_string_pretty(&json!({
5587 "ok": true,
5588 "command": "negative_results",
5589 "frontier": frontier.display().to_string(),
5590 "count": filtered.len(),
5591 "negative_results": filtered,
5592 }))
5593 .expect("serialize negative_results")
5594 );
5595 return;
5596 }
5597
5598 if filtered.is_empty() {
5599 println!(" no negative_results in {}", frontier.display());
5600 return;
5601 }
5602
5603 println!();
5604 println!(
5605 " {} ({})",
5606 "VELA · NEGATIVE RESULTS".dimmed(),
5607 filtered.len()
5608 );
5609 println!(" {}", style::tick_row(60));
5610 for nr in &filtered {
5611 let kind_label = match &nr.kind {
5612 crate::bundle::NegativeResultKind::RegisteredTrial {
5613 endpoint, power, ..
5614 } => format!("trial · {endpoint} · power {power:.2}"),
5615 crate::bundle::NegativeResultKind::Exploratory {
5616 reagent, attempts, ..
5617 } => format!("exploratory · {reagent} · {attempts} attempts"),
5618 };
5619 let retracted = if nr.retracted { " [retracted]" } else { "" };
5620 let review = nr
5621 .review_state
5622 .as_ref()
5623 .map(|s| format!(" [{s:?}]"))
5624 .unwrap_or_default();
5625 println!(" {}{}{}", nr.id, retracted, review);
5626 println!(" {kind_label}");
5627 if !nr.target_findings.is_empty() {
5628 println!(" targets: {}", nr.target_findings.join(", "));
5629 }
5630 }
5631 println!();
5632}
5633
5634#[allow(clippy::too_many_arguments)]
5636fn cmd_tier_set(
5637 frontier: &Path,
5638 object_type: &str,
5639 object_id: &str,
5640 tier: &str,
5641 actor: &str,
5642 reason: &str,
5643 json: bool,
5644) {
5645 let parsed_tier =
5646 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5647 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5648 .unwrap_or_else(|e| fail_return(&e));
5649
5650 if json {
5651 println!(
5652 "{}",
5653 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5654 );
5655 } else {
5656 println!();
5657 println!(
5658 " {}",
5659 format!("VELA · TIER · {}", object_id)
5660 .to_uppercase()
5661 .dimmed()
5662 );
5663 println!(" {}", style::tick_row(60));
5664 println!(" object_type: {object_type}");
5665 println!(" new_tier: {}", parsed_tier.canonical());
5666 println!(" actor: {actor}");
5667 if let Some(ev) = &report.applied_event_id {
5668 println!(" event: {ev}");
5669 }
5670 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5671 }
5672}
5673
5674#[allow(clippy::too_many_arguments)]
5676fn cmd_trajectory_create(
5677 frontier: &Path,
5678 deposited_by: &str,
5679 reason: &str,
5680 targets: Vec<String>,
5681 notes: &str,
5682 json: bool,
5683) {
5684 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5685 .unwrap_or_else(|e| fail_return(&e));
5686
5687 if json {
5688 println!(
5689 "{}",
5690 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5691 );
5692 } else {
5693 println!();
5694 println!(
5695 " {}",
5696 format!("VELA · TRAJECTORY · {}", report.finding_id)
5697 .to_uppercase()
5698 .dimmed()
5699 );
5700 println!(" {}", style::tick_row(60));
5701 println!(" deposited_by: {deposited_by}");
5702 if let Some(ev) = &report.applied_event_id {
5703 println!(" event: {ev}");
5704 }
5705 println!(
5706 " {} trajectory opened in {}",
5707 style::ok("ok"),
5708 frontier.display()
5709 );
5710 }
5711}
5712
5713#[allow(clippy::too_many_arguments)]
5715fn cmd_trajectory_step(
5716 frontier: &Path,
5717 trajectory_id: &str,
5718 kind: &str,
5719 description: &str,
5720 actor: &str,
5721 reason: &str,
5722 references: Vec<String>,
5723 json: bool,
5724) {
5725 let parsed_kind = match kind {
5726 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5727 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5728 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5729 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5730 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5731 other => fail_return(&format!(
5732 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5733 )),
5734 };
5735 let report = state::append_trajectory_step(
5736 frontier,
5737 trajectory_id,
5738 parsed_kind,
5739 description,
5740 actor,
5741 references,
5742 reason,
5743 )
5744 .unwrap_or_else(|e| fail_return(&e));
5745
5746 if json {
5747 println!(
5748 "{}",
5749 serde_json::to_string_pretty(&report).expect("serialize step report")
5750 );
5751 } else {
5752 println!();
5753 println!(
5754 " {}",
5755 format!("VELA · STEP · {}", report.finding_id)
5756 .to_uppercase()
5757 .dimmed()
5758 );
5759 println!(" {}", style::tick_row(60));
5760 println!(" trajectory: {trajectory_id}");
5761 println!(" kind: {kind}");
5762 println!(" actor: {actor}");
5763 println!(
5764 " {} step appended in {}",
5765 style::ok("ok"),
5766 frontier.display()
5767 );
5768 }
5769}
5770
/// List trajectories in the frontier, optionally filtered to those that
/// target a given finding id; prints JSON or an indented, step-by-step
/// human summary (step descriptions truncated to 80 chars).
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // With no --target filter, every trajectory passes.
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!("  no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!("  {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!("  {}", style::tick_row(60));
    for t in &filtered {
        // Status decorations: retraction flag plus optional review state.
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!("  {}{}{}", t.id, retracted, review);
        println!(
            "     {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            // Keep the listing compact: at most the first 80 chars.
            let preview: String = step.description.chars().take(80).collect();
            println!("       [{label}] {preview}");
        }
    }
    println!();
}
5838
5839fn cmd_datasets(frontier: &Path, json: bool) {
5841 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5842 if json {
5843 println!(
5844 "{}",
5845 serde_json::to_string_pretty(&json!({
5846 "ok": true,
5847 "command": "datasets",
5848 "frontier": frontier.display().to_string(),
5849 "count": project.datasets.len(),
5850 "datasets": project.datasets,
5851 }))
5852 .expect("serialize datasets")
5853 );
5854 return;
5855 }
5856 println!();
5857 println!(
5858 " {}",
5859 format!("VELA · DATASETS · {}", frontier.display())
5860 .to_uppercase()
5861 .dimmed()
5862 );
5863 println!(" {}", style::tick_row(60));
5864 if project.datasets.is_empty() {
5865 println!(" (no datasets registered)");
5866 return;
5867 }
5868 for ds in &project.datasets {
5869 let v = ds
5870 .version
5871 .as_deref()
5872 .map(|s| format!("@{s}"))
5873 .unwrap_or_default();
5874 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
5875 if let Some(u) = &ds.url {
5876 println!(" url: {}", truncate(u, 80));
5877 }
5878 println!(" hash: {}", truncate(&ds.content_hash, 80));
5879 }
5880}
5881
/// Register a code artifact (a file, or a line range within a file, in
/// some repository) in the frontier. The artifact id is derived by
/// `CodeArtifact::new`; re-adding an existing id is reported and
/// skipped rather than duplicated.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // A start without an end collapses to a single-line range.
    // NOTE(review): --line-end without --line-start is silently dropped
    // here — confirm that is intended rather than a usage error.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Dedupe on the derived id: an identical registration is a no-op.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  language:     {language}");
        if let Some(r) = repo_url {
            println!("  repo:         {r}");
        }
        if let Some(c) = commit {
            println!("  commit:       {c}");
        }
        println!("  path:         {path}");
        if let Some((a, b)) = line_range {
            println!("  lines:        {a}-{b}");
        }
        println!("  content_hash: {content_hash}");
        println!();
        println!(
            "  {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5981
5982fn cmd_code_artifacts(frontier: &Path, json: bool) {
5984 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5985 if json {
5986 println!(
5987 "{}",
5988 serde_json::to_string_pretty(&json!({
5989 "ok": true,
5990 "command": "code-artifacts",
5991 "frontier": frontier.display().to_string(),
5992 "count": project.code_artifacts.len(),
5993 "code_artifacts": project.code_artifacts,
5994 }))
5995 .expect("serialize code-artifacts")
5996 );
5997 return;
5998 }
5999 println!();
6000 println!(
6001 " {}",
6002 format!("VELA · CODE · {}", frontier.display())
6003 .to_uppercase()
6004 .dimmed()
6005 );
6006 println!(" {}", style::tick_row(60));
6007 if project.code_artifacts.is_empty() {
6008 println!(" (no code artifacts registered)");
6009 return;
6010 }
6011 for c in &project.code_artifacts {
6012 let lr = c
6013 .line_range
6014 .map(|(a, b)| format!(":{a}-{b}"))
6015 .unwrap_or_default();
6016 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
6017 if let Some(r) = &c.repo_url {
6018 println!(" repo: {}", truncate(r, 80));
6019 }
6020 if let Some(g) = &c.git_commit {
6021 println!(" commit: {g}");
6022 }
6023 }
6024}
6025
6026fn sha256_for_bytes(bytes: &[u8]) -> String {
6027 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6028}
6029
/// Return the hex portion of a `sha256:<hex>` hash string; inputs
/// without the prefix are returned unchanged.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6033
6034fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
6035 let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
6036 return None;
6037 };
6038 let hex = sha256_hex_part(content_hash);
6039 let rel = format!(".vela/artifact-blobs/sha256/{hex}");
6040 let path = root.join(&rel);
6041 if let Some(parent) = path.parent() {
6042 std::fs::create_dir_all(parent).unwrap_or_else(|e| {
6043 fail(&format!(
6044 "Failed to create artifact blob directory {}: {e}",
6045 parent.display()
6046 ))
6047 });
6048 }
6049 if !path.is_file() {
6050 std::fs::write(&path, bytes)
6051 .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
6052 }
6053 Some(rel)
6054}
6055
6056fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6057 let mut out = BTreeMap::new();
6058 for pair in pairs {
6059 let Some((key, value)) = pair.split_once('=') else {
6060 fail(&format!("--metadata must be key=value, got {pair:?}"));
6061 };
6062 let key = key.trim();
6063 if key.is_empty() {
6064 fail("--metadata key must be non-empty");
6065 }
6066 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6067 }
6068 out
6069}
6070
/// Map an artifact kind to the provenance `source_type` label recorded
/// on its deposit; unknown kinds fall back to "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    match kind {
        "dataset" => "data_release",
        "model_output" => "model_output",
        "lab_file" => "lab_notebook",
        "clinical_trial_record" | "protocol" => "clinical_trial",
        // "registry_record" and anything unrecognized both map to the
        // generic record label.
        _ => "database_record",
    }
}
6081
6082fn artifact_provenance(
6083 kind: &str,
6084 title: &str,
6085 url: Option<&str>,
6086 doi: Option<&str>,
6087 license: Option<&str>,
6088) -> crate::bundle::Provenance {
6089 crate::bundle::Provenance {
6090 source_type: artifact_source_type(kind).to_string(),
6091 doi: doi.map(str::to_string),
6092 pmid: None,
6093 pmc: None,
6094 openalex_id: None,
6095 url: url.map(str::to_string),
6096 title: title.to_string(),
6097 authors: Vec::new(),
6098 year: None,
6099 journal: None,
6100 license: license.map(str::to_string),
6101 publisher: None,
6102 funders: Vec::new(),
6103 extraction: crate::bundle::Extraction {
6104 method: "artifact_deposit".to_string(),
6105 model: None,
6106 model_version: None,
6107 extracted_at: chrono::Utc::now().to_rfc3339(),
6108 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6109 },
6110 review: None,
6111 citation_count: None,
6112 }
6113}
6114
/// Deposit an artifact record into the frontier.
///
/// The artifact's bytes can come from three places, which determine the
/// recorded storage mode:
/// - `--file`: bytes are read and hashed (and checked against
///   `--content-hash` when both are given), then stored as a
///   content-addressed blob inside the repo ("local_blob"), or
///   referenced in place ("local_file") when no repo blob store exists;
/// - `--url` without `--file`: recorded as "remote";
/// - neither: a bare "pointer" record — `--content-hash` is mandatory.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // If the caller also supplied a hash, it must match the bytes
        // actually read (case-insensitive hex comparison).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer a content-addressed blob inside the repo; fall back to
        // referencing the original path when no repo store is available.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without file bytes to hash, the hash must be supplied explicitly.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Provenance falls back to the artifact's own name/url when no
    // explicit source title/url was given.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind:    {kind}");
        println!("  name:    {name}");
        println!("  hash:    {content_hash_for_print}");
        println!(
            "  {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6230
6231fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
6232 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6233 let filtered: Vec<&crate::bundle::Artifact> = project
6234 .artifacts
6235 .iter()
6236 .filter(|artifact| {
6237 target
6238 .map(|t| artifact.target_findings.iter().any(|f| f == t))
6239 .unwrap_or(true)
6240 })
6241 .collect();
6242
6243 if json_out {
6244 println!(
6245 "{}",
6246 serde_json::to_string_pretty(&json!({
6247 "ok": true,
6248 "command": "artifacts",
6249 "frontier": frontier.display().to_string(),
6250 "count": filtered.len(),
6251 "artifacts": filtered,
6252 }))
6253 .expect("serialize artifacts")
6254 );
6255 return;
6256 }
6257
6258 println!();
6259 println!(
6260 " {}",
6261 format!("VELA · ARTIFACTS · {}", frontier.display())
6262 .to_uppercase()
6263 .dimmed()
6264 );
6265 println!(" {}", style::tick_row(60));
6266 if filtered.is_empty() {
6267 println!(" (no artifacts registered)");
6268 return;
6269 }
6270 for artifact in filtered {
6271 println!(
6272 " · {} {} · {}",
6273 artifact.id.dimmed(),
6274 artifact.kind,
6275 artifact.name
6276 );
6277 if let Some(locator) = &artifact.locator {
6278 println!(" locator: {}", truncate(locator, 88));
6279 }
6280 if !artifact.target_findings.is_empty() {
6281 println!(" targets: {}", artifact.target_findings.join(", "));
6282 }
6283 }
6284}
6285
6286fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
6287 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6288 let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
6289 if json_out {
6290 println!(
6291 "{}",
6292 serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
6293 );
6294 if !audit.ok {
6295 std::process::exit(1);
6296 }
6297 return;
6298 }
6299
6300 println!();
6301 println!(
6302 " {}",
6303 format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
6304 .to_uppercase()
6305 .dimmed()
6306 );
6307 println!(" {}", style::tick_row(60));
6308 println!(" artifacts: {}", audit.artifact_count);
6309 println!(" checked local blobs: {}", audit.checked_local_blobs);
6310 println!(" local blob bytes: {}", audit.local_blob_bytes);
6311 if !audit.by_kind.is_empty() {
6312 let kinds = audit
6313 .by_kind
6314 .iter()
6315 .map(|(kind, count)| format!("{kind}:{count}"))
6316 .collect::<Vec<_>>()
6317 .join(", ");
6318 println!(" kinds: {kinds}");
6319 }
6320 if audit.ok {
6321 println!(" {} artifact audit passed.", style::ok("ok"));
6322 return;
6323 }
6324 for issue in &audit.issues {
6325 println!(
6326 " {} {} {}: {}",
6327 style::lost("invalid"),
6328 issue.id,
6329 issue.field,
6330 issue.message
6331 );
6332 }
6333 std::process::exit(1);
6334}
6335
/// Print the decision-brief projection: one entry per decision question
/// with its short answer, caveat, and supporting evidence. Exits with
/// status 1 when the projection is missing or invalid.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    // A failed report carries issues/error instead of a projection.
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!("  · {} · {}", question.id.dimmed(), question.title);
        println!("    answer:   {}", wrap_line(&question.short_answer, 82));
        println!("    caveat:   {}", wrap_line(&question.caveat, 82));
        println!("    support:  {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!("    tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!("    gaps:     {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!("    artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            "    would change:  {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6385
6386fn cmd_trial_summary(frontier: &Path, json_out: bool) {
6387 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6388 let report = decision::load_trial_outcomes(frontier, &project);
6389 if json_out {
6390 println!(
6391 "{}",
6392 serde_json::to_string_pretty(&report).expect("serialize trial summary report")
6393 );
6394 if !report.ok {
6395 std::process::exit(1);
6396 }
6397 return;
6398 }
6399 println!();
6400 println!(
6401 " {}",
6402 format!("VELA · TRIAL SUMMARY · {}", project.project.name)
6403 .to_uppercase()
6404 .dimmed()
6405 );
6406 println!(" {}", style::tick_row(60));
6407 if !report.ok {
6408 print_projection_issues(&report.issues, report.error.as_deref());
6409 std::process::exit(1);
6410 }
6411 let outcomes = report
6412 .projection
6413 .as_ref()
6414 .expect("ok trial report carries projection");
6415 for row in &outcomes.rows {
6416 println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
6417 println!(" population: {}", wrap_line(&row.population, 82));
6418 println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
6419 println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
6420 println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
6421 println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
6422 println!(" status: {}", wrap_line(&row.regulatory_status, 82));
6423 if !row.finding_ids.is_empty() {
6424 println!(" findings: {}", row.finding_ids.join(", "));
6425 }
6426 if !row.artifact_ids.is_empty() {
6427 println!(" artifacts: {}", row.artifact_ids.join(", "));
6428 }
6429 }
6430}
6431
6432fn cmd_source_verification(frontier: &Path, json_out: bool) {
6433 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6434 let report = decision::load_source_verification(frontier, &project);
6435 if json_out {
6436 println!(
6437 "{}",
6438 serde_json::to_string_pretty(&report).expect("serialize source verification report")
6439 );
6440 if !report.ok {
6441 std::process::exit(1);
6442 }
6443 return;
6444 }
6445 println!();
6446 println!(
6447 " {}",
6448 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
6449 .to_uppercase()
6450 .dimmed()
6451 );
6452 println!(" {}", style::tick_row(60));
6453 if !report.ok {
6454 print_projection_issues(&report.issues, report.error.as_deref());
6455 std::process::exit(1);
6456 }
6457 let verification = report
6458 .projection
6459 .as_ref()
6460 .expect("ok source verification report carries projection");
6461 println!(" verified_at: {}", verification.verified_at);
6462 for source in &verification.sources {
6463 println!(" · {} · {}", source.id.dimmed(), source.title);
6464 println!(" agency: {}", source.agency);
6465 println!(" url: {}", truncate(&source.url, 88));
6466 println!(" status: {}", wrap_line(&source.current_status, 82));
6467 }
6468}
6469
/// Print the source-ingest-plan projection: prioritized entries to be
/// ingested, with their current frontier links; exits non-zero when the
/// projection is missing or invalid.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    // A failed report carries issues/error instead of a projection.
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!("  verified_at: {}", plan.verified_at);
    println!("  entries:     {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            "  · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!("    name:     {}", wrap_line(&entry.name, 82));
        println!("    locator:  {}", truncate(&entry.locator, 88));
        println!("    use:      {}", wrap_line(&entry.target_use, 82));
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!("    artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!("    findings: {}", entry.target_findings.join(", "));
        }
    }
}
6520
6521fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6522 if let Some(error) = error {
6523 println!(" {} {error}", style::lost("unavailable"));
6524 }
6525 for issue in issues {
6526 println!(
6527 " {} {}: {}",
6528 style::lost("invalid"),
6529 issue.path,
6530 issue.message
6531 );
6532 }
6533}
6534
/// Soft-wrap `text` at `max_chars` (measured in chars, not bytes) for
/// the indented CLI detail columns. Text that already fits is returned
/// verbatim; otherwise words are re-flowed with continuation lines
/// indented to align under the value column. A single word longer than
/// `max_chars` is emitted unbroken.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut width = 0usize;
    for word in text.split_whitespace() {
        let word_width = word.chars().count();
        if width == 0 {
            // First word on a line always fits by fiat.
            wrapped.push_str(word);
            width = word_width;
        } else if width + 1 + word_width > max_chars {
            // Line would overflow: break and indent the continuation.
            wrapped.push('\n');
            wrapped.push_str("               ");
            wrapped.push_str(word);
            width = word_width;
        } else {
            wrapped.push(' ');
            wrapped.push_str(word);
            width += 1 + word_width;
        }
    }
    wrapped
}
6559
6560fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6561 study.pointer(pointer).and_then(Value::as_str)
6562}
6563
6564fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6565 study
6566 .pointer(pointer)
6567 .and_then(Value::as_array)
6568 .map(|items| {
6569 items
6570 .iter()
6571 .filter_map(Value::as_str)
6572 .map(str::to_string)
6573 .collect()
6574 })
6575 .unwrap_or_default()
6576}
6577
6578fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6579 study
6580 .pointer(pointer)
6581 .and_then(Value::as_array)
6582 .map(|items| {
6583 items
6584 .iter()
6585 .filter_map(|item| item.get(field).and_then(Value::as_str))
6586 .map(str::to_string)
6587 .collect()
6588 })
6589 .unwrap_or_default()
6590}
6591
6592fn insert_string_vec_metadata(
6593 metadata: &mut BTreeMap<String, Value>,
6594 key: &str,
6595 values: Vec<String>,
6596) {
6597 if values.is_empty() {
6598 return;
6599 }
6600 metadata.insert(
6601 key.to_string(),
6602 Value::Array(values.into_iter().map(Value::String).collect()),
6603 );
6604}
6605
/// Import a ClinicalTrials.gov study record as a frontier artifact.
///
/// The raw study JSON comes either from `input_json` (a local file) or from
/// the ClinicalTrials.gov v2 API for `nct_id`. The JSON is canonicalized,
/// content-hashed, stored (locally when `artifact_blob_locator` succeeds,
/// otherwise referenced by the API URL), enriched with trial metadata pulled
/// out via JSON pointers, and finally appended to the frontier event log via
/// `state::add_artifact`. Output is pretty JSON when `json_out` is set,
/// otherwise a styled summary. Any failure along the way exits via `fail`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a local JSON file when provided; otherwise fetch from the API and
    // treat any non-success HTTP status as a fatal error.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")))
;
    // Canonical bytes make the content hash independent of JSON formatting.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Try to persist the bytes as a local blob; fall back to the remote URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    // Local blob locators start with ".vela/" by construction — TODO confirm
    // against artifact_blob_locator.
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id / title reported inside the record; fall back to the
    // caller-supplied id when the record omits them.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    // Build the artifact metadata map: provenance of the fetch plus the most
    // useful trial fields (status/dates/phases/conditions/interventions/outcomes).
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Optional scalar fields: inserted only when present in the record.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Optional array fields: helper skips insertion when the list is empty.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    // Assemble the artifact and record it in the frontier's event log.
    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  nct_id: {parsed_nct}");
        println!("  title:  {}", truncate(title, 96));
        println!("  source: {public_url}");
        println!(
            "  {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6782
/// Record a manual replication attempt against an existing `vf_` finding and,
/// unless `no_cascade` is set, propagate the outcome to dependent state.
///
/// Validates `outcome` against `VALID_REPLICATION_OUTCOMES` and requires
/// `target` to be a `vf_`-prefixed finding present in the frontier. Builds
/// Conditions/Evidence/Provenance records from the free-text inputs, skips
/// without error when a replication with the same id already exists, then
/// saves the frontier and reports as JSON or styled text.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Best-effort keyword scan of the free-text conditions to set the
    // boolean condition flags; the verified/structured fields stay empty.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Minimal evidence record: only the sample size and replicated flag carry
    // information from the CLI; everything else is a fixed marker value.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    // Provenance of the replication source: manual curation of a published
    // paper, stamped with the current time and this binary's version.
    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotence guard: a matching id means this attempt is already recorded
    // (ids presumably derive from the replication's content — TODO confirm
    // against Replication::new), so report and bail without writing.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Cascade the outcome to dependent findings and refresh cached stats;
    // skipped entirely with --no-cascade.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    // Persist everything (replication + any cascade edits) in one save.
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  target: {target}");
        println!("  outcome: {outcome}");
        println!("  attempted by: {attempted_by}");
        println!("  conditions: {conditions_text}");
        println!("  source: {source_title}");
        if let Some(d) = doi {
            println!("  doi: {d}");
        }
        println!();
        println!(
            "  {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                "  {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!("  {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
7004
7005fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
7007 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
7008 let filtered: Vec<&crate::bundle::Replication> = project
7009 .replications
7010 .iter()
7011 .filter(|r| target.is_none_or(|t| r.target_finding == t))
7012 .collect();
7013
7014 if json {
7015 let payload = json!({
7016 "ok": true,
7017 "command": "replications",
7018 "frontier": frontier.display().to_string(),
7019 "filter_target": target,
7020 "count": filtered.len(),
7021 "replications": filtered,
7022 });
7023 println!(
7024 "{}",
7025 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
7026 );
7027 return;
7028 }
7029
7030 println!();
7031 let header = match target {
7032 Some(t) => format!("VELA · REPLICATIONS · {t}"),
7033 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
7034 };
7035 println!(" {}", header.to_uppercase().dimmed());
7036 println!(" {}", style::tick_row(60));
7037 if filtered.is_empty() {
7038 println!(" (no replications recorded)");
7039 return;
7040 }
7041 for rep in &filtered {
7042 let outcome_chip = match rep.outcome.as_str() {
7043 "replicated" => style::ok(&rep.outcome),
7044 "failed" => style::lost(&rep.outcome),
7045 "partial" => style::warn(&rep.outcome),
7046 _ => rep.outcome.clone().normal().to_string(),
7047 };
7048 println!(
7049 " · {} {} by {}",
7050 rep.id.dimmed(),
7051 outcome_chip,
7052 rep.attempted_by
7053 );
7054 println!(" target: {}", rep.target_finding);
7055 if !rep.conditions.text.is_empty() {
7056 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7057 }
7058 if !rep.provenance.title.is_empty() {
7059 println!(" source: {}", truncate(&rep.provenance.title, 80));
7060 }
7061 }
7062}
7063
/// Entry point for `vela ingest`: route `path` to the appropriate handler
/// based on what it is.
///
/// * `doi:` / `pmid:` / `nct:` URIs only fetch metadata via
///   `cmd_source_fetch`; no frontier state is written (a hint is printed in
///   non-JSON mode).
/// * Files dispatch on extension: pdf → scout, md/markdown → compile-notes,
///   csv/tsv → compile-data, json → artifact-to-state.
/// * Folders are scanned once to count handlable extensions, then every
///   matching handler runs in sequence; a folder with none falls back to
///   compile-code.
/// * Anything else (non-existent, or neither file nor directory) is fatal.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    // URI scheme check is case-insensitive and tolerates surrounding whitespace.
    let lowered = path.trim().to_lowercase();
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                "  vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                "  next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    // Normalized (lowercase) extension used for single-file dispatch below.
    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                // JSON artifacts are applied directly to state with a default
                // bot actor unless the caller supplied one.
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count handlable files per extension (top level only;
        // read_dir does not recurse) and remember extensions with no handler.
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are ignored; everything else is reported
                            // as skipped at the end.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // No handlable files at all: treat the folder as a code tree.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                "  vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                "    pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each handler that has work, in a fixed order.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            // Re-walk the directory to apply each JSON artifact individually.
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                "  vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7263
7264#[allow(clippy::too_many_arguments)]
7265async fn cmd_compile_data(
7267 root: &Path,
7268 frontier: &Path,
7269 backend: Option<&str>,
7270 sample_rows: Option<usize>,
7271 dry_run: bool,
7272 json_out: bool,
7273) {
7274 match DATASETS_HANDLER.get() {
7275 Some(handler) => {
7276 handler(
7277 root.to_path_buf(),
7278 frontier.to_path_buf(),
7279 backend.map(String::from),
7280 sample_rows,
7281 dry_run,
7282 json_out,
7283 )
7284 .await;
7285 }
7286 None => {
7287 eprintln!(
7288 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7289 style::err_prefix()
7290 );
7291 std::process::exit(1);
7292 }
7293 }
7294}
7295
7296async fn cmd_review_pending(
7299 frontier: &Path,
7300 backend: Option<&str>,
7301 max_proposals: Option<usize>,
7302 batch_size: usize,
7303 dry_run: bool,
7304 json_out: bool,
7305) {
7306 match REVIEWER_HANDLER.get() {
7307 Some(handler) => {
7308 handler(
7309 frontier.to_path_buf(),
7310 backend.map(String::from),
7311 max_proposals,
7312 batch_size,
7313 dry_run,
7314 json_out,
7315 )
7316 .await;
7317 }
7318 None => {
7319 eprintln!(
7320 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7321 style::err_prefix()
7322 );
7323 std::process::exit(1);
7324 }
7325 }
7326}
7327
7328async fn cmd_find_tensions(
7331 frontier: &Path,
7332 backend: Option<&str>,
7333 max_findings: Option<usize>,
7334 dry_run: bool,
7335 json_out: bool,
7336) {
7337 match TENSIONS_HANDLER.get() {
7338 Some(handler) => {
7339 handler(
7340 frontier.to_path_buf(),
7341 backend.map(String::from),
7342 max_findings,
7343 dry_run,
7344 json_out,
7345 )
7346 .await;
7347 }
7348 None => {
7349 eprintln!(
7350 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7351 style::err_prefix()
7352 );
7353 std::process::exit(1);
7354 }
7355 }
7356}
7357
7358async fn cmd_plan_experiments(
7361 frontier: &Path,
7362 backend: Option<&str>,
7363 max_findings: Option<usize>,
7364 dry_run: bool,
7365 json_out: bool,
7366) {
7367 match EXPERIMENTS_HANDLER.get() {
7368 Some(handler) => {
7369 handler(
7370 frontier.to_path_buf(),
7371 backend.map(String::from),
7372 max_findings,
7373 dry_run,
7374 json_out,
7375 )
7376 .await;
7377 }
7378 None => {
7379 eprintln!(
7380 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7381 style::err_prefix()
7382 );
7383 std::process::exit(1);
7384 }
7385 }
7386}
7387
7388async fn cmd_compile_code(
7391 root: &Path,
7392 frontier: &Path,
7393 backend: Option<&str>,
7394 max_files: Option<usize>,
7395 dry_run: bool,
7396 json_out: bool,
7397) {
7398 match CODE_HANDLER.get() {
7399 Some(handler) => {
7400 handler(
7401 root.to_path_buf(),
7402 frontier.to_path_buf(),
7403 backend.map(String::from),
7404 max_files,
7405 dry_run,
7406 json_out,
7407 )
7408 .await;
7409 }
7410 None => {
7411 eprintln!(
7412 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7413 style::err_prefix()
7414 );
7415 std::process::exit(1);
7416 }
7417 }
7418}
7419
7420async fn cmd_compile_notes(
7425 vault: &Path,
7426 frontier: &Path,
7427 backend: Option<&str>,
7428 max_files: Option<usize>,
7429 max_items_per_category: Option<usize>,
7430 dry_run: bool,
7431 json_out: bool,
7432) {
7433 match NOTES_HANDLER.get() {
7434 Some(handler) => {
7435 handler(
7436 vault.to_path_buf(),
7437 frontier.to_path_buf(),
7438 backend.map(String::from),
7439 max_files,
7440 max_items_per_category,
7441 dry_run,
7442 json_out,
7443 )
7444 .await;
7445 }
7446 None => {
7447 eprintln!(
7448 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7449 style::err_prefix()
7450 );
7451 std::process::exit(1);
7452 }
7453 }
7454}
7455
7456async fn cmd_scout(
7463 folder: &Path,
7464 frontier: &Path,
7465 backend: Option<&str>,
7466 dry_run: bool,
7467 json_out: bool,
7468) {
7469 match SCOUT_HANDLER.get() {
7470 Some(handler) => {
7471 handler(
7472 folder.to_path_buf(),
7473 frontier.to_path_buf(),
7474 backend.map(String::from),
7475 dry_run,
7476 json_out,
7477 )
7478 .await;
7479 }
7480 None => {
7481 eprintln!(
7482 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7483 style::err_prefix()
7484 );
7485 std::process::exit(1);
7486 }
7487 }
7488}
7489
/// Run the `vela check` umbrella command: schema validation, lint/stats, and
/// conformance, selected by the flag arguments.
///
/// With `json_output` the whole report comes from `check_json_payload` and
/// the process exits non-zero unless the payload's "ok" flag is true. In text
/// mode, when no individual check flag is set everything runs (`run_all`);
/// `strict` additionally fails on a non-empty review queue or a not-ready
/// proof state. `fix` is accepted but currently unused.
#[allow(clippy::too_many_arguments)]
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits everything else.
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // No explicit selection means "run everything".
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    // Stats/lint pass: skipped entirely under --schema-only.
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Signature summary is only shown when verification succeeded and at
        // least one item is actually signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails the command; signal findings only fail
        // it under --strict.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        // When conformance was merely implied by run_all, skip quietly if the
        // conformance directory is absent.
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                "  conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    // `fix` is intentionally unused for now; silence the unused-arg lint.
    let _ = fix;
}
7578
7579fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
7580 let report = validate::validate(src);
7581 let loaded = repo::load_from_path(src).ok();
7582 let (method_report, graph_report) = if schema_only {
7583 (None, None)
7584 } else if let Some(frontier) = loaded.as_ref() {
7585 (
7586 Some(lint::lint(frontier, None, None)),
7587 Some(lint::lint_frontier(frontier)),
7588 )
7589 } else {
7590 (None, None)
7591 };
7592 let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
7593 let mut diagnostics = Vec::new();
7594 diagnostics.extend(report.errors.iter().map(|e| {
7595 json!({
7596 "severity": "error",
7597 "rule_id": "schema",
7598 "finding_id": null,
7599 "file": &e.file,
7600 "field_path": null,
7601 "message": &e.error,
7602 "suggestion": schema_error_suggestion(&e.error),
7603 "fixable": schema_error_fix(&e.error),
7604 "normalize_action": schema_error_action(&e.error),
7605 })
7606 }));
7607 for (check_id, lint_report) in [
7608 ("methodology", method_report.as_ref()),
7609 ("frontier_graph", graph_report.as_ref()),
7610 ] {
7611 if let Some(lint_report) = lint_report {
7612 diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
7613 json!({
7614 "severity": d.severity.to_string(),
7615 "rule_id": &d.rule_id,
7616 "check": check_id,
7617 "finding_id": &d.finding_id,
7618 "field_path": null,
7619 "message": &d.message,
7620 "suggestion": &d.suggestion,
7621 "fixable": false,
7622 "normalize_action": null,
7623 })
7624 }));
7625 }
7626 }
7627 let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
7628 let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
7629 let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
7630 let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
7631 let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
7632 let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
7633 let replay_report = loaded.as_ref().map(events::replay_report);
7634 let state_integrity_report = if schema_only {
7635 loaded.as_ref().map(state_integrity::analyze)
7636 } else {
7637 state_integrity::analyze_path(src).ok()
7638 };
7639 if let Some(replay) = replay_report.as_ref()
7640 && !replay.ok
7641 {
7642 diagnostics.extend(replay.conflicts.iter().map(|conflict| {
7643 json!({
7644 "severity": "error",
7645 "rule_id": "event_replay",
7646 "check": "events",
7647 "finding_id": null,
7648 "field_path": null,
7649 "message": conflict,
7650 "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
7651 "fixable": false,
7652 "normalize_action": null,
7653 })
7654 }));
7655 }
7656 let event_errors = replay_report
7657 .as_ref()
7658 .map_or(0, |replay| usize::from(!replay.ok));
7659 let state_integrity_errors = state_integrity_report
7660 .as_ref()
7661 .map_or(0, |report| report.structural_errors.len());
7662 let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
7663 .as_ref()
7664 .map(|frontier| {
7665 (
7666 sources::source_summary(frontier),
7667 sources::evidence_summary(frontier),
7668 sources::condition_summary(frontier),
7669 proposals::summary(frontier),
7670 proposals::proof_state_json(&frontier.proof_state),
7671 )
7672 })
7673 .unwrap_or_else(|| {
7674 (
7675 sources::SourceRegistrySummary::default(),
7676 sources::EvidenceAtomSummary::default(),
7677 sources::ConditionSummary::default(),
7678 proposals::ProposalSummary::default(),
7679 Value::Null,
7680 )
7681 });
7682 let signature_report = loaded
7683 .as_ref()
7684 .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
7685 if let Some(frontier) = loaded.as_ref()
7686 && !schema_only
7687 {
7688 let projection = sources::derive_projection(frontier);
7689 let existing_sources = frontier
7690 .sources
7691 .iter()
7692 .map(|source| source.id.as_str())
7693 .collect::<std::collections::BTreeSet<_>>();
7694 let existing_atoms = frontier
7695 .evidence_atoms
7696 .iter()
7697 .map(|atom| atom.id.as_str())
7698 .collect::<std::collections::BTreeSet<_>>();
7699 let existing_conditions = frontier
7700 .condition_records
7701 .iter()
7702 .map(|record| record.id.as_str())
7703 .collect::<std::collections::BTreeSet<_>>();
7704 for source in projection
7705 .sources
7706 .iter()
7707 .filter(|source| !existing_sources.contains(source.id.as_str()))
7708 {
7709 diagnostics.push(json!({
7710 "severity": "warning",
7711 "rule_id": "missing_source_record",
7712 "check": "source_registry",
7713 "finding_id": source.finding_ids.first(),
7714 "field_path": "sources",
7715 "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
7716 "suggestion": "Run `vela normalize` to materialize source records before proof export.",
7717 "fixable": true,
7718 "normalize_action": "materialize_source_record",
7719 }));
7720 }
7721 for atom in projection
7722 .evidence_atoms
7723 .iter()
7724 .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
7725 {
7726 diagnostics.push(json!({
7727 "severity": "warning",
7728 "rule_id": "missing_evidence_atom",
7729 "check": "evidence_atoms",
7730 "finding_id": atom.finding_id,
7731 "field_path": "evidence_atoms",
7732 "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
7733 "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
7734 "fixable": true,
7735 "normalize_action": "materialize_evidence_atom",
7736 }));
7737 }
7738 for atom in projection
7739 .evidence_atoms
7740 .iter()
7741 .filter(|atom| atom.locator.is_none())
7742 {
7743 diagnostics.push(json!({
7744 "severity": "warning",
7745 "rule_id": "missing_evidence_locator",
7746 "check": "evidence_atoms",
7747 "finding_id": atom.finding_id,
7748 "field_path": "evidence_atoms[].locator",
7749 "message": format!("Evidence atom {} has no source locator.", atom.id),
7750 "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
7751 "fixable": false,
7752 "normalize_action": null,
7753 }));
7754 }
7755 for condition in projection
7756 .condition_records
7757 .iter()
7758 .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
7759 {
7760 diagnostics.push(json!({
7761 "severity": "warning",
7762 "rule_id": "condition_record_missing",
7763 "check": "conditions",
7764 "finding_id": condition.finding_id,
7765 "field_path": "condition_records",
7766 "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
7767 "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
7768 "fixable": true,
7769 "normalize_action": "materialize_condition_record",
7770 }));
7771 }
7772 for proposal in frontier.proposals.iter().filter(|proposal| {
7773 matches!(proposal.status.as_str(), "accepted" | "applied")
7774 && proposal
7775 .reviewed_by
7776 .as_deref()
7777 .is_none_or(proposals::is_placeholder_reviewer)
7778 }) {
7779 diagnostics.push(json!({
7780 "severity": "error",
7781 "rule_id": "reviewer_identity_missing",
7782 "check": "proposals",
7783 "finding_id": proposal.target.id,
7784 "field_path": "proposals[].reviewed_by",
7785 "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
7786 "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
7787 "fixable": false,
7788 "normalize_action": null,
7789 }));
7790 }
7791 }
7792 let signal_report = loaded
7793 .as_ref()
7794 .map(|frontier| signals::analyze(frontier, &diagnostics))
7795 .unwrap_or_else(empty_signal_report);
7796 let errors =
7797 report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
7798 let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
7799 let infos = method_infos + graph_infos;
7800 let strict_blockers = signal_report
7801 .signals
7802 .iter()
7803 .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
7804 .count();
7805 let fixable = diagnostics
7806 .iter()
7807 .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
7808 .count();
7809 let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
7810
7811 json!({
7812 "ok": ok,
7813 "command": "check",
7814 "schema_version": project::VELA_SCHEMA_VERSION,
7815 "source": {
7816 "path": src.display().to_string(),
7817 "hash": format!("sha256:{source_hash}"),
7818 },
7819 "summary": {
7820 "status": if ok { "pass" } else { "fail" },
7821 "checked_findings": report.total_files,
7822 "valid_findings": report.valid,
7823 "invalid_findings": report.invalid,
7824 "errors": errors,
7825 "warnings": warnings,
7826 "info": infos,
7827 "fixable": fixable,
7828 "strict": strict,
7829 "schema_only": schema_only,
7830 },
7831 "checks": [
7832 {
7833 "id": "schema",
7834 "status": if report.invalid == 0 { "pass" } else { "fail" },
7835 "checked": report.total_files,
7836 "failed": report.invalid,
7837 "errors": report.errors.iter().map(|e| json!({
7838 "file": e.file,
7839 "message": e.error,
7840 })).collect::<Vec<_>>(),
7841 },
7842 {
7843 "id": "methodology",
7844 "status": if method_errors == 0 { "pass" } else { "fail" },
7845 "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
7846 "failed": method_errors,
7847 "warnings": method_warnings,
7848 "info": method_infos,
7849 "skipped": schema_only,
7850 },
7851 {
7852 "id": "frontier_graph",
7853 "status": if graph_errors == 0 { "pass" } else { "fail" },
7854 "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
7855 "failed": graph_errors,
7856 "warnings": graph_warnings,
7857 "info": graph_infos,
7858 "skipped": schema_only,
7859 },
7860 {
7861 "id": "signals",
7862 "status": if strict_blockers == 0 { "pass" } else { "fail" },
7863 "checked": signal_report.signals.len(),
7864 "failed": strict_blockers,
7865 "warnings": signal_report.proof_readiness.warnings,
7866 "skipped": loaded.is_none(),
7867 "blockers": signal_report.signals.iter()
7868 .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
7869 .map(|s| json!({
7870 "id": s.id,
7871 "kind": s.kind,
7872 "severity": s.severity,
7873 "reason": s.reason,
7874 }))
7875 .collect::<Vec<_>>(),
7876 },
7877 {
7878 "id": "events",
7879 "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
7880 "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
7881 "failed": event_errors,
7882 "skipped": schema_only || loaded.is_none(),
7883 },
7884 {
7885 "id": "state_integrity",
7886 "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
7887 "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
7888 "failed": state_integrity_errors,
7889 "skipped": schema_only || loaded.is_none(),
7890 }
7891 ],
7892 "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
7893 "replay": replay_report,
7894 "state_integrity": state_integrity_report,
7895 "source_registry": source_registry,
7896 "evidence_atoms": evidence_atoms,
7897 "conditions": conditions,
7898 "proposals": proposal_summary,
7899 "proof_state": proof_state,
7900 "signatures": signature_report,
7901 "diagnostics": diagnostics,
7902 "signals": signal_report.signals,
7903 "review_queue": signal_report.review_queue,
7904 "proof_readiness": signal_report.proof_readiness,
7905 "repair_plan": build_repair_plan(&diagnostics),
7906 })
7907}
7908
/// Normalize a frontier: apply safe repairs (entity type/name fixes,
/// confidence recomputation), optionally rewrite finding IDs to their
/// content addresses, optionally resync provenance from source records,
/// and materialize derivable source/evidence/condition records.
///
/// Output goes in place (`--write`), to `out`, or nowhere (dry run).
/// Refuses to write when the frontier already carries substantive
/// canonical events, since rewriting state would break event replay.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Reject contradictory or dependent flag combinations up front.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packet directories are immutable exports, never normalized.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Any event beyond the initial creation marker makes the log
    // canonical; normalizing underneath it would desync replay.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    // Safe always-on repairs; each returns a count for the report.
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Snapshot record counts so materialization deltas can be measured.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Collect every finding whose ID drifted from its content address.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // A collision (two findings mapping to one address) would silently
        // merge them — abort instead of losing a finding.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Rewrite each finding, keeping the old ID as previous_version.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Repoint intra-frontier links at the rewritten IDs.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old->new mapping for external tooling.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    // Materialize derivable records, then measure how many were added.
    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist: --write saves back to the detected source, --out saves to
    // a new path; otherwise this remains a dry run.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    // All normalize repairs are classified as safe, so planned == safe.
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            " entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            " would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8096
/// Export a proof packet for `frontier` with the given template,
/// optionally running a gold-standard benchmark suite, then validate the
/// packet and record the export in frontier proof state.
///
/// Fails fast on an unsupported template, a failed benchmark, or a
/// packet that does not validate. The updated proof state is written
/// back to disk only when `record_proof_state` is set.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    // Only one proof template is implemented today.
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // With a gold dataset: run the benchmark, attach its summary to the
    // packet, and abort unless it reports ok == true.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // The exported packet must validate before the export is recorded.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes/timestamp into frontier proof state.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Persist the updated proof state only on explicit request.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8200
/// One-screen health summary of a frontier: entity counts, pending
/// proposal inbox, causal-audit verdicts, replication outcomes, and
/// federation sync/conflict state. `json` emits the same data as a
/// single machine-readable envelope.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Pending proposals, totalled and bucketed by kind (BTreeMap keeps
    // the per-kind listing deterministically sorted).
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Causal identifiability audit over the frontier's findings.
    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Scan the event log for the most recent peer sync, the most recent
    // conflict, and the total number of conflict events.
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                // Keep whichever event has the later timestamp.
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication outcomes: distinct findings with at least one
    // successful replication, plus the count of failed attempts.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable report: banner, counts, then one section each for
    // inbox, causal audit, replications, and federation.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        // Underidentified findings escalate the chip from warn to lost.
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
8398
8399fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8401 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8402 let mut events: Vec<&crate::events::StateEvent> = project
8403 .events
8404 .iter()
8405 .filter(|e| match kind_filter {
8406 Some(k) => e.kind.contains(k),
8407 None => true,
8408 })
8409 .collect();
8410 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8411 events.truncate(limit);
8412
8413 if json {
8414 let payload: Vec<_> = events
8415 .iter()
8416 .map(|e| {
8417 json!({
8418 "id": e.id,
8419 "kind": e.kind,
8420 "actor": e.actor.id,
8421 "target": &e.target.id,
8422 "target_type": &e.target.r#type,
8423 "timestamp": e.timestamp,
8424 "reason": e.reason,
8425 })
8426 })
8427 .collect();
8428 println!(
8429 "{}",
8430 serde_json::to_string_pretty(&json!({
8431 "ok": true,
8432 "command": "log",
8433 "events": payload,
8434 }))
8435 .expect("serialize log")
8436 );
8437 return;
8438 }
8439
8440 println!();
8441 println!(
8442 " {}",
8443 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8444 .to_uppercase()
8445 .dimmed()
8446 );
8447 println!(" {}", style::tick_row(60));
8448 if events.is_empty() {
8449 println!(" (no events)");
8450 return;
8451 }
8452 for e in &events {
8453 let when = fmt_timestamp(&e.timestamp);
8454 let target_short = if e.target.id.len() > 22 {
8455 format!("{}…", &e.target.id[..21])
8456 } else {
8457 e.target.id.clone()
8458 };
8459 let reason: String = e.reason.chars().take(70).collect();
8460 println!(
8461 " {:<19} {:<32} {:<24} {}",
8462 when, e.kind, target_short, reason
8463 );
8464 }
8465 println!();
8466}
8467
8468fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8470 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8471
8472 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8475 std::collections::HashMap::new();
8476 for p in &project.proposals {
8477 if p.kind != "finding.note" {
8478 continue;
8479 }
8480 if p.actor.id != "agent:reviewer-agent" {
8481 continue;
8482 }
8483 let reason = &p.reason;
8484 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8485 continue;
8486 };
8487 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8488 let extract = |k: &str| -> f64 {
8489 let pat = format!("{k} ");
8490 text.find(&pat)
8491 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8492 .and_then(|t| t.parse::<f64>().ok())
8493 .unwrap_or(0.0)
8494 };
8495 score_map.insert(
8496 target.to_string(),
8497 (
8498 extract("plausibility"),
8499 extract("evidence"),
8500 extract("scope"),
8501 extract("duplicate-risk"),
8502 ),
8503 );
8504 }
8505
8506 let mut pending: Vec<&crate::proposals::StateProposal> = project
8507 .proposals
8508 .iter()
8509 .filter(|p| {
8510 p.status == "pending_review"
8511 && match kind_filter {
8512 Some(k) => p.kind.contains(k),
8513 None => true,
8514 }
8515 })
8516 .collect();
8517 pending.sort_by(|a, b| {
8519 let sa = score_map
8520 .get(&a.id)
8521 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8522 let sb = score_map
8523 .get(&b.id)
8524 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8525 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8526 });
8527 pending.truncate(limit);
8528
8529 if json {
8530 let payload: Vec<_> = pending
8531 .iter()
8532 .map(|p| {
8533 let assertion_text = p
8534 .payload
8535 .get("finding")
8536 .and_then(|f| f.get("assertion"))
8537 .and_then(|a| a.get("text"))
8538 .and_then(|t| t.as_str());
8539 let assertion_type = p
8540 .payload
8541 .get("finding")
8542 .and_then(|f| f.get("assertion"))
8543 .and_then(|a| a.get("type"))
8544 .and_then(|t| t.as_str());
8545 let composite = score_map
8546 .get(&p.id)
8547 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8548 json!({
8549 "proposal_id": p.id,
8550 "kind": p.kind,
8551 "actor": p.actor,
8552 "reason": p.reason,
8553 "assertion_text": assertion_text,
8554 "assertion_type": assertion_type,
8555 "reviewer_composite": composite,
8556 })
8557 })
8558 .collect();
8559 println!(
8560 "{}",
8561 serde_json::to_string_pretty(&json!({
8562 "ok": true,
8563 "command": "inbox",
8564 "shown": pending.len(),
8565 "proposals": payload,
8566 }))
8567 .expect("serialize inbox")
8568 );
8569 return;
8570 }
8571
8572 println!();
8573 println!(
8574 " {}",
8575 format!(
8576 "VELA · INBOX · {} ({} pending shown)",
8577 path.display(),
8578 pending.len()
8579 )
8580 .to_uppercase()
8581 .dimmed()
8582 );
8583 println!(" {}", style::tick_row(60));
8584 if pending.is_empty() {
8585 println!(" (inbox clean)");
8586 return;
8587 }
8588 for p in &pending {
8589 let assertion_text = p
8590 .payload
8591 .get("finding")
8592 .and_then(|f| f.get("assertion"))
8593 .and_then(|a| a.get("text"))
8594 .and_then(|t| t.as_str())
8595 .unwrap_or("");
8596 let assertion_type = p
8597 .payload
8598 .get("finding")
8599 .and_then(|f| f.get("assertion"))
8600 .and_then(|a| a.get("type"))
8601 .and_then(|t| t.as_str())
8602 .unwrap_or("");
8603 let composite = score_map
8604 .get(&p.id)
8605 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8606 let score_str = composite
8607 .map(|c| format!("[{:.2}]", c))
8608 .unwrap_or_else(|| "[—] ".to_string());
8609 let kind_short = if p.kind.len() > 12 {
8610 format!("{}…", &p.kind[..11])
8611 } else {
8612 p.kind.clone()
8613 };
8614 let summary: String = if !assertion_text.is_empty() {
8615 assertion_text.chars().take(80).collect()
8616 } else {
8617 p.reason.chars().take(80).collect()
8618 };
8619 println!(
8620 " {} {} {:<13} {:<18} {}",
8621 score_str, p.id, kind_short, assertion_type, summary
8622 );
8623 }
8624 println!();
8625}
8626
8627fn cmd_ask(path: &Path, question: &str, json: bool) {
8632 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8633
8634 if question.trim().is_empty() {
8635 use std::io::{BufRead, Write};
8637 println!();
8638 println!(
8639 " {}",
8640 format!("VELA · ASK · {}", path.display())
8641 .to_uppercase()
8642 .dimmed()
8643 );
8644 println!(" {}", style::tick_row(60));
8645 println!(" Ask a question. Type `exit` to quit.");
8646 println!(" Examples:");
8647 println!(" · what's pending?");
8648 println!(" · what's underidentified?");
8649 println!(" · how many findings?");
8650 println!(" · what changed recently?");
8651 println!(" · who has what calibration?");
8652 println!();
8653 let stdin = std::io::stdin();
8654 let mut stdout = std::io::stdout();
8655 loop {
8656 print!(" ask> ");
8657 stdout.flush().ok();
8658 let mut line = String::new();
8659 if stdin.lock().read_line(&mut line).is_err() {
8660 break;
8661 }
8662 let q = line.trim();
8663 if q.is_empty() {
8664 continue;
8665 }
8666 if matches!(q, "exit" | "quit" | "q") {
8667 break;
8668 }
8669 answer(&project, q, false);
8670 }
8671 return;
8672 }
8673
8674 answer(&project, question, json);
8675}
8676
/// Route a free-text question to one of several canned reports by
/// keyword matching (case-insensitive), printing either JSON or a
/// human-readable answer. Unrecognized questions get a hint listing the
/// supported topics. First matching branch wins.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Topic: pending proposals / review inbox.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Topic: causal identifiability audit.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            // List up to 8 concrete underidentified findings to review.
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Topic: recent activity (latest 8 events, newest first).
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Topic: entity counts.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Topic: per-actor prediction calibration (Brier scores).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Topic: federation peers and conflicts.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: no topic matched.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
8908
8909fn frontier_label(p: &crate::project::Project) -> String {
8910 if p.project.name.trim().is_empty() {
8911 "(unnamed)".to_string()
8912 } else {
8913 p.project.name.clone()
8914 }
8915}
8916
8917fn fmt_timestamp(ts: &str) -> String {
8918 chrono::DateTime::parse_from_rfc3339(ts)
8921 .map(|dt| dt.format("%m-%d %H:%M").to_string())
8922 .unwrap_or_else(|_| ts.chars().take(16).collect())
8923}
8924
/// Print a dashboard-style summary of a frontier's aggregate statistics.
///
/// Loads the frontier from `path` (exiting via `load_frontier_or_fail` on
/// error) and renders the precomputed `stats` block, proof-state status,
/// and per-category counts to stdout. Display-only; never mutates state.
fn cmd_stats(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let s = &frontier.stats;
    println!();
    println!(" {}", "FRONTIER · V0.36.0".dimmed());
    println!(" {}", frontier.project.name.bold());
    println!(" {}", style::tick_row(60));
    println!(" id: {}", frontier.frontier_id());
    println!(" compiled: {}", frontier.project.compiled_at);
    println!(" papers: {}", frontier.project.papers_processed);
    println!(" findings: {}", s.findings);
    println!(" links: {}", s.links);
    println!(" replicated: {}", s.replicated);
    println!(" avg confidence: {}", s.avg_confidence);
    println!(" gaps: {}", s.gaps);
    println!(" contested: {}", s.contested);
    println!(" reviewed: {}", s.human_reviewed);
    println!(" proposals: {}", s.proposal_count);
    println!(
        " recorded proof: {}",
        frontier.proof_state.latest_packet.status
    );
    // The clarifying note only makes sense once a packet has been exported.
    if frontier.proof_state.latest_packet.status != "never_exported" {
        println!(
            " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
        );
    }
    if !s.categories.is_empty() {
        println!();
        println!(" {}", "categories".dimmed());
        // Sort descending by count; ties keep map iteration order (stable sort).
        let mut categories = s.categories.iter().collect::<Vec<_>>();
        categories.sort_by(|a, b| b.1.cmp(a.1));
        for (category, count) in categories {
            println!(" {category}: {}", count);
        }
    }
    println!();
    println!(" {}", style::tick_row(60));
    println!();
}
8965
/// Dispatch the `vela proposals <action>` subcommands.
///
/// Each arm delegates to the `proposals` module for the real work and then
/// prints either a pretty-printed JSON payload (`--json`) or a short
/// human-readable summary. All recoverable errors terminate the process
/// via `fail_return`; `Validate` additionally exits with status 1 when any
/// proposal is invalid.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                // Count comes from the already-built payload rather than
                // proposals_list (which was moved into the payload above).
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report what applying it would change,
        // without persisting anything.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                // Before/after counts show the net effect of applying.
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file/bundle into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposals file without touching any frontier.
        // Exits non-zero when any proposal fails validation.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Signal validation failure to scripts/CI.
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal and apply it; prints the resulting event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal with a reviewer-provided reason.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9240
9241fn cmd_artifact_to_state(
9242 frontier: &Path,
9243 packet: &Path,
9244 actor: &str,
9245 apply_artifacts: bool,
9246 json: bool,
9247) {
9248 let report =
9249 crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
9250 .unwrap_or_else(|e| fail_return(&e));
9251 if json {
9252 println!(
9253 "{}",
9254 serde_json::to_string_pretty(&report)
9255 .expect("failed to serialize artifact-to-state report")
9256 );
9257 } else {
9258 println!("vela artifact-to-state");
9259 println!(" packet: {}", report.packet_id);
9260 println!(" frontier: {}", report.frontier);
9261 println!(" artifact proposals: {}", report.artifact_proposals);
9262 println!(" finding proposals: {}", report.finding_proposals);
9263 println!(" gap proposals: {}", report.gap_proposals);
9264 println!(
9265 " applied artifact events: {}",
9266 report.applied_artifact_events
9267 );
9268 println!(
9269 " pending truth proposals: {}",
9270 report.pending_truth_proposals
9271 );
9272 }
9273}
9274
/// Dispatch the `vela bridge-kit <action>` subcommands.
///
/// `Validate` checks a bridge-kit directory/file offline; `VerifyProvenance`
/// resolves the identifiers in a packet against external registries over the
/// network. Both arms exit with status 1 on failure so scripts can rely on
/// the exit code.
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        // Offline structural validation of a bridge kit.
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                // One line per packet: summary for valid ones, joined
                // error list for invalid ones.
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                // Kit-level errors (not tied to a single packet).
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Non-zero exit when the kit as a whole is invalid.
            if !report.ok {
                std::process::exit(1);
            }
        }
        // Online check that the packet's DOIs/PMIDs actually resolve.
        BridgeKitAction::VerifyProvenance { packet, json } => {
            let report = verify_packet_provenance(&packet).await;
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Fixed-width status chip per entry.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            // Only definitively-unresolved identifiers fail the run;
            // skipped entries (e.g. network unreachable) do not.
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
9351
/// Aggregate result of a `bridge-kit verify-provenance` run: every
/// identifier extracted from the packet plus resolved/unresolved/skipped
/// tallies. Serialized as-is for `--json` output.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    // Command name recorded in the report ("bridge-kit.verify-provenance").
    command: String,
    // Display path of the packet file that was checked.
    packet: String,
    // Per-identifier verification outcomes.
    identifiers: Vec<ProvenanceVerificationEntry>,
    // Count of entries with status "resolved".
    resolved_count: usize,
    // Count of entries with status "unresolved".
    unresolved_count: usize,
    // Count of all remaining entries (status "skipped" or anything else).
    skipped_count: usize,
}
9361
/// Verification outcome for a single identifier found in a packet.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    // Normalized identifier, e.g. "doi:10.1000/xyz" or "pmid:12345".
    identifier: String,
    // Identifier kind: "doi", "pmid", or "unknown".
    kind: String,
    // One of "resolved", "unresolved", or "skipped".
    status: String,
    // Optional detail: HTTP status, transport error, or skip reason.
    // Omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
9370
/// Read, parse and validate an artifact packet, then check every DOI/PMID
/// it references against the external registries (Crossref / NCBI eutils).
///
/// Identifiers are collected from artifact locators and candidate-claim
/// source refs, deduplicated, and verified sequentially (one HTTP request
/// at a time). Any file/parse/validation error exits via `fail_return`.
async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
    use crate::artifact_to_state::ArtifactPacket;
    let raw = std::fs::read_to_string(packet_path)
        .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
    let parsed: ArtifactPacket =
        serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
    let packet = parsed
        .validate()
        .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));

    // BTreeSet both deduplicates identifiers and gives a deterministic
    // (sorted) verification order.
    let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    for artifact in &packet.artifacts {
        if let Some(ident) = extract_identifier(&artifact.locator) {
            candidates.insert(ident);
        }
    }
    for claim in &packet.candidate_claims {
        for source_ref in &claim.source_refs {
            if let Some(ident) = extract_identifier(source_ref) {
                candidates.insert(ident);
            }
        }
    }

    // One shared client: identifies vela to the registries and bounds each
    // request to 15 seconds so a stalled registry can't hang the command.
    let client = reqwest::Client::builder()
        .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));

    let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
    let mut resolved = 0usize;
    let mut unresolved = 0usize;
    let mut skipped = 0usize;
    for candidate in &candidates {
        // Route by prefix; anything without a recognized prefix is skipped
        // (extract_identifier should only produce doi:/pmid:, so the else
        // branch is defensive).
        let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
            verify_doi(&client, doi).await
        } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
            verify_pmid(&client, pmid).await
        } else {
            ProvenanceVerificationEntry {
                identifier: candidate.clone(),
                kind: "unknown".to_string(),
                status: "skipped".to_string(),
                note: Some("no recognized identifier prefix".to_string()),
            }
        };
        // Tally by status; anything other than resolved/unresolved counts
        // as skipped.
        match entry.status.as_str() {
            "resolved" => resolved += 1,
            "unresolved" => unresolved += 1,
            _ => skipped += 1,
        }
        entries.push(entry);
    }

    ProvenanceVerificationReport {
        command: "bridge-kit.verify-provenance".to_string(),
        packet: packet_path.display().to_string(),
        identifiers: entries,
        resolved_count: resolved,
        unresolved_count: unresolved,
        skipped_count: skipped,
    }
}
9440
/// Normalize a free-form locator or source reference into a canonical
/// identifier string (`doi:...` or `pmid:...`).
///
/// Recognized forms:
/// - already-prefixed `doi:` / `pmid:` strings (returned trimmed, as-is);
/// - doi.org and dx.doi.org resolver URLs, both http and https;
/// - PubMed article URLs (any trailing slashes stripped from the PMID);
/// - bare DOIs: strings starting with the `10.` directory prefix that
///   contain a `/` and no spaces.
///
/// Returns `None` for empty/whitespace input or anything unrecognized.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Already in canonical form.
    if trimmed.starts_with("doi:") || trimmed.starts_with("pmid:") {
        return Some(trimmed.to_string());
    }
    // DOI resolver URLs. Fix: cover both schemes for both resolver hosts —
    // previously http://dx.doi.org/ was not recognized even though the
    // http:// scheme was accepted for doi.org.
    for prefix in [
        "https://doi.org/",
        "http://doi.org/",
        "https://dx.doi.org/",
        "http://dx.doi.org/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    // PubMed article URLs.
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let pmid = rest.trim_end_matches('/');
            return Some(format!("pmid:{pmid}"));
        }
    }
    // Bare DOI heuristic: all DOIs start with the "10." directory code and
    // contain a registrant/suffix separator; spaces rule out prose.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
9476
9477async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
9478 let url = format!("https://api.crossref.org/works/{doi}");
9479 match client.get(&url).send().await {
9480 Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
9481 identifier: format!("doi:{doi}"),
9482 kind: "doi".to_string(),
9483 status: "resolved".to_string(),
9484 note: None,
9485 },
9486 Ok(resp) => ProvenanceVerificationEntry {
9487 identifier: format!("doi:{doi}"),
9488 kind: "doi".to_string(),
9489 status: "unresolved".to_string(),
9490 note: Some(format!("crossref returned {}", resp.status())),
9491 },
9492 Err(e) => ProvenanceVerificationEntry {
9493 identifier: format!("doi:{doi}"),
9494 kind: "doi".to_string(),
9495 status: "skipped".to_string(),
9496 note: Some(format!("crossref unreachable: {e}")),
9497 },
9498 }
9499}
9500
9501async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
9502 let url = format!(
9503 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
9504 );
9505 match client.get(&url).send().await {
9506 Ok(resp) if resp.status().is_success() => {
9507 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9510 let result = body.get("result");
9511 let uids = result
9512 .and_then(|r| r.get("uids"))
9513 .and_then(|u| u.as_array());
9514 let resolved = uids.is_some_and(|a| !a.is_empty());
9515 if resolved {
9516 ProvenanceVerificationEntry {
9517 identifier: format!("pmid:{pmid}"),
9518 kind: "pmid".to_string(),
9519 status: "resolved".to_string(),
9520 note: None,
9521 }
9522 } else {
9523 ProvenanceVerificationEntry {
9524 identifier: format!("pmid:{pmid}"),
9525 kind: "pmid".to_string(),
9526 status: "unresolved".to_string(),
9527 note: Some("eutils returned empty uids".to_string()),
9528 }
9529 }
9530 }
9531 Ok(resp) => ProvenanceVerificationEntry {
9532 identifier: format!("pmid:{pmid}"),
9533 kind: "pmid".to_string(),
9534 status: "unresolved".to_string(),
9535 note: Some(format!("eutils returned {}", resp.status())),
9536 },
9537 Err(e) => ProvenanceVerificationEntry {
9538 identifier: format!("pmid:{pmid}"),
9539 kind: "pmid".to_string(),
9540 status: "skipped".to_string(),
9541 note: Some(format!("eutils unreachable: {e}")),
9542 },
9543 }
9544}
9545
/// Dispatch the `vela source-adapter <action>` subcommands.
///
/// `Run` forwards the CLI flags into `source_adapters::SourceAdapterRunOptions`,
/// awaits the adapter run, and prints either the serialized report
/// (`--json`) or a human-readable summary. Errors exit via `fail_return`.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            // The CLI fields map 1:1 onto the run options struct.
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // A packet id is only present when the run produced one
                // (e.g. not on a dry run — TODO confirm against source_adapters).
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
9603
/// Dispatch the `vela runtime-adapter <action>` subcommands.
///
/// `Run` forwards the CLI flags into `runtime_adapters::RuntimeAdapterRunOptions`
/// and prints either the serialized report (`--json`) or a human-readable
/// summary. Errors exit via `fail_return`.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            // The CLI fields map 1:1 onto the run options struct.
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                // Packet id is optional — only printed when the run
                // produced one.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
9656
/// Dispatch the `vela sign <action>` subcommands.
///
/// Covers keypair generation, signing a frontier's findings, verifying
/// signatures, and setting a per-finding signature threshold. Errors exit
/// via `fail`/`fail_return`.
fn cmd_sign(action: SignAction) {
    match action {
        // Generate an Ed25519 keypair on disk; prints the public half.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        // Sign the frontier's findings with a private key file.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures across the frontier; public key is optional
        // (sign::verify_frontier decides what to check without one).
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold lines only make sense when thresholds are in use.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set the signature threshold on one finding and recompute its
        // jointly-accepted flag.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A threshold of zero would mark everything jointly accepted;
            // reject it up front.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute acceptance BEFORE reading `met` and saving, so the
            // persisted state and the printed status agree.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
9785
/// Dispatch the `vela actor <action>` subcommands.
///
/// `Add` validates and registers a new actor (public key, optional tier,
/// ORCID and access clearance) in the frontier; `List` prints the
/// registered actors. Errors exit via `fail`/`fail_return`.
fn cmd_actor(action: ActorAction) {
    match action {
        ActorAction::Add {
            frontier,
            id,
            pubkey,
            tier,
            orcid,
            clearance,
            json,
        } => {
            // An Ed25519 public key is 32 bytes -> exactly 64 hex chars.
            let trimmed = pubkey.trim();
            if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
                fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
            }
            // Normalize/validate optional ORCID and clearance before
            // touching the frontier, so bad input never mutates state.
            let orcid_normalized = orcid
                .as_deref()
                .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
            let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
                crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
            });

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Actor ids are unique within a frontier.
            if project.actors.iter().any(|actor| actor.id == id) {
                fail(&format!(
                    "Actor '{id}' already registered in this frontier."
                ));
            }
            project.actors.push(sign::ActorRecord {
                id: id.clone(),
                public_key: trimmed.to_string(),
                algorithm: "ed25519".to_string(),
                created_at: chrono::Utc::now().to_rfc3339(),
                tier: tier.clone(),
                orcid: orcid_normalized.clone(),
                access_clearance: clearance,
            });
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "actor.add",
                "frontier": frontier.display().to_string(),
                "actor_id": id,
                "public_key": trimmed,
                "tier": tier,
                "orcid": orcid_normalized,
                "registered_count": project.actors.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
                );
            } else {
                let tier_suffix = tier
                    .as_deref()
                    .map_or_else(String::new, |t| format!(" tier={t}"));
                // Slice is safe here: trimmed was validated to be 64 hex
                // (ASCII) characters above.
                println!(
                    "{} actor {} (pubkey {}{tier_suffix})",
                    style::ok("registered"),
                    id,
                    &trimmed[..16]
                );
            }
        }
        ActorAction::List { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "actor.list",
                    "frontier": frontier.display().to_string(),
                    "actors": project.actors,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · ACTOR · LIST · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.actors.is_empty() {
                    println!(" (no actors registered)");
                } else {
                    for actor in &project.actors {
                        // NOTE(review): `[..16]` byte-slices the stored key
                        // and panics if it is shorter than 16 bytes; keys
                        // added via `actor add` are validated to 64 hex
                        // chars, but verify no other write path stores
                        // shorter keys.
                        println!(
                            " {:<28} {}… registered {}",
                            actor.id,
                            &actor.public_key[..16],
                            actor.created_at
                        );
                    }
                }
            }
        }
    }
}
9893
9894fn cmd_causal(action: CausalAction) {
9896 use crate::causal_reasoning;
9897
9898 match action {
9899 CausalAction::Audit {
9900 frontier,
9901 problems_only,
9902 json,
9903 } => {
9904 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9905 let mut entries = causal_reasoning::audit_frontier(&project);
9906 if problems_only {
9907 entries.retain(|e| e.verdict.needs_reviewer_attention());
9908 }
9909 let summary = causal_reasoning::summarize_audit(&entries);
9910
9911 if json {
9912 println!(
9913 "{}",
9914 serde_json::to_string_pretty(&json!({
9915 "ok": true,
9916 "command": "causal.audit",
9917 "frontier": frontier.display().to_string(),
9918 "summary": summary,
9919 "entries": entries,
9920 }))
9921 .expect("serialize causal.audit")
9922 );
9923 return;
9924 }
9925
9926 println!();
9927 println!(
9928 " {}",
9929 format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
9930 .to_uppercase()
9931 .dimmed()
9932 );
9933 println!(" {}", style::tick_row(60));
9934 println!(
9935 " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
9936 summary.total,
9937 summary.identified,
9938 summary.conditional,
9939 summary.underidentified,
9940 summary.underdetermined,
9941 );
9942 if entries.is_empty() {
9943 println!(" (no entries to report)");
9944 return;
9945 }
9946 for e in &entries {
9947 let chip = match e.verdict {
9948 crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
9949 crate::causal_reasoning::Identifiability::Conditional => {
9950 style::warn("conditional")
9951 }
9952 crate::causal_reasoning::Identifiability::Underidentified => {
9953 style::lost("underidentified")
9954 }
9955 crate::causal_reasoning::Identifiability::Underdetermined => {
9956 style::warn("underdetermined")
9957 }
9958 };
9959 let claim = e
9960 .causal_claim
9961 .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
9962 let grade = e
9963 .causal_evidence_grade
9964 .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
9965 println!();
9966 println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
9967 let assertion_short: String = e.assertion_text.chars().take(78).collect();
9968 println!(" {assertion_short}");
9969 println!(" {} {}", style::ok("why:"), e.rationale);
9970 if e.verdict.needs_reviewer_attention()
9971 || matches!(
9972 e.verdict,
9973 crate::causal_reasoning::Identifiability::Underdetermined
9974 )
9975 {
9976 println!(" {} {}", style::ok("fix:"), e.remediation);
9977 }
9978 }
9979 }
9980 CausalAction::Effect {
9981 frontier,
9982 source,
9983 on: target,
9984 json,
9985 } => {
9986 use crate::causal_graph::{CausalEffectVerdict, identify_effect};
9987
9988 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9989 let verdict = identify_effect(&project, &source, &target);
9990
9991 if json {
9992 println!(
9993 "{}",
9994 serde_json::to_string_pretty(&json!({
9995 "ok": true,
9996 "command": "causal.effect",
9997 "frontier": frontier.display().to_string(),
9998 "source": source,
9999 "target": target,
10000 "verdict": verdict,
10001 }))
10002 .expect("serialize causal.effect")
10003 );
10004 return;
10005 }
10006
10007 println!();
10008 println!(
10009 " {}",
10010 format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
10011 .to_uppercase()
10012 .dimmed()
10013 );
10014 println!(" {}", style::tick_row(60));
10015 match verdict {
10016 CausalEffectVerdict::Identified {
10017 adjustment_set,
10018 back_door_paths_considered,
10019 } => {
10020 if adjustment_set.is_empty() {
10021 println!(
10022 " {} no back-door adjustment needed",
10023 style::ok("identified")
10024 );
10025 } else {
10026 println!(" {} identified by adjusting on:", style::ok("identified"));
10027 for z in &adjustment_set {
10028 println!(" · {z}");
10029 }
10030 }
10031 println!(
10032 " back-door paths considered: {}",
10033 back_door_paths_considered
10034 );
10035 }
10036 CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
10037 println!(
10038 " {} identified via front-door criterion (Pearl 1995 §3.3)",
10039 style::ok("identified")
10040 );
10041 println!(" mediators that intercept all directed paths:");
10042 for m in &mediator_set {
10043 println!(" · {m}");
10044 }
10045 println!(
10046 " applies when source-target confounders are unobserved but the mediator chain is."
10047 );
10048 }
10049 CausalEffectVerdict::NoCausalPath { reason } => {
10050 println!(" {} no causal path: {reason}", style::warn("no_path"));
10051 }
10052 CausalEffectVerdict::Underidentified {
10053 unblocked_back_door_paths,
10054 candidates_tried,
10055 } => {
10056 println!(
10057 " {} no observational adjustment set found ({} candidates tried)",
10058 style::lost("underidentified"),
10059 candidates_tried
10060 );
10061 println!(" open back-door paths:");
10062 for path in unblocked_back_door_paths.iter().take(5) {
10063 println!(" · {}", path.join(" — "));
10064 }
10065 println!(
10066 " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
10067 );
10068 }
10069 CausalEffectVerdict::UnknownNode { which } => {
10070 fail(&which);
10071 }
10072 }
10073 println!();
10074 }
10075 CausalAction::Graph {
10076 frontier,
10077 node,
10078 json,
10079 } => {
10080 use crate::causal_graph::CausalGraph;
10081 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10082 let graph = CausalGraph::from_project(&project);
10083
10084 let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
10087 if !graph.contains(n) {
10088 fail(&format!("node not in frontier: {n}"));
10089 }
10090 vec![n]
10091 } else {
10092 project.findings.iter().map(|f| f.id.as_str()).collect()
10093 };
10094
10095 if json {
10096 let payload: Vec<_> = nodes
10097 .iter()
10098 .map(|n| {
10099 let parents: Vec<&str> = graph.parents_of(n).collect();
10100 let children: Vec<&str> = graph.children_of(n).collect();
10101 json!({
10102 "node": n,
10103 "parents": parents,
10104 "children": children,
10105 })
10106 })
10107 .collect();
10108 println!(
10109 "{}",
10110 serde_json::to_string_pretty(&json!({
10111 "ok": true,
10112 "command": "causal.graph",
10113 "node_count": graph.node_count(),
10114 "edge_count": graph.edge_count(),
10115 "nodes": payload,
10116 }))
10117 .expect("serialize causal.graph")
10118 );
10119 return;
10120 }
10121
10122 println!();
10123 println!(
10124 " {}",
10125 format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
10126 .to_uppercase()
10127 .dimmed()
10128 );
10129 println!(" {}", style::tick_row(60));
10130 println!(
10131 " {} nodes · {} edges",
10132 graph.node_count(),
10133 graph.edge_count()
10134 );
10135 println!();
10136 for n in &nodes {
10137 let parents: Vec<&str> = graph.parents_of(n).collect();
10138 let children: Vec<&str> = graph.children_of(n).collect();
10139 if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
10140 continue; }
10142 println!(" {n}");
10143 if !parents.is_empty() {
10144 println!(" parents: {}", parents.join(", "));
10145 }
10146 if !children.is_empty() {
10147 println!(" children: {}", children.join(", "));
10148 }
10149 }
10150 }
10151 CausalAction::Counterfactual {
10152 frontier,
10153 intervene_on,
10154 set_to,
10155 target,
10156 json,
10157 } => {
10158 use crate::counterfactual::{
10159 CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
10160 };
10161
10162 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10163 let query = CounterfactualQuery {
10164 intervene_on: intervene_on.clone(),
10165 set_to,
10166 target: target.clone(),
10167 };
10168 let verdict = answer_counterfactual(&project, &query);
10169
10170 if json {
10171 println!(
10172 "{}",
10173 serde_json::to_string_pretty(&json!({
10174 "ok": true,
10175 "command": "causal.counterfactual",
10176 "frontier": frontier.display().to_string(),
10177 "query": query,
10178 "verdict": verdict,
10179 }))
10180 .expect("serialize causal.counterfactual")
10181 );
10182 return;
10183 }
10184
10185 println!();
10186 println!(
10187 " {}",
10188 format!(
10189 "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
10190 )
10191 .to_uppercase()
10192 .dimmed()
10193 );
10194 println!(" {}", style::tick_row(72));
10195 match verdict {
10196 CounterfactualVerdict::Resolved {
10197 factual,
10198 counterfactual,
10199 delta,
10200 paths_used,
10201 } => {
10202 println!(
10203 " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
10204 style::ok("resolved")
10205 );
10206 println!(
10207 " twin-network propagation through {} causal path(s):",
10208 paths_used.len()
10209 );
10210 for p in paths_used.iter().take(5) {
10211 println!(" · {}", p.join(" → "));
10212 }
10213 println!(
10214 " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
10215 instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
10216 );
10217 }
10218 CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
10219 println!(
10220 " {} causal path exists but {} edge(s) lack a mechanism annotation",
10221 style::warn("mechanism_unspecified"),
10222 unspecified_edges.len()
10223 );
10224 for (parent, child) in unspecified_edges.iter().take(8) {
10225 println!(" · {parent} → {child}");
10226 }
10227 println!(
10228 " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
10229 );
10230 }
10231 CounterfactualVerdict::NoCausalPath { factual } => {
10232 println!(
10233 " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
10234 style::warn("no_path")
10235 );
10236 }
10237 CounterfactualVerdict::UnknownNode { which } => {
10238 fail(&format!("node not in frontier: {which}"));
10239 }
10240 CounterfactualVerdict::InvalidIntervention { reason } => {
10241 fail(&reason);
10242 }
10243 }
10244 println!();
10245 }
10246 }
10247}
10248
/// Handle `vela bridges …` subcommands: derive, list, show, confirm, refute.
///
/// Bridges are persisted as one JSON file per bridge id under
/// `<frontier>/.vela/bridges/`. Reviewer decisions (confirm/refute) also
/// append a `bridge.reviewed` event under `<frontier>/.vela/events/`.
/// All failure paths go through `fail`/`fail_return` (process exit).
fn cmd_bridges(action: BridgesAction) {
    use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
    use std::collections::HashMap;

    // Directory holding one `<bridge-id>.json` file per bridge.
    fn bridges_dir(frontier: &Path) -> PathBuf {
        frontier.join(".vela/bridges")
    }

    // Load a single bridge by id. Errors are human-readable strings suitable
    // for `fail_return`.
    fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
        let path = bridges_dir(frontier).join(format!("{id}.json"));
        if !path.is_file() {
            return Err(format!("bridge not found: {id}"));
        }
        let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
        serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
    }

    // Persist a bridge as pretty-printed JSON (trailing newline added),
    // creating the bridges directory on first use.
    fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
        let dir = bridges_dir(frontier);
        std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
        let path = dir.join(format!("{}.json", b.id));
        let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
        std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
    }

    // Reviewer identity used when `--reviewer` is not given: taken from
    // VELA_REVIEWER_ID, with a hard-coded fallback id.
    fn default_reviewer_id() -> String {
        std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
    }

    // Append a `bridge.reviewed` event (status = "confirmed" | "refuted") to
    // `<frontier>/.vela/events/<event-id>.json`. The payload is validated
    // against the set of known bridge ids before the event is written.
    fn emit_bridge_reviewed_event(
        frontier: &Path,
        bridge_id: &str,
        status: &str,
        reviewer_id: &str,
        note: Option<&str>,
    ) -> Result<(), String> {
        let mut payload = serde_json::json!({
            "bridge_id": bridge_id,
            "status": status,
        });
        // Only record a note when one was given and it is non-blank.
        if let Some(n) = note
            && !n.trim().is_empty()
        {
            payload["note"] = serde_json::Value::String(n.to_string());
        }
        let known_ids: Vec<String> = list_bridges(frontier)
            .unwrap_or_default()
            .into_iter()
            .map(|b| b.id)
            .collect();
        crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
        let event = crate::events::new_bridge_reviewed_event(
            bridge_id,
            reviewer_id,
            "human",
            &format!("Bridge {status} by {reviewer_id}"),
            payload,
            Vec::new(),
        );
        let events_dir = frontier.join(".vela/events");
        std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
        let event_path = events_dir.join(format!("{}.json", event.id));
        let data =
            serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
        std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
    }

    // Read every `*.json` file in the bridges directory. Missing directory is
    // treated as "no bridges". Result is sorted by descending finding-ref
    // count, then by entity name, so the most-connected bridges list first.
    fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
        let dir = bridges_dir(frontier);
        if !dir.is_dir() {
            return Ok(Vec::new());
        }
        let mut out = Vec::new();
        for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
            let entry = entry.map_err(|e| format!("read entry: {e}"))?;
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
            let b: Bridge =
                serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
            out.push(b);
        }
        out.sort_by(|a, b| {
            b.finding_refs
                .len()
                .cmp(&a.finding_refs.len())
                .then(a.entity_name.cmp(&b.entity_name))
        });
        Ok(out)
    }

    match action {
        // Derive bridges from two frontiers and materialize them under
        // frontier A's `.vela/bridges/` directory.
        BridgesAction::Derive {
            frontier_a,
            label_a,
            frontier_b,
            label_b,
            json,
        } => {
            let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
            let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
            let now = chrono::Utc::now().to_rfc3339();
            let new_bridges =
                derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);

            // Re-deriving must not clobber reviewer judgments: if a bridge with
            // the same id already exists and a reviewer has moved it past
            // `Derived`, keep the prior status and derivation timestamp.
            let existing = list_bridges(&frontier_a).unwrap_or_default();
            let existing_by_id: HashMap<String, Bridge> =
                existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
            let mut written = 0;
            let mut preserved = 0;
            let mut new_ids = Vec::new();
            for mut bridge in new_bridges {
                if let Some(prev) = existing_by_id.get(&bridge.id)
                    && prev.status != BridgeStatus::Derived
                {
                    bridge.status = prev.status;
                    bridge.derived_at = prev.derived_at.clone();
                    preserved += 1;
                }
                save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
                new_ids.push(bridge.id.clone());
                written += 1;
            }

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.derive",
                        "frontier_a": frontier_a.display().to_string(),
                        "frontier_b": frontier_b.display().to_string(),
                        "bridges_written": written,
                        "reviewer_judgments_preserved": preserved,
                        "ids": new_ids,
                    }))
                    .expect("serialize bridges.derive")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
            if preserved > 0 {
                println!(
                    " {} {} reviewer judgment(s) preserved",
                    style::ok("kept"),
                    preserved
                );
            }
            // Cap the id listing at 10 to keep the summary readable.
            for id in new_ids.iter().take(10) {
                println!("   · {id}");
            }
            if new_ids.len() > 10 {
                println!("   … and {} more", new_ids.len() - 10);
            }
            println!();
        }
        // List bridges, optionally filtered by status.
        BridgesAction::List {
            frontier,
            status,
            json,
        } => {
            let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
            if let Some(s) = status.as_deref() {
                // Status filter is case-insensitive; unknown values abort.
                let want = match s.to_lowercase().as_str() {
                    "derived" => BridgeStatus::Derived,
                    "confirmed" => BridgeStatus::Confirmed,
                    "refuted" => BridgeStatus::Refuted,
                    other => fail_return(&format!(
                        "unknown bridge status '{other}' (try derived|confirmed|refuted)"
                    )),
                };
                bridges.retain(|b| b.status == want);
            }
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.list",
                        "frontier": frontier.display().to_string(),
                        "count": bridges.len(),
                        "bridges": bridges,
                    }))
                    .expect("serialize bridges.list")
                );
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · LIST · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} bridge(s)", bridges.len());
            for b in &bridges {
                let chip = match b.status {
                    BridgeStatus::Derived => style::warn("derived"),
                    BridgeStatus::Confirmed => style::ok("confirmed"),
                    BridgeStatus::Refuted => style::lost("refuted"),
                };
                println!();
                println!(
                    " {chip} {} {} ↔ findings:{}",
                    b.id,
                    b.entity_name,
                    b.finding_refs.len()
                );
                println!("   frontiers: {}", b.frontiers.join(", "));
                if let Some(t) = &b.tension {
                    println!("   tension: {t}");
                }
            }
            println!();
        }
        // Show the full detail of a single bridge.
        BridgesAction::Show {
            frontier,
            bridge_id,
            json,
        } => {
            let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · SHOW · {}", b.id)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" entity: {}", b.entity_name);
            println!(" status: {:?}", b.status);
            println!(" frontiers: {}", b.frontiers.join(", "));
            if !b.frontier_ids.is_empty() {
                println!(" frontier_ids: {}", b.frontier_ids.join(", "));
            }
            if let Some(t) = &b.tension {
                println!(" tension: {t}");
            }
            println!(" derived_at: {}", b.derived_at);
            println!(" finding refs ({}):", b.finding_refs.len());
            for r in &b.finding_refs {
                let dir = r.direction.as_deref().unwrap_or("—");
                // Truncate the assertion to 72 chars for the listing.
                let truncated: String = r.assertion_text.chars().take(72).collect();
                println!(
                    "   · [{}] {} (conf={:.2}, dir={})",
                    r.frontier, r.finding_id, r.confidence, dir
                );
                println!("     {truncated}");
            }
            println!();
        }
        // Mark a bridge confirmed and record the review as an event.
        BridgesAction::Confirm {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Confirmed;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // Event emission is best-effort: the status change above is already
            // persisted, so a failed event write is deliberately ignored.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "confirmed",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
            println!();
        }
        // Mark a bridge refuted and record the review as an event.
        BridgesAction::Refute {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Refuted;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // Best-effort event emission, as in Confirm above.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "refuted",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now refuted", style::lost("refuted"), b.id);
            println!();
        }
    }
}
10592
10593fn cmd_federation(action: FederationAction) {
10595 use crate::federation::PeerHub;
10596
10597 match action {
10598 FederationAction::PeerAdd {
10599 frontier,
10600 id,
10601 url,
10602 pubkey,
10603 note,
10604 json,
10605 } => {
10606 let peer = PeerHub {
10607 id: id.clone(),
10608 url: url.clone(),
10609 public_key: pubkey.trim().to_string(),
10610 added_at: chrono::Utc::now().to_rfc3339(),
10611 note: note.clone(),
10612 };
10613 peer.validate().unwrap_or_else(|e| fail_return(&e));
10614
10615 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10616 if project.peers.iter().any(|p| p.id == id) {
10617 fail(&format!("peer '{id}' already in registry"));
10618 }
10619 project.peers.push(peer.clone());
10620 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10621
10622 if json {
10623 println!(
10624 "{}",
10625 serde_json::to_string_pretty(&json!({
10626 "ok": true,
10627 "command": "federation.peer-add",
10628 "frontier": frontier.display().to_string(),
10629 "peer": peer,
10630 "registered_count": project.peers.len(),
10631 }))
10632 .expect("serialize federation.peer-add")
10633 );
10634 } else {
10635 println!(
10636 "{} peer {} (pubkey {}…) at {}",
10637 style::ok("registered"),
10638 id,
10639 &peer.public_key[..16],
10640 peer.url
10641 );
10642 }
10643 }
10644 FederationAction::PeerList { frontier, json } => {
10645 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10646 if json {
10647 println!(
10648 "{}",
10649 serde_json::to_string_pretty(&json!({
10650 "ok": true,
10651 "command": "federation.peer-list",
10652 "frontier": frontier.display().to_string(),
10653 "peers": project.peers,
10654 }))
10655 .expect("serialize federation.peer-list")
10656 );
10657 } else {
10658 println!();
10659 println!(
10660 " {}",
10661 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
10662 .to_uppercase()
10663 .dimmed()
10664 );
10665 println!(" {}", style::tick_row(60));
10666 if project.peers.is_empty() {
10667 println!(" (no peers registered)");
10668 } else {
10669 for p in &project.peers {
10670 let note_suffix = if p.note.is_empty() {
10671 String::new()
10672 } else {
10673 format!(" · {}", p.note)
10674 };
10675 println!(
10676 " {:<24} {} {}…{note_suffix}",
10677 p.id,
10678 p.url,
10679 &p.public_key[..16]
10680 );
10681 }
10682 }
10683 }
10684 }
10685 FederationAction::Sync {
10686 frontier,
10687 peer_id,
10688 url,
10689 via_hub,
10690 vfr_id,
10691 allow_cross_vfr,
10692 dry_run,
10693 json,
10694 } => {
10695 use crate::federation::{self, DiscoveryResult};
10696
10697 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10698 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
10699 fail(&format!(
10700 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
10701 ));
10702 };
10703 let local_frontier_id = project.frontier_id();
10704
10705 if via_hub
10712 && let Some(target) = vfr_id.as_deref()
10713 && target != local_frontier_id
10714 && !allow_cross_vfr
10715 {
10716 fail(&format!(
10717 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
10718 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
10719 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
10720 ));
10721 }
10722
10723 #[derive(Debug)]
10725 enum SyncOutcome {
10726 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
10730 }
10731
10732 let outcome = if via_hub {
10733 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
10734 match federation::discover_peer_frontier(
10735 &peer.url,
10736 &target_vfr,
10737 Some(&peer.public_key),
10738 ) {
10739 DiscoveryResult::Resolved(p) => {
10740 let src =
10741 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
10742 SyncOutcome::Resolved(p, src)
10743 }
10744 DiscoveryResult::BrokenLocator {
10745 vfr_id,
10746 locator,
10747 status,
10748 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
10749 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
10750 SyncOutcome::UnverifiedEntry(vfr_id, reason)
10751 }
10752 DiscoveryResult::EntryNotFound { vfr_id, status } => {
10753 SyncOutcome::EntryNotFound(vfr_id, status)
10754 }
10755 DiscoveryResult::Unreachable { url, error } => {
10756 fail(&format!("peer hub unreachable ({url}): {error}"));
10757 }
10758 }
10759 } else {
10760 let resolved_url = url.unwrap_or_else(|| {
10761 let base = peer.url.trim_end_matches('/');
10762 format!("{base}/manifest/{local_frontier_id}.json")
10763 });
10764 match federation::fetch_peer_frontier(&resolved_url) {
10765 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
10766 Err(e) => fail(&format!("direct fetch failed: {e}")),
10767 }
10768 };
10769
10770 let peer_source: String;
10773 let peer_state = match outcome {
10774 SyncOutcome::Resolved(p, src) => {
10775 if !json {
10776 println!(" · resolved via {src}");
10777 }
10778 peer_source = src;
10779 p
10780 }
10781 SyncOutcome::BrokenLocator(vfr, locator, status) => {
10782 if dry_run {
10783 if json {
10784 println!(
10785 "{}",
10786 serde_json::to_string_pretty(&json!({
10787 "ok": true,
10788 "command": "federation.sync",
10789 "dry_run": true,
10790 "outcome": "broken_locator",
10791 "vfr_id": vfr,
10792 "locator": locator,
10793 "http_status": status,
10794 }))
10795 .expect("serialize")
10796 );
10797 } else {
10798 println!(
10799 "{} dry-run: peer entry resolved but locator dead",
10800 style::warn("broken_locator")
10801 );
10802 println!(" vfr_id: {vfr}");
10803 println!(" locator: {locator} (HTTP {status})");
10804 }
10805 return;
10806 }
10807 let report = federation::record_locator_failure(
10808 &mut project,
10809 &peer_id,
10810 &vfr,
10811 &locator,
10812 status,
10813 );
10814 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10815 if json {
10816 println!(
10817 "{}",
10818 serde_json::to_string_pretty(&json!({
10819 "ok": true,
10820 "command": "federation.sync",
10821 "outcome": "broken_locator",
10822 "report": report,
10823 }))
10824 .expect("serialize")
10825 );
10826 } else {
10827 println!(
10828 "{} sync recorded broken-locator conflict against {peer_id}",
10829 style::warn("broken_locator")
10830 );
10831 println!(" vfr_id: {vfr}");
10832 println!(" locator: {locator} (HTTP {status})");
10833 println!(" events appended: {}", report.events_appended);
10834 }
10835 return;
10836 }
10837 SyncOutcome::UnverifiedEntry(vfr, reason) => {
10838 if dry_run {
10839 if json {
10840 println!(
10841 "{}",
10842 serde_json::to_string_pretty(&json!({
10843 "ok": true,
10844 "command": "federation.sync",
10845 "dry_run": true,
10846 "outcome": "unverified_peer_entry",
10847 "vfr_id": vfr,
10848 "reason": reason,
10849 }))
10850 .expect("serialize")
10851 );
10852 } else {
10853 println!(
10854 "{} dry-run: peer entry signature did not verify",
10855 style::lost("unverified_peer_entry")
10856 );
10857 println!(" vfr_id: {vfr}");
10858 println!(" reason: {reason}");
10859 }
10860 return;
10861 }
10862 let report =
10863 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
10864 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10865 if json {
10866 println!(
10867 "{}",
10868 serde_json::to_string_pretty(&json!({
10869 "ok": true,
10870 "command": "federation.sync",
10871 "outcome": "unverified_peer_entry",
10872 "report": report,
10873 }))
10874 .expect("serialize")
10875 );
10876 } else {
10877 println!(
10878 "{} sync halted; peer's registry entry signature did not verify",
10879 style::lost("unverified_peer_entry")
10880 );
10881 println!(" vfr_id: {vfr}");
10882 println!(" reason: {reason}");
10883 }
10884 return;
10885 }
10886 SyncOutcome::EntryNotFound(vfr, status) => {
10887 if json {
10888 println!(
10889 "{}",
10890 serde_json::to_string_pretty(&json!({
10891 "ok": false,
10892 "command": "federation.sync",
10893 "outcome": "entry_not_found",
10894 "vfr_id": vfr,
10895 "http_status": status,
10896 }))
10897 .expect("serialize")
10898 );
10899 } else {
10900 println!(
10901 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
10902 style::warn("entry_not_found")
10903 );
10904 }
10905 return;
10906 }
10907 };
10908
10909 if dry_run {
10910 let conflicts = federation::diff_frontiers(&project, &peer_state);
10911 if json {
10912 println!(
10913 "{}",
10914 serde_json::to_string_pretty(&json!({
10915 "ok": true,
10916 "command": "federation.sync",
10917 "dry_run": true,
10918 "peer_id": peer_id,
10919 "peer_source": peer_source,
10920 "conflicts": conflicts,
10921 }))
10922 .expect("serialize federation.sync (dry-run)")
10923 );
10924 } else {
10925 println!(
10926 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
10927 style::ok("ok"),
10928 peer_source,
10929 conflicts.len()
10930 );
10931 for c in &conflicts {
10932 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
10933 }
10934 }
10935 return;
10936 }
10937
10938 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
10939 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10940
10941 if json {
10942 println!(
10943 "{}",
10944 serde_json::to_string_pretty(&json!({
10945 "ok": true,
10946 "command": "federation.sync",
10947 "peer_id": peer_id,
10948 "peer_source": peer_source,
10949 "report": report,
10950 }))
10951 .expect("serialize federation.sync")
10952 );
10953 } else {
10954 println!(
10955 "{} synced with {} ({})",
10956 style::ok("ok"),
10957 peer_id,
10958 peer_source
10959 );
10960 println!(
10961 " our: {}",
10962 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
10963 );
10964 println!(
10965 " peer: {}",
10966 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
10967 );
10968 println!(
10969 " conflicts: {} events appended: {}",
10970 report.conflicts.len(),
10971 report.events_appended
10972 );
10973 for c in &report.conflicts {
10974 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
10975 }
10976 }
10977 }
10978 FederationAction::PushResolution {
10979 frontier,
10980 conflict_event_id,
10981 to,
10982 key,
10983 vfr_id,
10984 json,
10985 } => {
10986 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
10987 }
10988 FederationAction::PeerRemove { frontier, id, json } => {
10989 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10990 let before = project.peers.len();
10991 project.peers.retain(|p| p.id != id);
10992 if project.peers.len() == before {
10993 fail(&format!("peer '{id}' not found in registry"));
10994 }
10995 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10996
10997 if json {
10998 println!(
10999 "{}",
11000 serde_json::to_string_pretty(&json!({
11001 "ok": true,
11002 "command": "federation.peer-remove",
11003 "frontier": frontier.display().to_string(),
11004 "removed": id,
11005 "remaining": project.peers.len(),
11006 }))
11007 .expect("serialize federation.peer-remove")
11008 );
11009 } else {
11010 println!(
11011 "{} peer {} ({} remaining)",
11012 style::ok("removed"),
11013 id,
11014 project.peers.len()
11015 );
11016 }
11017 }
11018 }
11019}
11020
11021fn cmd_federation_push_resolution(
11033 frontier: PathBuf,
11034 conflict_event_id: String,
11035 to: String,
11036 key: Option<PathBuf>,
11037 vfr_id: Option<String>,
11038 json: bool,
11039) {
11040 use crate::canonical;
11041 use crate::sign;
11042
11043 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11044
11045 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
11046 fail(&format!(
11047 "peer '{to}' not in registry; run `vela federation peer-add` first"
11048 ));
11049 };
11050
11051 let Some(resolution) = project
11053 .events
11054 .iter()
11055 .find(|e| {
11056 e.kind == "frontier.conflict_resolved"
11057 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
11058 == Some(conflict_event_id.as_str())
11059 })
11060 .cloned()
11061 else {
11062 fail(&format!(
11063 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
11064 frontier.display()
11065 ));
11066 };
11067
11068 let actor_id = resolution.actor.id.clone();
11071 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
11072 fail(&format!(
11073 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
11074 register the reviewer with `vela actor add` before pushing"
11075 ));
11076 };
11077
11078 let key_path = key.unwrap_or_else(|| {
11081 let home = std::env::var("HOME").unwrap_or_default();
11082 let base = PathBuf::from(home)
11083 .join(".config")
11084 .join("vela")
11085 .join("keys");
11086 let safe_id = actor.id.replace([':', '/'], "_");
11087 let by_actor = base.join(format!("{safe_id}.key"));
11088 if by_actor.exists() {
11089 by_actor
11090 } else {
11091 base.join("private.key")
11092 }
11093 });
11094
11095 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
11096 fail_return(&format!(
11097 "load private key from {}: {e}",
11098 key_path.display()
11099 ))
11100 });
11101 let pubkey_hex = sign::pubkey_hex(&signing_key);
11102 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
11103 fail(&format!(
11104 "private key at {} does not match actor {}'s registered public key. \
11105 Loaded pubkey {}, expected {}.",
11106 key_path.display(),
11107 actor.id,
11108 &pubkey_hex[..16],
11109 &actor.public_key[..16]
11110 ));
11111 }
11112
11113 let signature_hex = sign::sign_event(&resolution, &signing_key)
11116 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
11117
11118 let mut body = resolution.clone();
11123 body.signature = None;
11124 let body_value =
11125 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
11126 let _canonical_check = canonical::to_canonical_bytes(&body_value)
11127 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
11128
11129 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
11130 let url = format!(
11131 "{}/entries/{}/events",
11132 peer.url.trim_end_matches('/'),
11133 target_vfr
11134 );
11135
11136 let url_owned = url.clone();
11138 let pubkey_owned = pubkey_hex.clone();
11139 let signature_owned = signature_hex.clone();
11140 let body_owned = body_value.clone();
11141 let response: Result<(u16, String), String> = std::thread::spawn(move || {
11142 let client = reqwest::blocking::Client::new();
11143 let resp = client
11144 .post(&url_owned)
11145 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
11146 .header("X-Vela-Signature", &signature_owned)
11147 .json(&body_owned)
11148 .send()
11149 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
11150 let status = resp.status().as_u16();
11151 let text = resp.text().unwrap_or_default();
11152 Ok((status, text))
11153 })
11154 .join()
11155 .map_err(|_| "push thread panicked".to_string())
11156 .unwrap_or_else(|e| fail_return(&e));
11157
11158 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
11159 let parsed: serde_json::Value =
11160 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
11161
11162 let accepted = matches!(status, 200..=202);
11163 if json {
11164 println!(
11165 "{}",
11166 serde_json::to_string_pretty(&json!({
11167 "ok": accepted,
11168 "command": "federation.push-resolution",
11169 "frontier": frontier.display().to_string(),
11170 "peer_id": to,
11171 "url": url,
11172 "conflict_event_id": conflict_event_id,
11173 "event_id": resolution.id,
11174 "actor_id": actor.id,
11175 "http_status": status,
11176 "response": parsed,
11177 }))
11178 .expect("serialize federation.push-resolution")
11179 );
11180 } else if accepted {
11181 println!(
11182 "{} resolution {} pushed to {} (HTTP {})",
11183 style::ok("ok"),
11184 &resolution.id[..16.min(resolution.id.len())],
11185 to,
11186 status
11187 );
11188 println!(" url: {url}");
11189 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
11190 } else {
11191 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
11192 println!(" url: {url}");
11193 println!(" response: {text}");
11194 std::process::exit(1);
11195 }
11196}
11197
/// Handle the `vela queue ...` subcommands: list queued actions, clear the
/// queue, or sign-and-apply queued actions with a local private key.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        // `queue list` — show every queued action, as JSON or a styled listing.
        QueueAction::List { queue_file, json } => {
            // Fall back to the conventional queue location when no file is given.
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        // `queue clear` — drop every queued action and report how many were removed.
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        // `queue sign` — sign each queued action (optionally confirming one by
        // one) and write the still-unsigned remainder back to the queue file.
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file holds a hex-encoded private key; trim stray whitespace.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            // Actions that are skipped (declined) or fail to apply stay in
            // `remaining` and are written back so nothing is silently lost.
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Persist whatever was not signed back to the queue file.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11339
11340fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11341 let bytes = hex::decode(hex_str)
11342 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11343 let key_bytes: [u8; 32] = bytes
11344 .try_into()
11345 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11346 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11347}
11348
11349fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11350 use std::io::{self, BufRead, Write};
11351 let mut stdout = io::stdout().lock();
11352 let _ = writeln!(
11353 stdout,
11354 " sign {} on {}? [y/N] ",
11355 action.kind,
11356 action.frontier.display()
11357 );
11358 let _ = stdout.flush();
11359 drop(stdout);
11360 let stdin = io::stdin();
11361 let mut line = String::new();
11362 if stdin.lock().read_line(&mut line).is_err() {
11363 return false;
11364 }
11365 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11366}
11367
/// Sign one queued action with `signing_key` and apply it to the frontier it
/// targets.
///
/// Returns a short human-readable report string on success, or an error when
/// the action kind is unsupported, required args are missing, signing fails,
/// or applying the action to the frontier fails.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        // Proposal-creating kinds: build a proposal targeting a finding.
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queue's action kind onto the proposal kind string.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Kind-specific payload assembled from the queued args.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued creation timestamp when present; otherwise stamp "now".
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Recompute the id after overriding created_at.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature is produced here (and any signing
            // error propagated via `?`) but the value itself is discarded —
            // it is not attached to the proposal before applying.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        // Review kinds: accept or reject an existing proposal by id.
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision, signed below.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): as above, the signature is computed but not
            // persisted or passed to the accept/reject call.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11489
11490fn cmd_entity(action: EntityAction) {
11502 use crate::entity_resolve;
11503 match action {
11504 EntityAction::Resolve {
11505 frontier,
11506 force,
11507 json,
11508 } => {
11509 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11510 let report = entity_resolve::resolve_frontier(&mut p, force);
11511 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11512 if json {
11513 println!(
11514 "{}",
11515 serde_json::to_string_pretty(&serde_json::json!({
11516 "ok": true,
11517 "command": "entity.resolve",
11518 "frontier_path": frontier.display().to_string(),
11519 "report": report,
11520 }))
11521 .expect("serialize")
11522 );
11523 } else {
11524 println!(
11525 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11526 style::ok("entity"),
11527 report.resolved,
11528 report.total_entities,
11529 report.already_resolved,
11530 report.unresolved_count,
11531 report.findings_touched,
11532 );
11533 let unresolved_summary: std::collections::BTreeSet<&str> = report
11534 .per_finding
11535 .iter()
11536 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11537 .collect();
11538 if !unresolved_summary.is_empty() {
11539 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11540 println!(
11541 " unresolved (first {}): {}",
11542 take.len(),
11543 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11544 );
11545 }
11546 }
11547 }
11548 EntityAction::List { json } => {
11549 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11550 .map(|(name, etype, source, id)| {
11551 serde_json::json!({
11552 "canonical_name": name,
11553 "entity_type": etype,
11554 "source": source,
11555 "id": id,
11556 })
11557 })
11558 .collect();
11559 if json {
11560 println!(
11561 "{}",
11562 serde_json::to_string_pretty(&serde_json::json!({
11563 "ok": true,
11564 "command": "entity.list",
11565 "count": entries.len(),
11566 "entries": entries,
11567 }))
11568 .expect("serialize")
11569 );
11570 } else {
11571 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11572 for e in &entries {
11573 println!(
11574 " {:32} {:18} {} {}",
11575 e["canonical_name"].as_str().unwrap_or("?"),
11576 e["entity_type"].as_str().unwrap_or("?"),
11577 e["source"].as_str().unwrap_or("?"),
11578 e["id"].as_str().unwrap_or("?"),
11579 );
11580 }
11581 }
11582 }
11583 }
11584}
11585
/// Handle `vela link add`: attach a typed link from one finding to another
/// (local or cross-frontier target), with a best-effort HTTP check that warns
/// when a cross-frontier target is missing or superseded.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Validate the enum-like string flags before touching the frontier.
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target must reference a declared dependency.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check of a cross-frontier target: warn (never
            // fail) when the target finding is superseded or absent. Skipped
            // with --no-check-target or when the dep has no http(s) locator;
            // any network or parse error is silently ignored.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Record the link on the source finding and persist the frontier.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Fold the optional warning into the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
11731
/// Handle the `vela frontier ...` subcommands: scaffold a new frontier,
/// materialize its repo layout, manage cross-frontier dependencies
/// (add/list/remove/refresh), and produce frontier diffs.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new` — scaffold an empty frontier file and print next steps.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force was given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // An entirely empty project shell with only metadata filled in.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize` — expand the frontier into its repo layout.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep` — declare a cross-frontier dependency with a
        // locator and pinned snapshot hash.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Duplicate declarations are rejected; the dep must be removed first.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                // Default the display name to the vfr id when none is given.
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        // `frontier list-deps` — show every declared dependency.
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep` — drop a declared dependency, but only when no
        // finding still links into it.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Refuse removal while any finding still has a cross-frontier link
            // into the dependency being removed.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps` — ask the hub for the latest snapshot hash of
        // each cross-frontier dep and re-pin those that changed.
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to refresh: emit an empty summary and return early.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal shape of the hub's per-entry response.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                // Classify each dep as missing / unreachable / unchanged / refreshed.
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-find the dep by vfr_id before mutating its pin.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Only mutate the pin outside --dry-run;
                                        // the counter still reflects what would change.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Persist only when something actually changed and not in dry-run.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Truncate snapshot hashes to 16 chars for display.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        // `frontier diff` — delegate to the dedicated diff handler.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
12167
12168fn cmd_repo(action: RepoAction) {
12169 match action {
12170 RepoAction::Status { frontier, json } => {
12171 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
12172 if json {
12173 println!(
12174 "{}",
12175 serde_json::to_string_pretty(&payload)
12176 .expect("failed to serialize repo status")
12177 );
12178 } else {
12179 let summary = payload.get("summary").unwrap_or(&Value::Null);
12180 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
12181 println!("vela repo status");
12182 println!(" frontier: {}", frontier.display());
12183 println!(
12184 " events: {}",
12185 summary
12186 .get("accepted_events")
12187 .and_then(Value::as_u64)
12188 .unwrap_or_default()
12189 );
12190 println!(
12191 " open proposals: {}",
12192 summary
12193 .get("open_proposals")
12194 .and_then(Value::as_u64)
12195 .unwrap_or_default()
12196 );
12197 println!(
12198 " state: {}",
12199 freshness
12200 .get("materialized_state")
12201 .and_then(Value::as_str)
12202 .unwrap_or("unknown")
12203 );
12204 println!(
12205 " proof: {}",
12206 freshness
12207 .get("proof")
12208 .and_then(Value::as_str)
12209 .unwrap_or("unknown")
12210 );
12211 }
12212 }
12213 RepoAction::Doctor { frontier, json } => {
12214 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
12215 if json {
12216 println!(
12217 "{}",
12218 serde_json::to_string_pretty(&payload)
12219 .expect("failed to serialize repo doctor")
12220 );
12221 } else {
12222 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12223 let issues = payload
12224 .get("issues")
12225 .and_then(Value::as_array)
12226 .map_or(0, Vec::len);
12227 println!("vela repo doctor");
12228 println!(" frontier: {}", frontier.display());
12229 println!(" status: {}", if ok { "ok" } else { "needs attention" });
12230 println!(" issues: {issues}");
12231 }
12232 }
12233 }
12234}
12235
12236fn cmd_proof_verify(frontier: &Path, json_output: bool) {
12237 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
12238 if json_output {
12239 println!(
12240 "{}",
12241 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
12242 );
12243 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12244 std::process::exit(1);
12245 }
12246 } else {
12247 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12248 println!("vela proof verify");
12249 println!(" frontier: {}", frontier.display());
12250 println!(" status: {}", if ok { "ok" } else { "failed" });
12251 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
12252 for issue in issues {
12253 if let Some(message) = issue.get("message").and_then(Value::as_str) {
12254 println!(" issue: {message}");
12255 }
12256 }
12257 }
12258 if !ok {
12259 std::process::exit(1);
12260 }
12261 }
12262}
12263
12264fn cmd_proof_explain(frontier: &Path) {
12265 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
12266 print!("{text}");
12267}
12268
/// Implements `vela frontier diff`: summarize which findings were added or
/// updated within a time window, either `--since <RFC3339 timestamp>` up to
/// now, or a whole ISO `--week` (defaulting to the current week).
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the half-open [window_start, window_end) range. `week_label`
    // is only Some when a whole ISO week was requested (shown in output).
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        // No --since: use the requested ISO week, or the current one.
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Findings bucketed by what happened to them inside the window.
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    // Count of findings created at any time before the window's end.
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are treated as absent rather than fatal.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            // Contested findings and explicit "tension" assertions count as
            // new contradictions for this window.
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            // A finding added inside the window is never also listed as updated.
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact JSON rows shared by all three buckets.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable rendering below.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12432
/// Truncate `s` to at most `n` characters (chars, not bytes), appending a
/// `…` ellipsis when anything was cut off.
///
/// When truncation happens the result is `n - 1` characters of `s` plus the
/// ellipsis, so the output never exceeds `n` characters. Fixes the previous
/// behavior where `n == 0` produced a one-character `"…"` that exceeded the
/// requested width; it now yields an empty string. Also avoids the earlier
/// full `chars().count()` pre-pass by probing a single char past the limit.
fn truncate(s: &str, n: usize) -> String {
    if n == 0 {
        return String::new();
    }
    // `nth(n)` is None exactly when `s` has at most `n` chars — no cut needed.
    if s.chars().nth(n).is_none() {
        return s.to_string();
    }
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
12442
12443fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12445 use chrono::Datelike;
12446 let iso = d.iso_week();
12447 format!("{:04}-W{:02}", iso.year(), iso.week())
12448}
12449
12450fn iso_week_bounds(
12453 key: &str,
12454) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12455 let (year_str, week_str) = key
12456 .split_once("-W")
12457 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12458 let year: i32 = year_str
12459 .parse()
12460 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12461 let week: u32 = week_str
12462 .parse()
12463 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12464 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12465 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12466 let next_monday = monday + chrono::Duration::days(7);
12467 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12468 let end = next_monday
12469 .and_hms_opt(0, 0, 0)
12470 .expect("00:00 valid")
12471 .and_utc();
12472 Ok((start, end))
12473}
12474
/// Implements the `vela registry` command family: `depends-on`, `mirror`,
/// `list`, `publish`, and `pull` — against either a local registry file or
/// a remote registry hub reached over HTTP.
fn cmd_registry(action: RegistryAction) {
    use crate::registry;
    // Default local registry location: $HOME/.vela/registry/entries.json
    // (falls back to the current directory when HOME is unset).
    let default_registry = || -> PathBuf {
        let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
        PathBuf::from(home)
            .join(".vela")
            .join("registry")
            .join("entries.json")
    };
    match action {
        // Ask a hub which frontiers declare a dependency on `vfr_id`.
        RegistryAction::DependsOn { vfr_id, from, json } => {
            let base = from.trim_end_matches('/');
            let url = format!("{base}/entries/{vfr_id}/depends-on");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
            let resp = client
                .get(&url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
            if !resp.status().is_success() {
                fail(&format!("GET {url}: HTTP {}", resp.status()));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&body).expect("serialize")
                );
            } else {
                let dependents = body
                    .get("dependents")
                    .and_then(|v| v.as_array())
                    .cloned()
                    .unwrap_or_default();
                let count = dependents.len();
                // Singular/plural phrasing for the summary line.
                println!(
                    "{} {count} {} on {vfr_id}",
                    style::ok("registry"),
                    if count == 1 {
                        "frontier depends"
                    } else {
                        "frontiers depend"
                    },
                );
                for e in &dependents {
                    let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
                    let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
                    let o = e
                        .get("owner_actor_id")
                        .and_then(|v| v.as_str())
                        .unwrap_or("?");
                    println!(" {v} {n} ({o})");
                }
            }
        }
        // Copy an entry from one hub to another; the signature travels with
        // the entry, so the destination can verify it independently.
        RegistryAction::Mirror {
            vfr_id,
            from,
            to,
            json,
        } => {
            let src_base = from.trim_end_matches('/');
            let dst_base = to.trim_end_matches('/');
            let src_url = format!("{src_base}/entries/{vfr_id}");
            let dst_url = format!("{dst_base}/entries");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            // Fetch the entry from the source hub as raw JSON.
            let entry: serde_json::Value = client
                .get(&src_url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .error_for_status()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));

            // Re-post it verbatim to the destination hub.
            let resp = client
                .post(&dst_url)
                .header("content-type", "application/json")
                .body(
                    serde_json::to_vec(&entry)
                        .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
                )
                .send()
                .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
            let status = resp.status();
            if !status.is_success() {
                // Include (a truncated slice of) the error body for context.
                let body = resp.text().unwrap_or_default();
                fail(&format!(
                    "POST {dst_url}: HTTP {status}: {}",
                    body.chars().take(300).collect::<String>()
                ));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
            // The destination reports whether it already knew this signature.
            let duplicate = body
                .get("duplicate")
                .and_then(serde_json::Value::as_bool)
                .unwrap_or(false);
            let payload = json!({
                "ok": true,
                "command": "registry.mirror",
                "vfr_id": vfr_id,
                "from": src_base,
                "to": dst_base,
                "duplicate_on_destination": duplicate,
                "destination_response": body,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize")
                );
            } else {
                println!(
                    "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
                    style::ok("registry"),
                    if duplicate {
                        " (duplicate; signature already known)"
                    } else {
                        " (fresh insert)"
                    }
                );
            }
        }
        // List the entries in a registry. `from` may be a hub URL, a local
        // path, or absent (default local registry file).
        RegistryAction::List { from, json } => {
            let (label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let r = registry_data;
            let path_label = label;
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.list",
                    "registry": path_label,
                    "entry_count": r.entries.len(),
                    "entries": r.entries,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · REGISTRY · LIST · {}", path_label)
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if r.entries.is_empty() {
                    println!(" (registry is empty)");
                } else {
                    for entry in &r.entries {
                        println!(
                            " {} {} ({}) by {} published {}",
                            entry.vfr_id,
                            entry.name,
                            entry.network_locator,
                            entry.owner_actor_id,
                            entry.signed_publish_at
                        );
                    }
                }
            }
        }
        // Sign the current frontier state and publish an entry for it, either
        // to a remote hub or to a local registry file.
        RegistryAction::Publish {
            frontier,
            owner,
            key,
            locator,
            to,
            json,
        } => {
            // The signing key file holds a hex-encoded private key.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            // Public key derived from the provided private key, hex-encoded.
            let derived = hex::encode(signing_key.verifying_key().to_bytes());

            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            // Look up the owner's registered public key. Unknown owners are
            // auto-registered (side effect: the frontier file is re-saved
            // with the new ed25519 actor record) using the derived pubkey.
            let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
                Some(actor) => actor.public_key.clone(),
                None => {
                    eprintln!(
                        " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
                        &derived[..16]
                    );
                    frontier_data.actors.push(sign::ActorRecord {
                        id: owner.clone(),
                        public_key: derived.clone(),
                        algorithm: "ed25519".to_string(),
                        created_at: chrono::Utc::now().to_rfc3339(),
                        tier: None,
                        orcid: None,
                        access_clearance: None,
                    });
                    repo::save_to_path(&frontier, &frontier_data)
                        .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
                    derived.clone()
                }
            };

            // Content hashes pin the exact state being published.
            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            // The private key must correspond to the owner's registered
            // public key; otherwise publishing would forge authorship.
            if derived != pubkey {
                fail(&format!(
                    "private key does not match registered pubkey for owner '{owner}'"
                ));
            }

            // Remote hubs can serve the snapshot themselves, so the locator
            // can be derived; local publishes must name one explicitly.
            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                             --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            // Build the entry with an empty signature first, then sign it.
            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: owner.clone(),
                owner_pubkey: pubkey,
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature =
                registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));

            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                // Local publishes never report a duplicate.
                (registry_path.display().to_string(), false)
            };

            let payload = json!({
                "ok": true,
                "command": "registry.publish",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "name": name,
                "owner": owner,
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "signed_publish_at": entry.signed_publish_at,
                "signature": entry.signature,
                "duplicate": duplicate,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.publish")
                );
            } else {
                let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
                println!(
                    "{} published {vfr_id} → {}{}",
                    style::ok("registry"),
                    registry_label,
                    dup_suffix
                );
                println!(" snapshot: {}", entry.latest_snapshot_hash);
                println!(" event_log: {}", entry.latest_event_log_hash);
                println!(" signature: {}…", &entry.signature[..16]);
            }
        }
        // Fetch a published frontier (optionally with its transitive
        // dependencies) and verify hashes/signature after download.
        RegistryAction::Pull {
            vfr_id,
            from,
            out,
            transitive,
            depth,
            json,
        } => {
            // Resolve the registry source exactly as in `List` above.
            let (registry_label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let entry = registry::find_latest(&registry_data, &vfr_id)
                .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));

            if transitive {
                // Pull the frontier plus pinned dependencies into `out`.
                let result = registry::pull_transitive(&registry_data, &vfr_id, &out, depth)
                    .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));

                // Map of dependency vfr_id -> local file path for the report.
                let dep_paths_json: serde_json::Value = serde_json::Value::Object(
                    result
                        .deps
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
                        .collect(),
                );
                let payload = json!({
                    "ok": true,
                    "command": "registry.pull",
                    "registry": registry_label,
                    "vfr_id": vfr_id,
                    "transitive": true,
                    "depth": depth,
                    "out_dir": out.display().to_string(),
                    "primary": result.primary_path.display().to_string(),
                    "verified": result.verified,
                    "deps": dep_paths_json,
                });
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("failed to serialize registry.pull")
                    );
                } else {
                    println!(
                        "{} pulled {vfr_id} (transitive) → {}",
                        style::ok("registry"),
                        out.display()
                    );
                    println!(" verified {} frontier(s):", result.verified.len());
                    for v in &result.verified {
                        println!(" · {v}");
                    }
                    println!(" every cross-frontier dependency's pinned snapshot hash matched");
                }
                return;
            }

            registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
                .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
            // Verification failure removes the untrusted download before failing.
            registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
                let _ = std::fs::remove_file(&out);
                fail_return(&format!("pull verification failed: {e}"))
            });

            let payload = json!({
                "ok": true,
                "command": "registry.pull",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "out": out.display().to_string(),
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "verified": true,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.pull")
                );
            } else {
                println!(
                    "{} pulled {vfr_id} → {}",
                    style::ok("registry"),
                    out.display()
                );
                println!(" verified snapshot+event_log hashes match registry; signature ok");
            }
        }
    }
}
12948
12949fn print_stats_json(path: &Path) {
12950 let frontier = load_frontier_or_fail(path);
12951 let source_hash = hash_path_or_fail(path);
12952 let payload = json!({
12953 "ok": true,
12954 "command": "stats",
12955 "schema_version": project::VELA_SCHEMA_VERSION,
12956 "frontier": {
12957 "name": &frontier.project.name,
12958 "description": &frontier.project.description,
12959 "source": path.display().to_string(),
12960 "hash": format!("sha256:{source_hash}"),
12961 "compiled_at": &frontier.project.compiled_at,
12962 "compiler": &frontier.project.compiler,
12963 "papers_processed": frontier.project.papers_processed,
12964 "errors": frontier.project.errors,
12965 },
12966 "stats": frontier.stats,
12967 "proposals": proposals::summary(&frontier),
12968 "proof_state": frontier.proof_state,
12969 });
12970 println!(
12971 "{}",
12972 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
12973 );
12974}
12975
12976fn cmd_search(
12977 source: Option<&Path>,
12978 query: &str,
12979 entity: Option<&str>,
12980 assertion_type: Option<&str>,
12981 all: Option<&Path>,
12982 limit: usize,
12983 json_output: bool,
12984) {
12985 if let Some(dir) = all {
12986 search::run_all(dir, query, entity, assertion_type, limit);
12987 return;
12988 }
12989 let Some(src) = source else {
12990 fail("Provide --source <frontier> or --all <directory>.");
12991 };
12992 if json_output {
12993 let results = search::search(src, query, entity, assertion_type, limit);
12994 let loaded = load_frontier_or_fail(src);
12995 let source_hash = hash_path_or_fail(src);
12996 let payload = json!({
12997 "ok": true,
12998 "command": "search",
12999 "schema_version": project::VELA_SCHEMA_VERSION,
13000 "query": query,
13001 "frontier": {
13002 "name": &loaded.project.name,
13003 "source": src.display().to_string(),
13004 "hash": format!("sha256:{source_hash}"),
13005 },
13006 "filters": {
13007 "entity": entity,
13008 "assertion_type": assertion_type,
13009 "limit": limit,
13010 },
13011 "count": results.len(),
13012 "results": results.iter().map(|result| json!({
13013 "id": &result.id,
13014 "score": result.score,
13015 "assertion": &result.assertion,
13016 "assertion_type": &result.assertion_type,
13017 "confidence": result.confidence,
13018 "entities": &result.entities,
13019 "doi": &result.doi,
13020 })).collect::<Vec<_>>()
13021 });
13022 println!(
13023 "{}",
13024 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
13025 );
13026 } else {
13027 search::run(src, query, entity, assertion_type, limit);
13028 }
13029}
13030
/// Implements `vela tensions`: analyze contradiction pairs in a frontier and
/// either print them in table form or emit a structured JSON report.
fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
    let frontier = load_frontier_or_fail(source);
    let result = tensions::analyze(&frontier, both_high, cross_domain, top);
    if json_output {
        let source_hash = hash_path_or_fail(source);
        let payload = json!({
            "ok": true,
            "command": "tensions",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": source.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "both_high": both_high,
                "cross_domain": cross_domain,
                "top": top,
            },
            "count": result.len(),
            // One row per tension: the conflicting pair plus resolution info.
            "tensions": result.iter().map(|t| json!({
                "score": t.score,
                "resolved": t.resolved,
                "superseding_id": &t.superseding_id,
                "finding_a": {
                    "id": &t.finding_a.id,
                    "assertion": &t.finding_a.assertion,
                    "confidence": t.finding_a.confidence,
                    "assertion_type": &t.finding_a.assertion_type,
                    "citation_count": t.finding_a.citation_count,
                    "contradicts_count": t.finding_a.contradicts_count,
                },
                "finding_b": {
                    "id": &t.finding_b.id,
                    "assertion": &t.finding_b.assertion,
                    "confidence": t.finding_b.confidence,
                    "assertion_type": &t.finding_b.assertion_type,
                    "citation_count": t.finding_b.citation_count,
                    "contradicts_count": t.finding_b.contradicts_count,
                }
            })).collect::<Vec<_>>()
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
        );
    } else {
        tensions::print_tensions(&result);
    }
}
13081
13082fn cmd_gaps(action: GapsAction) {
13083 match action {
13084 GapsAction::Rank {
13085 frontier,
13086 top,
13087 domain,
13088 json,
13089 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
13090 }
13091}
13092
13093fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
13094 let frontier = load_frontier_or_fail(frontier_path);
13095 let mut ranked = frontier
13096 .findings
13097 .iter()
13098 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
13099 .filter(|finding| {
13100 domain.is_none_or(|domain| {
13101 finding
13102 .assertion
13103 .text
13104 .to_lowercase()
13105 .contains(&domain.to_lowercase())
13106 || finding
13107 .assertion
13108 .entities
13109 .iter()
13110 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
13111 })
13112 })
13113 .map(|finding| {
13114 let dependency_count = frontier
13115 .findings
13116 .iter()
13117 .flat_map(|candidate| candidate.links.iter())
13118 .filter(|link| link.target == finding.id)
13119 .count();
13120 let score = dependency_count as f64 + finding.confidence.score;
13121 json!({
13122 "id": &finding.id,
13123 "kind": "candidate_gap_review_lead",
13124 "assertion": &finding.assertion.text,
13125 "score": score,
13126 "dependency_count": dependency_count,
13127 "confidence": finding.confidence.score,
13128 "evidence_type": &finding.evidence.evidence_type,
13129 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
13130 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
13131 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
13132 })
13133 })
13134 .collect::<Vec<_>>();
13135 ranked.sort_by(|a, b| {
13136 b.get("score")
13137 .and_then(Value::as_f64)
13138 .partial_cmp(&a.get("score").and_then(Value::as_f64))
13139 .unwrap_or(std::cmp::Ordering::Equal)
13140 });
13141 ranked.truncate(top);
13142 if json_output {
13143 let source_hash = hash_path_or_fail(frontier_path);
13144 let payload = json!({
13145 "ok": true,
13146 "command": "gaps rank",
13147 "schema_version": project::VELA_SCHEMA_VERSION,
13148 "frontier": {
13149 "name": &frontier.project.name,
13150 "source": frontier_path.display().to_string(),
13151 "hash": format!("sha256:{source_hash}"),
13152 },
13153 "filters": {
13154 "top": top,
13155 "domain": domain,
13156 },
13157 "count": ranked.len(),
13158 "ranking_label": "candidate gap review leads",
13159 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
13160 "review_leads": ranked.clone(),
13161 "gaps": ranked,
13162 });
13163 println!(
13164 "{}",
13165 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
13166 );
13167 } else {
13168 println!();
13169 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
13170 println!(" {}", style::tick_row(60));
13171 println!(" review source scope; these are not guaranteed experiment targets.");
13172 println!();
13173 for (idx, gap) in ranked.iter().enumerate() {
13174 println!(
13175 " {}. [{}] score={} {}",
13176 idx + 1,
13177 gap["id"].as_str().unwrap_or("?"),
13178 gap["score"].as_f64().unwrap_or(0.0),
13179 gap["assertion"].as_str().unwrap_or("")
13180 );
13181 }
13182 }
13183}
13184
/// Implements `vela bridge`: load two or more frontiers and detect candidate
/// cross-frontier "bridge" entities, optionally running rough PubMed
/// prior-art checks over the network for the top `top_n` results.
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    // Each frontier is labeled by its file stem in the report.
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    // Borrowed (&str, &Project) pairs as expected by detect_bridges.
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        // Only the top N bridges get a network lookup.
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            match bridge::check_novelty(&client, &query).await {
                Ok(count) => bridge_item.pubmed_count = Some(count),
                // A failed check is reported but does not abort the run.
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // Throttle successive requests (350 ms between lookups).
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
13230
/// Collected CLI arguments for `vela bench`, grouped into one struct because
/// the command supports several alternative modes (suite run, suite-ready
/// check, and per-gold-file benchmarks) plus threshold tuning.
struct BenchArgs {
    /// Frontier file to benchmark; a default path is substituted when absent.
    frontier: Option<PathBuf>,
    /// Gold-standard file — presumably findings-level gold; confirm against
    /// the rest of cmd_bench (usage is outside this view).
    gold: Option<PathBuf>,
    /// Gold file for entity-level scoring — usage not visible here; confirm.
    entity_gold: Option<PathBuf>,
    /// Gold file passed to the link benchmark task when set.
    link_gold: Option<PathBuf>,
    /// Benchmark suite definition file.
    suite: Option<PathBuf>,
    /// When set, only emit the suite-ready report (and exit non-zero if not ok).
    suite_ready: bool,
    /// Minimum F1 to pass (defaults to 0.05 unless thresholds are disabled).
    min_f1: Option<f64>,
    /// Minimum precision to pass, when thresholds are enforced.
    min_precision: Option<f64>,
    /// Minimum recall to pass, when thresholds are enforced.
    min_recall: Option<f64>,
    /// Disable all threshold enforcement.
    no_thresholds: bool,
    /// Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
13244
13245fn cmd_agent_bench(
13250 gold: &Path,
13251 candidate: &Path,
13252 sources: Option<&Path>,
13253 threshold: Option<f64>,
13254 report_path: Option<&Path>,
13255 json_out: bool,
13256) {
13257 let input = crate::agent_bench::BenchInput {
13258 gold_path: gold.to_path_buf(),
13259 candidate_path: candidate.to_path_buf(),
13260 sources: sources.map(Path::to_path_buf),
13261 threshold: threshold.unwrap_or(0.0),
13262 };
13263 let report = match crate::agent_bench::run(input) {
13264 Ok(r) => r,
13265 Err(e) => {
13266 eprintln!("{} bench failed: {e}", style::err_prefix());
13267 std::process::exit(1);
13268 }
13269 };
13270
13271 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
13272 if let Some(path) = report_path
13273 && let Err(e) = std::fs::write(path, &json)
13274 {
13275 eprintln!(
13276 "{} failed to write report to {}: {e}",
13277 style::err_prefix(),
13278 path.display()
13279 );
13280 }
13281
13282 if json_out {
13283 println!("{json}");
13284 } else {
13285 println!();
13286 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
13287 println!(" {}", style::tick_row(60));
13288 print!("{}", crate::agent_bench::render_pretty(&report));
13289 println!();
13290 }
13291
13292 if !report.pass {
13293 std::process::exit(1);
13294 }
13295}
13296
/// Handle `vela bench`: dispatch between suite-readiness checks, full suite
/// runs, and single-task benchmarks (finding / entity / link gold files).
///
/// Exits non-zero whenever the selected benchmark does not pass.
fn cmd_bench(args: BenchArgs) {
    // --suite-ready: only report whether the suite's fixtures are in place.
    if args.suite_ready {
        let suite_path = args
            .suite
            .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
        let payload =
            benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }
    // --suite: run every task in the suite file.
    if let Some(suite_path) = args.suite {
        let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
        if args.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&payload)
                    .expect("failed to serialize benchmark suite")
            );
        } else {
            let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
            let metrics = payload.get("metrics").unwrap_or(&Value::Null);
            println!();
            println!(" {}", "VELA · BENCH · SUITE".dimmed());
            println!(" {}", style::tick_row(60));
            println!(" suite: {}", suite_path.display());
            println!(
                " status: {}",
                if ok {
                    style::ok("pass")
                } else {
                    style::lost("fail")
                }
            );
            println!(
                " tasks: {}/{} passed",
                metrics
                    .get("tasks_passed")
                    .and_then(Value::as_u64)
                    .unwrap_or(0),
                metrics
                    .get("tasks_total")
                    .and_then(Value::as_u64)
                    .unwrap_or(0)
            );
        }
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Single-task mode against one frontier file.
    let frontier = args
        .frontier
        .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
    // --no-thresholds disables all gates; otherwise min_f1 defaults to 0.05.
    let thresholds = benchmark::BenchmarkThresholds {
        min_f1: if args.no_thresholds {
            None
        } else {
            args.min_f1.or(Some(0.05))
        },
        min_precision: if args.no_thresholds {
            None
        } else {
            args.min_precision
        },
        min_recall: if args.no_thresholds {
            None
        } else {
            args.min_recall
        },
        ..Default::default()
    };
    // Gold-file precedence: link > entity > finding.
    if let Some(path) = args.link_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Link,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.entity_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Entity,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.gold {
        if args.json {
            print_benchmark_or_exit(benchmark::task_envelope(
                &frontier,
                None,
                benchmark::BenchmarkMode::Finding,
                Some(&path),
                &thresholds,
                None,
            ));
        } else {
            // Pretty finding-mode output has its own renderer.
            benchmark::run(&frontier, &path, false);
        }
    } else {
        fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
    }
}
13410
13411fn print_benchmark_or_exit(result: Result<Value, String>) {
13412 let payload = result.unwrap_or_else(|e| fail_return(&e));
13413 println!(
13414 "{}",
13415 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13416 );
13417 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13418 std::process::exit(1);
13419 }
13420}
13421
13422fn cmd_packet(action: PacketAction) {
13423 let (result, json_output) = match action {
13424 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13425 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13426 };
13427 match result {
13428 Ok(output) if json_output => {
13429 println!(
13430 "{}",
13431 serde_json::to_string_pretty(&json!({
13432 "ok": true,
13433 "command": "packet",
13434 "result": output,
13435 }))
13436 .expect("failed to serialize packet response")
13437 );
13438 }
13439 Ok(output) => println!("{output}"),
13440 Err(e) => fail(&e),
13441 }
13442}
13443
13444fn cmd_verify(path: &Path, json_output: bool) {
13449 let result = packet::validate(path);
13450 match result {
13451 Ok(output) if json_output => {
13452 println!(
13453 "{}",
13454 serde_json::to_string_pretty(&json!({
13455 "ok": true,
13456 "command": "verify",
13457 "result": output,
13458 }))
13459 .expect("failed to serialize verify response")
13460 );
13461 }
13462 Ok(output) => {
13463 println!("{output}");
13464 println!(
13465 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13466 );
13467 }
13468 Err(e) => fail(&e),
13469 }
13470}
13471
13472fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13473 if path.join(".vela").exists() {
13474 fail(&format!(
13475 "already initialized: {} exists",
13476 path.join(".vela").display()
13477 ));
13478 }
13479 let payload = frontier_repo::initialize(
13480 path,
13481 frontier_repo::InitOptions {
13482 name,
13483 template,
13484 initialize_git,
13485 },
13486 )
13487 .unwrap_or_else(|e| fail_return(&e));
13488 if json_output {
13489 println!(
13490 "{}",
13491 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13492 );
13493 } else {
13494 println!(
13495 "{} initialized frontier repository in {}",
13496 style::ok("ok"),
13497 path.display()
13498 );
13499 }
13500}
13501
/// Handle `vela quickstart`: one-shot bootstrap of a frontier repository.
///
/// Runs `init`, generates a signing keypair, registers `reviewer` as an
/// actor, and records a first finding — each step by re-invoking the current
/// executable as a subprocess, so the flow stays in lockstep with the real
/// CLI surface. Any failed step aborts the whole quickstart.
fn cmd_quickstart(
    path: &Path,
    name: &str,
    reviewer: &str,
    assertion: Option<&str>,
    keys_out: Option<&Path>,
    json_output: bool,
) {
    use std::process::Command;

    if path.join(".vela").exists() {
        fail(&format!(
            "already initialized: {} exists",
            path.join(".vela").display()
        ));
    }

    let exe = std::env::current_exe()
        .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
    let keys_dir = keys_out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("keys"));
    let assertion_text =
        assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");

    // Run one CLI step as a subprocess; abort on spawn failure or non-zero
    // exit, surfacing the child's stderr.
    let run_step = |label: &str, args: &[&str]| -> std::process::Output {
        let out = Command::new(&exe)
            .args(args)
            .output()
            .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
        if !out.status.success() {
            let stderr = String::from_utf8_lossy(&out.stderr);
            fail(&format!("{label} failed:\n{stderr}"));
        }
        out
    };

    // Step 1: initialize the repository (git disabled for the quickstart).
    run_step(
        "init",
        &[
            "init",
            path.to_string_lossy().as_ref(),
            "--name",
            name,
            "--no-git",
            "--json",
        ],
    );

    // Step 2: generate a signing keypair and capture its public key.
    let keys_out_str = keys_dir.to_string_lossy().into_owned();
    let keypair_out = run_step(
        "sign.generate-keypair",
        &[
            "sign",
            "generate-keypair",
            "--out",
            keys_out_str.as_ref(),
            "--json",
        ],
    );
    let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
    let public_key = keypair_json
        .get("public_key")
        .and_then(|v| v.as_str())
        .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
        .to_string();

    // Step 3: register the reviewer as an actor with that public key.
    run_step(
        "actor.add",
        &[
            "actor",
            "add",
            path.to_string_lossy().as_ref(),
            reviewer,
            "--pubkey",
            public_key.as_str(),
            "--json",
        ],
    );

    // Step 4: record a first (placeholder) finding and capture its id.
    let finding_out = run_step(
        "finding.add",
        &[
            "finding",
            "add",
            path.to_string_lossy().as_ref(),
            "--assertion",
            assertion_text,
            "--author",
            reviewer,
            "--apply",
            "--json",
        ],
    );
    let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
    let finding_id = finding_json
        .get("finding_id")
        .and_then(|v| v.as_str())
        .map(str::to_string);

    if json_output {
        let payload = json!({
            "ok": true,
            "command": "quickstart",
            "frontier": path.display().to_string(),
            "name": name,
            "reviewer": reviewer,
            "public_key": public_key,
            "keys_dir": keys_dir.display().to_string(),
            "finding_id": finding_id,
            "next_steps": [
                format!("vela serve {}", path.display()),
                format!(
                    "vela ingest <paper.pdf|doi:...> --frontier {}",
                    path.display()
                ),
                format!("vela log {}", path.display()),
            ],
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · QUICKSTART · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" frontier: {}", path.display());
    println!(" name: {name}");
    println!(" reviewer: {reviewer}");
    println!(" keys: {}", keys_dir.display());
    // NOTE(review): slicing assumes the key is at least 16 bytes of ASCII —
    // panics on a shorter or non-char-boundary key; confirm key format.
    println!(" pubkey: {}…", &public_key[..16]);
    if let Some(id) = finding_id.as_deref() {
        println!(" finding: {id}");
    }
    println!();
    println!(" {}", style::ok("done"));
    println!(" next:");
    println!(" vela serve {}", path.display());
    println!(
        " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
        path.display()
    );
    println!(" vela log {}", path.display());
    println!();
}
13667
/// Handle `vela lock`: materialize the lock file (snapshot / event-log /
/// proposal-state hashes) for the repo at `path`, or delegate to
/// `cmd_lock_check` when `--check` is set.
fn cmd_lock(path: &Path, check: bool, json_output: bool) {
    if check {
        cmd_lock_check(path, json_output);
        return;
    }
    let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "lock",
                "path": path.display().to_string(),
                "snapshot_hash": payload.get("snapshot_hash"),
                "event_log_hash": payload.get("event_log_hash"),
                "proposal_state_hash": payload.get("proposal_state_hash"),
            }))
            .expect("failed to serialize lock report")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · LOCK · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    // "?" placeholders cover hashes missing from the materialize payload.
    println!(
        " snapshot_hash: {}",
        payload
            .get("snapshot_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        " event_log_hash: {}",
        payload
            .get("event_log_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        " proposal_state_hash: {}",
        payload
            .get("proposal_state_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!();
    println!(" {}", style::ok("locked"));
}
13726
/// Handle `vela lock --check`: recompute the snapshot and event-log hashes
/// for the repo at `path` and compare them with `vela.lock`, exiting
/// non-zero on any drift.
///
/// NOTE(review): only snapshot_hash and event_log_hash are drift-checked;
/// proposal_state_hash (written by `cmd_lock`) is not — confirm intentional.
fn cmd_lock_check(path: &Path, json_output: bool) {
    use crate::frontier_repo::read_lock;
    let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
    let Some(lock) = lock else {
        fail("lock --check: no vela.lock found at path");
    };
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
    let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
    let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
    // Collect human-readable drift lines; empty means the lock still holds.
    let mut drift: Vec<String> = Vec::new();
    if lock.snapshot_hash != current_snapshot {
        drift.push(format!(
            "snapshot_hash: lock={} current={}",
            lock.snapshot_hash, current_snapshot
        ));
    }
    if lock.event_log_hash != current_event_log {
        drift.push(format!(
            "event_log_hash: lock={} current={}",
            lock.event_log_hash, current_event_log
        ));
    }
    let ok = drift.is_empty();
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": ok,
                "command": "lock.check",
                "path": path.display().to_string(),
                "drift": drift,
                "lock_snapshot_hash": lock.snapshot_hash,
                "current_snapshot_hash": current_snapshot,
                "lock_event_log_hash": lock.event_log_hash,
                "current_event_log_hash": current_event_log,
                "dependency_count": lock.dependencies.len(),
            }))
            .expect("failed to serialize lock check report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · LOCK · CHECK · {}", path.display())
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        if ok {
            println!(" snapshot_hash: {}", lock.snapshot_hash);
            println!(" event_log_hash: {}", lock.event_log_hash);
            println!(" dependencies pinned: {}", lock.dependencies.len());
            println!();
            println!(" {} on-disk state matches vela.lock", style::ok("ok"));
        } else {
            println!(" {} drift detected:", style::err_prefix());
            for d in &drift {
                println!(" - {d}");
            }
        }
    }
    // Drift is an error for scripting: exit code 1.
    if !ok {
        std::process::exit(1);
    }
}
13792
13793fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
13798 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
13799 let out_dir = out
13800 .map(Path::to_path_buf)
13801 .unwrap_or_else(|| path.join("doc"));
13802 let report =
13803 crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
13804 if json_output {
13805 println!(
13806 "{}",
13807 serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
13808 );
13809 return;
13810 }
13811 println!();
13812 println!(
13813 " {}",
13814 format!("VELA · DOC · {}", path.display())
13815 .to_uppercase()
13816 .dimmed()
13817 );
13818 println!(" {}", style::tick_row(60));
13819 println!(" frontier_id: {}", report.frontier_id);
13820 println!(" out: {}", report.out);
13821 println!(" files written: {}", report.files_written);
13822 println!(" findings: {}", report.findings_documented);
13823 println!(" events: {}", report.events_documented);
13824 println!();
13825 println!(
13826 " {} open {}/index.html in a browser",
13827 style::ok("ok"),
13828 report.out
13829 );
13830}
13831
13832fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
13833 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
13834 let target = into
13835 .map(Path::to_path_buf)
13836 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
13837 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
13838 println!(
13839 "{} {} findings · {}",
13840 style::ok("imported"),
13841 frontier.findings.len(),
13842 target.display()
13843 );
13844}
13845
13846fn cmd_locator_repair(
13847 path: &Path,
13848 atom_id: &str,
13849 locator_override: Option<&str>,
13850 reviewer: &str,
13851 reason: &str,
13852 apply: bool,
13853 json_output: bool,
13854) {
13855 let report = state::repair_evidence_atom_locator(
13856 path,
13857 atom_id,
13858 locator_override,
13859 reviewer,
13860 reason,
13861 apply,
13862 )
13863 .unwrap_or_else(|e| fail_return(&e));
13864 print_state_report(&report, json_output);
13865}
13866
/// Handle `vela source-fetch`: resolve a paper/trial identifier to a
/// metadata record, with an optional on-disk cache.
///
/// The cache key is the sha256 of the normalized identifier, stored under
/// `<cache_root>/sources/cache/<hash>.json`. `refresh` skips the cache read
/// (the fresh result is still written back). The record goes to `out_path`
/// when given, otherwise stdout.
async fn cmd_source_fetch(
    identifier: &str,
    cache_root: Option<&Path>,
    out_path: Option<&Path>,
    refresh: bool,
    _json_output: bool,
) {
    use sha2::{Digest, Sha256};

    let normalized = normalize_source_identifier(identifier);
    let cache_path = cache_root.map(|root| {
        let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
        root.join("sources")
            .join("cache")
            .join(format!("{hash}.json"))
    });

    // Serve from cache when allowed and present.
    if !refresh
        && let Some(p) = cache_path.as_ref()
        && p.is_file()
    {
        let body = std::fs::read_to_string(p)
            .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
        emit_source_fetch_result(&body, out_path);
        return;
    }

    let result = fetch_source_metadata(&normalized).await;
    let json = match result {
        Ok(value) => serde_json::to_string_pretty(&value)
            .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
        Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
    };

    // Populate the cache for next time (also after --refresh).
    if let Some(p) = cache_path.as_ref() {
        if let Some(parent) = p.parent() {
            std::fs::create_dir_all(parent)
                .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
        }
        std::fs::write(p, &json)
            .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
    }
    emit_source_fetch_result(&json, out_path);
}
13915
13916fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
13917 if let Some(p) = out_path {
13918 if let Some(parent) = p.parent() {
13919 let _ = std::fs::create_dir_all(parent);
13920 }
13921 std::fs::write(p, body)
13922 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
13923 } else {
13924 println!("{body}");
13925 }
13926}
13927
/// Canonicalize a user-supplied source identifier into the prefixed form
/// the fetchers understand (`doi:`, `pmid:`, `nct:`, `pmc:`).
///
/// Rules, in order:
/// - already-prefixed identifiers pass through unchanged (trimmed);
/// - strings starting with `10.` are DOIs;
/// - `NCT…`/`nct…` accession numbers become `nct:<digits>`;
/// - non-empty all-digit strings are PubMed ids;
/// - anything else is returned trimmed and unchanged.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous version appended `.split_at(0).0`, which is
        // always the empty prefix, so every accession collapsed to "nct:"
        // and the trial number was dropped. Keep the digits after "NCT".
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    // Guard against empty input: `all()` on an empty iterator is true, which
    // previously turned "" into "pmid:".
    if !trimmed.is_empty() && trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
13956
/// Fetch bibliographic metadata for a normalized identifier.
///
/// `doi:` goes to Crossref; when Crossref lacks an abstract, the DOI is
/// resolved to a PMID and the abstract is backfilled from PubMed (recorded
/// under `abstract_source`). `pmid:` uses PubMed, `nct:` uses
/// ClinicalTrials.gov; anything else is an error.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Backfill: only when Crossref has no abstract, the DOI resolves to
        // a PMID, and PubMed yields a record. Backfill failures are silent.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    // NOTE(review): `pmc:` survives normalization but has no fetcher, so it
    // lands here — confirm whether PMC support is intended.
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
14006
/// Resolve a DOI to a PubMed id via NCBI ESearch.
///
/// Returns `None` on any network/HTTP/parse failure, and also when the DOI
/// matches zero or more than one PMID — an ambiguous match is deliberately
/// treated as no match.
async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
        urlencoding::encode(doi)
    );
    let resp = client.get(&url).send().await.ok()?;
    if !resp.status().is_success() {
        return None;
    }
    let body: Value = resp.json().await.ok()?;
    let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
    if id_list.len() != 1 {
        return None;
    }
    id_list.first()?.as_str().map(|s| s.to_string())
}
14028
/// Fetch work metadata for a DOI from the Crossref REST API and map it into
/// the shared `vela.source_fetch.v0.1` record shape.
async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
    let url = format!("https://api.crossref.org/works/{doi}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("crossref get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("crossref returned {}", resp.status()));
    }
    let body: Value = resp
        .json()
        .await
        .map_err(|e| format!("crossref json: {e}"))?;
    let work = body.get("message").cloned().unwrap_or(Value::Null);
    // Crossref wraps title/container-title in arrays; take the first entry.
    let title = work
        .get("title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // The abstract arrives as JATS XML; strip tags to plain text.
    let abstract_html = work
        .get("abstract")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let abstract_text = strip_jats_tags(&abstract_html);
    // issued.date-parts is [[year, month, day]]; keep just the year.
    let year = work
        .get("issued")
        .and_then(|v| v.get("date-parts"))
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_i64());
    let journal = work
        .get("container-title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // "Given Family" per author; entries with neither name are dropped.
    let authors = work
        .get("author")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|a| {
                    let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
                    let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
                    let combined = format!("{given} {family}").trim().to_string();
                    if combined.is_empty() {
                        None
                    } else {
                        Some(combined)
                    }
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("doi:{doi}"),
        "source": "crossref",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": authors,
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14102
/// Fetch article metadata for a PMID from NCBI EFetch (XML) and map it into
/// the shared `vela.source_fetch.v0.1` record shape.
///
/// Fields are pulled with a naive literal-tag scan (`extract_xml_text`).
/// NOTE(review): `<AbstractText Label="...">` carries attributes and will
/// not match the bare `<AbstractText>` literal, leaving the abstract empty
/// for structured abstracts; `<Title>` is presumably the journal title in
/// this XML — confirm against the efetch schema. Authors are never parsed.
async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
    );
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("pubmed get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("pubmed returned {}", resp.status()));
    }
    let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
    let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
    let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
    let year = extract_xml_text(&xml, "<Year>", "</Year>")
        .parse::<i64>()
        .ok();
    let journal = extract_xml_text(&xml, "<Title>", "</Title>");
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("pmid:{pmid}"),
        "source": "pubmed",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14134
/// Fetch trial metadata from the ClinicalTrials.gov v2 API and map it into
/// the shared `vela.source_fetch.v0.1` record shape.
///
/// Accepts a bare number or an `NCT…`/`nct…` accession; the NCT prefix is
/// (re)applied uppercase. The trial phase is stored in the record's
/// "journal" slot to keep the shape uniform across sources; "year" is null.
async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
    let nct_clean = nct.trim();
    let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
        nct_clean.to_uppercase()
    } else {
        format!("NCT{nct_clean}")
    };
    let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ctgov get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("ctgov returned {}", resp.status()));
    }
    let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
    let title = body
        .pointer("/protocolSection/identificationModule/briefTitle")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // briefSummary doubles as the "abstract" for trials.
    let abstract_text = body
        .pointer("/protocolSection/descriptionModule/briefSummary")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // phases is an array; only the first entry is kept.
    let phase = body
        .pointer("/protocolSection/designModule/phases")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("nct:{nct_id}"),
        "source": "clinicaltrials.gov",
        "title": title,
        "abstract": abstract_text,
        "year": Value::Null,
        "journal": phase,
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14181
/// Return the trimmed text between the first `open`…`close` tag pair in
/// `xml`, or an empty string when either tag is absent. Literal tag match
/// only — tags carrying attributes will not be found.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    let Some(start) = xml.find(open) else {
        return String::new();
    };
    let rest = &xml[start + open.len()..];
    match rest.find(close) {
        Some(end) => rest[..end].trim().to_string(),
        None => String::new(),
    }
}
14191
/// Drop JATS/XML tags from a Crossref abstract and collapse every run of
/// whitespace to a single space. Angle brackets themselves are never kept.
fn strip_jats_tags(html: &str) -> String {
    let mut inside_tag = false;
    let text: String = html
        .chars()
        .filter(|&c| match c {
            '<' => {
                inside_tag = true;
                false
            }
            '>' => {
                inside_tag = false;
                false
            }
            _ => !inside_tag,
        })
        .collect();
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
14205
14206fn cmd_span_repair(
14207 path: &Path,
14208 finding_id: &str,
14209 section: &str,
14210 text: &str,
14211 reviewer: &str,
14212 reason: &str,
14213 apply: bool,
14214 json_output: bool,
14215) {
14216 let report =
14217 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
14218 .unwrap_or_else(|e| fail_return(&e));
14219 print_state_report(&report, json_output);
14220}
14221
/// Handle `vela entity-resolve`: attach an external-ontology resolution
/// (`source`/`id` with confidence and method) to a named entity on a
/// finding, optionally applying it, then render the state report.
#[allow(clippy::too_many_arguments)]
fn cmd_entity_resolve(
    path: &Path,
    finding_id: &str,
    entity_name: &str,
    source: &str,
    id: &str,
    confidence: f64,
    matched_name: Option<&str>,
    resolution_method: &str,
    reviewer: &str,
    reason: &str,
    apply: bool,
    json_output: bool,
) {
    // Thin pass-through: all validation and mutation live in `state`.
    let report = state::resolve_finding_entity(
        path,
        finding_id,
        entity_name,
        source,
        id,
        confidence,
        matched_name,
        resolution_method,
        reviewer,
        reason,
        apply,
    )
    .unwrap_or_else(|e| fail_return(&e));
    print_state_report(&report, json_output);
}
14253
14254fn cmd_propagate(
14255 path: &Path,
14256 retract: Option<String>,
14257 reduce_confidence: Option<String>,
14258 to: Option<f64>,
14259 output: Option<&Path>,
14260) {
14261 let mut frontier = load_frontier_or_fail(path);
14262 let (finding_id, action, label) = if let Some(id) = retract {
14263 (id, propagate::PropagationAction::Retracted, "retraction")
14264 } else if let Some(id) = reduce_confidence {
14265 let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
14266 if !(0.0..=1.0).contains(&score) {
14267 fail("--to must be between 0.0 and 1.0");
14268 }
14269 (
14270 id,
14271 propagate::PropagationAction::ConfidenceReduced { new_score: score },
14272 "confidence reduction",
14273 )
14274 } else {
14275 fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
14276 };
14277 if !frontier.findings.iter().any(|f| f.id == finding_id) {
14278 fail(&format!("finding not found: {finding_id}"));
14279 }
14280 let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
14281 frontier.review_events.extend(result.events.clone());
14286 project::recompute_stats(&mut frontier);
14287 propagate::print_result(&result, label, &finding_id);
14288 let out = output.unwrap_or(path);
14289 repo::save_to_path(out, &frontier).expect("Failed to save frontier");
14290 println!(" output: {}", out.display());
14291}
14292
/// Handle `vela mcp-setup`: print an MCP server configuration snippet for
/// the user's client, serving either a single frontier file (`source`) or a
/// directory of frontiers (`frontiers`). `source` wins when both are given;
/// with neither, "frontier.json" is the placeholder.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    let source_desc = source
        .map(|p| p.display().to_string())
        .or_else(|| frontiers.map(|p| p.display().to_string()))
        .unwrap_or_else(|| "frontier.json".to_string());
    let args = if let Some(path) = source {
        format!(r#""serve", "{}""#, path.display())
    } else if let Some(path) = frontiers {
        format!(r#""serve", "--frontiers", "{}""#, path.display())
    } else {
        r#""serve", "frontier.json""#.to_string()
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
 "mcpServers": {{
 "vela": {{
 "command": "vela",
 "args": [{args}]
 }}
 }}
}}

Source: {source_desc}"#
    );
}
14320
14321fn parse_entities(input: &str) -> Vec<(String, String)> {
14322 if input.trim().is_empty() {
14323 return Vec::new();
14324 }
14325 input
14326 .split(',')
14327 .filter_map(|pair| {
14328 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
14329 if parts.len() == 2 {
14330 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
14331 } else {
14332 eprintln!(
14333 "{} skipping malformed entity '{}'",
14334 style::warn("warn"),
14335 pair.trim()
14336 );
14337 None
14338 }
14339 })
14340 .collect()
14341}
14342
14343fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
14344 inputs
14345 .iter()
14346 .filter_map(|input| {
14347 let trimmed = input.trim();
14348 if trimmed.is_empty() {
14349 return None;
14350 }
14351 if trimmed.starts_with('{') {
14352 match serde_json::from_str::<Value>(trimmed) {
14353 Ok(value @ Value::Object(_)) => return Some(value),
14354 Ok(_) | Err(_) => {
14355 eprintln!(
14356 "{} evidence span JSON should be an object; storing as text",
14357 style::warn("warn")
14358 );
14359 }
14360 }
14361 }
14362 Some(json!({
14363 "section": "curator_source",
14364 "text": trimmed,
14365 }))
14366 })
14367 .collect()
14368}
14369
14370fn hash_path(path: &Path) -> Result<String, String> {
14371 let mut hasher = Sha256::new();
14372 if path.is_file() {
14373 let bytes = std::fs::read(path)
14374 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
14375 hasher.update(&bytes);
14376 } else if path.is_dir() {
14377 let mut files = Vec::new();
14378 collect_hash_files(path, path, &mut files)?;
14379 files.sort();
14380 for rel in files {
14381 hasher.update(rel.to_string_lossy().as_bytes());
14382 let bytes = std::fs::read(path.join(&rel))
14383 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
14384 hasher.update(bytes);
14385 }
14386 } else {
14387 return Err(format!("Cannot hash missing path {}", path.display()));
14388 }
14389 Ok(format!("{:x}", hasher.finalize()))
14390}
14391
14392fn load_frontier_or_fail(path: &Path) -> project::Project {
14393 repo::load_from_path(path).unwrap_or_else(|e| {
14394 fail_return(&format!(
14395 "Failed to load frontier '{}': {e}",
14396 path.display()
14397 ))
14398 })
14399}
14400
14401fn hash_path_or_fail(path: &Path) -> String {
14402 hash_path(path).unwrap_or_else(|e| {
14403 fail_return(&format!(
14404 "Failed to hash frontier '{}': {e}",
14405 path.display()
14406 ))
14407 })
14408}
14409
/// Recursively collect every regular file under `dir`, pushing paths
/// relative to `root` into `files`. Order is whatever the filesystem
/// yields; callers sort before hashing.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
14428
14429fn schema_error_suggestion(error: &str) -> &'static str {
14430 if schema_error_action(error).is_some() {
14431 "Run `vela normalize` to repair deterministic frontier state."
14432 } else {
14433 "Inspect and correct the referenced frontier field."
14434 }
14435}
14436
14437fn schema_error_fix(error: &str) -> bool {
14438 schema_error_action(error).is_some()
14439}
14440
/// Classify a schema error into a deterministic repair action:
/// metadata/stats drift maps to `normalize_metadata_and_stats`, stale
/// content-addressed ids map to `rewrite_ids`, anything else is unrepairable.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substrings that identify metadata/stats drift repaired by normalize.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        return Some("normalize_metadata_and_stats");
    }
    if error.contains("does not match content-address") {
        return Some("rewrite_ids");
    }
    None
}
14455
14456fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14457 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14458 for diagnostic in diagnostics {
14459 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14460 *actions.entry(action.to_string()).or_default() += 1;
14461 }
14462 }
14463 actions
14464 .into_iter()
14465 .map(|(action, count)| {
14466 let command = if action == "rewrite_ids" {
14467 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14468 } else {
14469 "vela normalize <frontier> --write"
14470 };
14471 json!({
14472 "action": action,
14473 "count": count,
14474 "command": command,
14475 })
14476 })
14477 .collect()
14478}
14479
14480fn cmd_integrity(frontier: &Path, json: bool) {
14481 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14482 if json {
14483 println!(
14484 "{}",
14485 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14486 );
14487 } else {
14488 println!("vela integrity");
14489 println!(" frontier: {}", frontier.display());
14490 println!(" status: {}", report.status);
14491 println!(" proof freshness: {}", report.proof_freshness);
14492 println!(" structural errors: {}", report.structural_errors.len());
14493 for error in report.structural_errors.iter().take(8) {
14494 println!(" - {}: {}", error.rule_id, error.message);
14495 }
14496 }
14497}
14498
14499fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
14500 let report =
14501 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
14502 if json {
14503 println!(
14504 "{}",
14505 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
14506 );
14507 } else {
14508 println!("vela impact");
14509 println!(" finding: {}", report.target.id);
14510 println!(" frontier: {}", report.frontier.vfr_id);
14511 println!(" direct dependents: {}", report.summary.direct_dependents);
14512 println!(" downstream: {}", report.summary.total_downstream);
14513 println!(" open proposals: {}", report.summary.open_proposals);
14514 println!(" accepted events: {}", report.summary.accepted_events);
14515 println!(" proof: {}", report.summary.proof_status);
14516 }
14517}
14518
/// `vela discord`: compute the per-finding discord assignment for a frontier
/// and print it as a histogram plus a (filterable) row listing, or as JSON.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // Findings that carry at least one discord kind ("support" set).
    let support = assignment.frontier_support();

    // One row per supported finding: (finding id, its discord kind names),
    // dropping rows that do not include the requested --kind filter.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Histogram over ALL findings (not just filtered rows): how many findings
    // carry each discord kind; zero-count kinds are omitted.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    // Row listing is capped at 50 entries; report the overflow count.
    if !rows.is_empty() {
        println!();
        println!(" findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
14616
14617fn empty_signal_report() -> signals::SignalReport {
14618 signals::SignalReport {
14619 schema: "vela.signals.v0".to_string(),
14620 frontier: "unavailable".to_string(),
14621 signals: Vec::new(),
14622 review_queue: Vec::new(),
14623 proof_readiness: signals::ProofReadiness {
14624 status: "unavailable".to_string(),
14625 blockers: 0,
14626 warnings: 0,
14627 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
14628 },
14629 }
14630}
14631
14632fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
14633 println!();
14634 println!(" {}", "SIGNALS".dimmed());
14635 println!(" {}", style::tick_row(60));
14636 println!(" total signals: {}", report.signals.len());
14637 println!(" proof readiness: {}", report.proof_readiness.status);
14638 if !report.review_queue.is_empty() {
14639 println!(" review queue: {} items", report.review_queue.len());
14640 }
14641 if strict && report.proof_readiness.status != "ready" {
14642 println!(
14643 " {} proof readiness has blocking signals.",
14644 style::lost("strict check failed")
14645 );
14646 }
14647}
14648
/// Write `value` as pretty JSON at `relative_path` inside `packet_dir`, then
/// update both packet indexes so the packet stays self-consistent:
/// 1. upsert the new file's entry (path / sha256 / byte count) into
///    `manifest.json` (`included_files`) and `packet.lock.json` (`files`);
/// 2. re-hash the now-modified `packet.lock.json` and upsert *its* entry back
///    into `manifest.json`.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    // Create intermediate directories for nested relative paths.
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Index entry shared by both manifests: content address + size.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        // The two manifests store their file lists under different keys.
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Upsert: drop any stale entry for this path before appending.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // packet.lock.json itself changed above, so recompute its digest and
    // refresh its own entry inside manifest.json.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    // Same upsert pattern for the lock file's entry.
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
14729
14730fn print_tool_check_report(report: &Value) {
14731 let summary = report.get("summary").unwrap_or(&Value::Null);
14732 let frontier = report.get("frontier").unwrap_or(&Value::Null);
14733 println!();
14734 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
14735 println!(" {}", style::tick_row(60));
14736 println!(
14737 "frontier: {}",
14738 frontier
14739 .get("name")
14740 .and_then(Value::as_str)
14741 .unwrap_or("unknown")
14742 );
14743 println!(
14744 "findings: {}",
14745 frontier
14746 .get("findings")
14747 .and_then(Value::as_u64)
14748 .unwrap_or_default()
14749 );
14750 println!(
14751 "checks: {} passed, {} failed",
14752 summary
14753 .get("passed")
14754 .and_then(Value::as_u64)
14755 .unwrap_or_default(),
14756 summary
14757 .get("failed")
14758 .and_then(Value::as_u64)
14759 .unwrap_or_default()
14760 );
14761 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
14762 let names = tools
14763 .iter()
14764 .filter_map(Value::as_str)
14765 .collect::<Vec<_>>()
14766 .join(", ");
14767 println!("tools: {names}");
14768 }
14769 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
14770 for check in checks {
14771 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
14772 style::ok("ok")
14773 } else {
14774 style::lost("lost")
14775 };
14776 println!(
14777 " {} {}",
14778 status,
14779 check
14780 .get("tool")
14781 .and_then(Value::as_str)
14782 .unwrap_or("unknown")
14783 );
14784 }
14785 }
14786}
14787
14788fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
14789 if json_output {
14790 println!(
14791 "{}",
14792 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
14793 );
14794 } else {
14795 println!("{}", report.message);
14796 println!(" frontier: {}", report.frontier);
14797 println!(" finding: {}", report.finding_id);
14798 println!(" proposal: {}", report.proposal_id);
14799 println!(" status: {}", report.proposal_status);
14800 if let Some(event_id) = &report.applied_event_id {
14801 println!(" event: {}", event_id);
14802 }
14803 println!(" wrote: {}", report.wrote_to);
14804 }
14805}
14806
14807fn print_history(payload: &Value) {
14808 let finding = payload.get("finding").unwrap_or(&Value::Null);
14809 println!("vela history");
14810 println!(
14811 " finding: {}",
14812 finding
14813 .get("id")
14814 .and_then(Value::as_str)
14815 .unwrap_or("unknown")
14816 );
14817 println!(
14818 " assertion: {}",
14819 finding
14820 .get("assertion")
14821 .and_then(Value::as_str)
14822 .unwrap_or("")
14823 );
14824 println!(
14825 " confidence: {:.3}",
14826 finding
14827 .get("confidence")
14828 .and_then(Value::as_f64)
14829 .unwrap_or_default()
14830 );
14831 let reviews = payload
14832 .get("review_events")
14833 .and_then(Value::as_array)
14834 .map_or(0, Vec::len);
14835 let updates = payload
14836 .get("confidence_updates")
14837 .and_then(Value::as_array)
14838 .map_or(0, Vec::len);
14839 let annotations = finding
14840 .get("annotations")
14841 .and_then(Value::as_array)
14842 .map_or(0, Vec::len);
14843 let sources = payload
14844 .get("sources")
14845 .and_then(Value::as_array)
14846 .map_or(0, Vec::len);
14847 let atoms = payload
14848 .get("evidence_atoms")
14849 .and_then(Value::as_array)
14850 .map_or(0, Vec::len);
14851 let conditions = payload
14852 .get("condition_records")
14853 .and_then(Value::as_array)
14854 .map_or(0, Vec::len);
14855 let proposals = payload
14856 .get("proposals")
14857 .and_then(Value::as_array)
14858 .map_or(0, Vec::len);
14859 let events = payload
14860 .get("events")
14861 .and_then(Value::as_array)
14862 .map_or(0, Vec::len);
14863 println!(" review events: {reviews}");
14864 println!(" confidence updates: {updates}");
14865 println!(" annotations: {annotations}");
14866 println!(" sources: {sources}");
14867 println!(" evidence atoms: {atoms}");
14868 println!(" condition records: {conditions}");
14869 println!(" proposals: {proposals}");
14870 println!(" canonical events: {events}");
14871 if let Some(status) = payload
14872 .get("proof_state")
14873 .and_then(|value| value.get("latest_packet"))
14874 .and_then(|value| value.get("status"))
14875 .and_then(Value::as_str)
14876 {
14877 println!(" proof state: {status}");
14878 }
14879 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
14880 for event in events.iter().take(8) {
14881 println!(
14882 " - {} {} {}",
14883 event
14884 .get("reviewed_at")
14885 .and_then(Value::as_str)
14886 .unwrap_or(""),
14887 event.get("id").and_then(Value::as_str).unwrap_or(""),
14888 event.get("reason").and_then(Value::as_str).unwrap_or("")
14889 );
14890 }
14891 }
14892}
14893
/// JSON-serializable record of one proof run.
///
/// NOTE(review): field meanings below are inferred from the field names; the
/// code that populates `ProofTrace` is outside this chunk — confirm there.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag for the trace format itself.
    pub trace_version: String,
    /// The invocation that produced this trace — presumably argv-style.
    pub command: Vec<String>,
    /// Source path or identifier the proof was run against.
    pub source: String,
    /// Content hash of `source`.
    pub source_hash: String,
    /// Schema version of the proved frontier state.
    pub schema_version: String,
    /// Artifacts that were checked as part of the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results attached to the run.
    pub benchmark: Option<Value>,
    /// Location/identity of the packet manifest involved.
    pub packet_manifest: String,
    /// Outcome of packet validation.
    pub packet_validation: String,
    /// Caveats accumulated during the run.
    pub caveats: Vec<String>,
    /// Overall status of the proof run.
    pub status: String,
    /// Where the trace itself was written.
    pub trace_path: String,
}
14909
/// Allowlist of subcommand names treated as "science" commands; membership is
/// tested by `is_science_subcommand`. Entries are lowercase and hyphenated,
/// matching the CLI command names.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "doc",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
15045
15046pub fn is_science_subcommand(name: &str) -> bool {
15047 SCIENCE_SUBCOMMANDS.contains(&name)
15048}
15049
/// Full, hand-curated help screen (`help advanced`): one raw-string template
/// whose only placeholder is the crate version. Keep the command listing in
/// sync with the `Commands` enum and `SCIENCE_SUBCOMMANDS`.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
  vela <COMMAND>

Core flow (v0.74):
  init            Initialize a split frontier repo
  ingest          Ingest a paper, dataset, or Carina packet (dispatches by file type)
  propose         Create a finding.review proposal
  diff            Preview a `vpr_*` proposal, or compare two frontier files
  accept          Apply a proposal under reviewer authority
  attest          Sign findings under your private key
  log             Recent canonical state events
  lineage         State-transition replay for one finding
  serve           Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
  check           Validate a frontier, repo, or proof packet
  integrity       Check accepted frontier state integrity
  impact          Report downstream finding impact
  normalize       Apply deterministic frontier-state repairs
  proof           Export and validate a proof packet
  repo            Inspect split frontier repository status and shape
  stats           Show frontier statistics
  search          Search findings
  tensions        List candidate contradictions and tensions
  gaps            Inspect and rank candidate gap review leads
  bridge          Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
  scout           Run Literature Scout against a folder of PDFs (writes proposals)
  compile-notes   Run Notes Compiler against a Markdown vault (writes proposals)
  compile-code    Run Code & Notebook Analyst against a research repo (writes proposals)
  compile-data    Run Datasets agent against a folder of CSV/TSV data (writes proposals)
  review-pending  Run Reviewer Agent: score every pending proposal (writes notes)
  find-tensions   Run Contradiction Finder: surface real contradictions among findings
  plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
  export          Export frontier artifacts
  packet          Inspect or validate proof packets
  bench           Run deterministic benchmark gates
  conformance     Run protocol conformance vectors
  sign            Optional signing and signature verification
  runtime-adapter
                  Normalize external runtime exports into reviewable proposals
  version         Show version information
  import          Import frontier.json into a .vela repo
  proposals       Inspect, validate, export, import, accept, or reject write proposals
  artifact-to-state
                  Import a Carina artifact packet as reviewable proposals
  bridge-kit
                  Validate Carina artifact packets before importing runtime output
  source-adapter
                  Run reviewed source adapters into artifact-to-state proposals
  finding         Add or manage finding bundles as frontier state
  link            Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
  entity          Resolve unresolved entities against a bundled common-entity table (v0.19)
  frontier        Scaffold (`new`), materialize, and manage frontier metadata + deps
  actor           Register Ed25519 publisher identities in a frontier
  registry        Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
  review          Create a review proposal or review interactively
  note            Add a lightweight note to a finding
  caveat          Create an explicit caveat proposal
  revise          Create a confidence revision proposal
  reject          Create a rejection proposal
  history         Show state-transition history for one finding (v0.74 alias: `lineage`)
  import-events   Import review/state events from a packet or JSON file
  retract         Create a retraction proposal
  propagate       Simulate impact over declared dependency links
  artifact-add    Register a content-addressed artifact
  artifacts       List content-addressed artifacts
  artifact-audit  Audit artifact locators, hashes, references, and profiles
  decision-brief  Show the validated decision brief projection
  trial-summary   Show the validated trial outcome projection
  source-verification Show the validated source verification projection
  source-ingest-plan Show the validated source ingest plan
  clinical-trial-import Import a ClinicalTrials.gov record as an artifact
  locator-repair  Mechanically repair an evidence atom's missing source locator
  span-repair     Mechanically repair a finding's missing evidence span
  entity-resolve  Resolve a finding entity to a canonical id
  source-fetch    Fetch metadata + abstract for a doi:/pmid:/nct: source
  atlas           Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
  constellation   Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
  vela init demo --name "Your bounded question"
  vela ingest paper.pdf --frontier demo
  vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
  vela diff <vpr_id> --frontier demo
  vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
  vela serve --path demo

Substrate health:
  vela frontier materialize my-frontier --json
  vela repo status my-frontier --json
  vela proof verify my-frontier --json
  vela check my-frontier --strict --json

Monolithic frontier file:
  vela frontier new frontier.json --name "Your bounded question"
  vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
  vela check frontier.json --json
  FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
  vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
  vela frontier new ./frontier.json --name "Your bounded question"
  vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
  vela sign generate-keypair --out keys
  vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
  vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
    --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
15168
// ---------------------------------------------------------------------------
// Late-bound async command handlers.
//
// Each `*Handler` type below is a plain `fn` pointer returning a boxed,
// sendable future; the concrete implementation is installed once at startup
// via the matching `register_*_handler` function and stored in a `OnceLock`.
// A second registration is a silent no-op: the `Err` from `OnceLock::set` is
// deliberately discarded.
// ---------------------------------------------------------------------------

/// Handler signature for the scout flow (folder of inputs -> proposals).
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Install the scout handler; later registrations are ignored.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}

/// Handler signature for atlas initialization.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Install the atlas-init handler; later registrations are ignored.
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}

/// Handler signature for atlas materialization.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Install the atlas-materialize handler; later registrations are ignored.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}

/// Handler signature for serving an atlas.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Install the atlas-serve handler; later registrations are ignored.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}

/// Handler signature for updating an atlas's member frontiers.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Install the atlas-update handler; later registrations are ignored.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}

/// Handler signature for constellation initialization.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Install the constellation-init handler; later registrations are ignored.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}

/// Handler signature for constellation materialization.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Install the constellation-materialize handler; later registrations are ignored.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}

/// Handler signature for serving a constellation.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Install the constellation-serve handler; later registrations are ignored.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}

/// Handler signature for the notes-compiler flow.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Install the notes handler; later registrations are ignored.
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}

/// Handler signature for the code-compiler flow.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Install the code handler; later registrations are ignored.
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}

/// Handler signature for the datasets flow.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Install the datasets handler; later registrations are ignored.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}

/// Handler signature for the reviewer flow.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Install the reviewer handler; later registrations are ignored.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}

/// Handler signature for the tensions-finder flow.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Install the tensions handler; later registrations are ignored.
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}

/// Handler signature for the experiment-planner flow.
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Install the experiments handler; later registrations are ignored.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
15402
/// Walk from the current working directory toward the filesystem root and
/// return the first ancestor that contains a `.vela/` directory, or `None`
/// when no enclosing repo exists (or the cwd cannot be read).
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    // `ancestors()` yields `start` itself first, then each parent up to `/`,
    // matching the original pop-until-root loop.
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
15429
/// Short, curated help screen shown in interactive session mode; the full
/// command list lives in `print_strict_help` (`help advanced`).
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
15476
/// Print the one-screen session dashboard for `project`: header with
/// version/label/frontier id, finding/event/proposal counts, then
/// conditional rollup lines (pending inbox, causal-audit problems,
/// bridges, replications), and finally the quick-verb legend.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Header shows only a 16-character prefix of the frontier id.
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Tally proposals awaiting review, bucketed by kind for the inbox line.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Frontier-wide identifiability audit, summarized into counts.
    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Bridge rollup: scan .vela/bridges/*.json. Every .json file counts
    // toward the total, even if it later fails to read or parse; only
    // successfully parsed bridges contribute to the status counters.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replication rollup: distinct findings with at least one successful
    // replication, plus the count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Header version drops the "vela/" prefix from the compiler version
    // string when present.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        " {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " vfr_id {}… repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        " findings {:>4} events {} proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // Inbox line only when something is actually pending.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!(" {} · {}", style::warn("inbox"), parts.join(" "));
    }
    // Audit line only when there are problems; severity styling escalates
    // from warn to lost when anything is underidentified.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            " {} · {} underidentified · {} conditional",
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            " {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            " {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    // Quick-verb legend matching the dispatch in `run_session_verb`.
    println!();
    println!(" type a verb or ask anything:");
    println!(" a audit problems i inbox (pending) b bridges");
    println!(" g causal graph l log (recent) c counterfactuals");
    println!(" s refresh status h help (more verbs) q quit");
    println!();
}
15596
15597fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
15599 match verb {
15600 "a" | "audit" => {
15601 let action = CausalAction::Audit {
15602 frontier: repo_path.to_path_buf(),
15603 problems_only: true,
15604 json: false,
15605 };
15606 cmd_causal(action);
15607 true
15608 }
15609 "i" | "inbox" => {
15610 let action = ProposalAction::List {
15611 frontier: repo_path.to_path_buf(),
15612 status: Some("pending_review".into()),
15613 json: false,
15614 };
15615 cmd_proposals(action);
15616 true
15617 }
15618 "b" | "bridges" => {
15619 let action = BridgesAction::List {
15620 frontier: repo_path.to_path_buf(),
15621 status: None,
15622 json: false,
15623 };
15624 cmd_bridges(action);
15625 true
15626 }
15627 "g" | "graph" => {
15628 let action = CausalAction::Graph {
15629 frontier: repo_path.to_path_buf(),
15630 node: None,
15631 json: false,
15632 };
15633 cmd_causal(action);
15634 true
15635 }
15636 "l" | "log" => {
15637 cmd_log(repo_path, 10, None, false);
15638 true
15639 }
15640 "c" | "counterfactual" | "counterfactuals" => {
15641 let project = match repo::load_from_path(repo_path) {
15644 Ok(p) => p,
15645 Err(e) => {
15646 eprintln!("{} {e}", style::err_prefix());
15647 return true;
15648 }
15649 };
15650 println!();
15651 println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
15652 println!(" {}", style::tick_row(60));
15653 let mut pairs = 0usize;
15657 for child in &project.findings {
15658 for link in &child.links {
15659 if !matches!(link.link_type.as_str(), "depends" | "supports") {
15660 continue;
15661 }
15662 if link.mechanism.is_none() {
15663 continue;
15664 }
15665 let parent = link
15666 .target
15667 .split_once(':')
15668 .map_or(link.target.as_str(), |(_, r)| r);
15669 pairs += 1;
15670 if pairs <= 10 {
15671 println!(" · do({parent}) → {}", child.id);
15672 }
15673 }
15674 }
15675 if pairs == 0 {
15676 println!(" no mechanism-annotated edges found.");
15677 println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
15678 } else {
15679 println!();
15680 println!(" {pairs} live pair(s). Run with:");
15681 println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
15682 }
15683 println!();
15684 true
15685 }
15686 "s" | "status" | "refresh" => {
15687 match repo::load_from_path(repo_path) {
15689 Ok(p) => print_session_dashboard(&p, repo_path),
15690 Err(e) => eprintln!("{} {e}", style::err_prefix()),
15691 }
15692 true
15693 }
15694 "h" | "help" | "?" => {
15695 print_session_help();
15696 true
15697 }
15698 _ => false,
15699 }
15700}
15701
15702fn run_session() {
15703 let repo_path = match find_vela_repo() {
15704 Some(p) => p,
15705 None => {
15706 println!();
15707 println!(
15708 " {}",
15709 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
15710 );
15711 println!(" {}", style::tick_row(60));
15712 println!(" Run `vela init` here to create a frontier, or cd into one.");
15713 println!(" Or run `vela help` for the command list.");
15714 println!();
15715 return;
15716 }
15717 };
15718
15719 let project = match repo::load_from_path(&repo_path) {
15720 Ok(p) => p,
15721 Err(e) => {
15722 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
15723 std::process::exit(1);
15724 }
15725 };
15726
15727 print_session_dashboard(&project, &repo_path);
15728
15729 use std::io::{BufRead, Write};
15730 let stdin = std::io::stdin();
15731 let mut stdout = std::io::stdout();
15732 loop {
15733 print!(" > ");
15734 stdout.flush().ok();
15735 let mut line = String::new();
15736 if stdin.lock().read_line(&mut line).is_err() {
15737 break;
15738 }
15739 let input = line.trim();
15740 if input.is_empty() {
15741 continue;
15742 }
15743 if matches!(input, "q" | "quit" | "exit") {
15744 break;
15745 }
15746 if run_session_verb(input, &repo_path) {
15747 continue;
15748 }
15749 let project = match repo::load_from_path(&repo_path) {
15751 Ok(p) => p,
15752 Err(e) => {
15753 eprintln!("{} {e}", style::err_prefix());
15754 continue;
15755 }
15756 };
15757 answer(&project, input, false);
15758 }
15759}
15760
15761pub fn run_from_args() {
15762 style::init();
15763 let args = std::env::args().collect::<Vec<_>>();
15764 match args.get(1).map(String::as_str) {
15765 None => {
15769 run_session();
15770 return;
15771 }
15772 Some("-h" | "--help" | "help") => {
15773 if args.get(2).map(String::as_str) == Some("advanced") {
15776 print_strict_help();
15777 } else {
15778 print_session_help();
15779 }
15780 return;
15781 }
15782 Some("-V" | "--version" | "version") => {
15783 println!("vela {}", env!("CARGO_PKG_VERSION"));
15784 return;
15785 }
15786 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
15787 let json = args.iter().any(|arg| arg == "--json");
15788 let frontier = args
15789 .iter()
15790 .skip(3)
15791 .find(|arg| !arg.starts_with('-'))
15792 .map(PathBuf::from)
15793 .unwrap_or_else(|| {
15794 eprintln!(
15795 "{} proof verify requires a frontier repo",
15796 style::err_prefix()
15797 );
15798 std::process::exit(2);
15799 });
15800 cmd_proof_verify(&frontier, json);
15801 return;
15802 }
15803 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
15804 let frontier = args
15805 .iter()
15806 .skip(3)
15807 .find(|arg| !arg.starts_with('-'))
15808 .map(PathBuf::from)
15809 .unwrap_or_else(|| {
15810 eprintln!(
15811 "{} proof explain requires a frontier repo",
15812 style::err_prefix()
15813 );
15814 std::process::exit(2);
15815 });
15816 cmd_proof_explain(&frontier);
15817 return;
15818 }
15819 Some(cmd) if !is_science_subcommand(cmd) => {
15820 eprintln!(
15821 "{} unknown or non-release command: {cmd}",
15822 style::err_prefix()
15823 );
15824 eprintln!("run `vela --help` for the strict v0 command surface.");
15825 std::process::exit(2);
15826 }
15827 Some(_) => {}
15828 }
15829 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
15830 runtime.block_on(run_command());
15831}
15832
15833fn fail(message: &str) -> ! {
15834 eprintln!("{} {message}", style::err_prefix());
15835 std::process::exit(1);
15836}
15837
15838fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
15843 if !valid.contains(&value) {
15844 fail(&format!(
15845 "invalid {flag} '{value}'. Valid: {}",
15846 valid.join(", ")
15847 ));
15848 }
15849}
15850
/// Expression-position adapter over [`fail`]: usable wherever a value of
/// type `T` is expected (e.g. inside `unwrap_or_else` or a match arm),
/// even though it never actually produces one — `fail` exits the process.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}