1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Top-level parser for the `vela` binary. clap derives argument parsing,
// `--version` (from Cargo metadata via `version`), and the about string.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    // Every invocation dispatches to exactly one subcommand in `Commands`.
    #[command(subcommand)]
    command: Commands,
}
29
// All top-level `vela` subcommands. Variant names map to kebab-case commands
// (e.g. `CompileNotes` -> `compile-notes`); fields without `#[arg(long)]` are
// positional in declaration order, so field order here is load-bearing.
//
// NOTE(review): variants carry no `///` doc comments, so clap derives no
// per-command help text from this declaration — presumably help strings were
// stripped or live elsewhere; confirm before relying on `--help` output.
#[derive(Subcommand)]
enum Commands {
    // --- ingestion / compilation commands (share --frontier, --backend, --dry-run, --json) ---
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // --- validation / inspection ---
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    // Serve: `frontier` is optional positionally but required unless
    // --frontiers or --setup is given (enforced by clap, not handler code).
    Serve {
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Ask: trailing_var_arg lets the question be typed unquoted after the path.
    Ask {
        frontier: PathBuf,
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    // Bridge: --novelty takes an explicit true/false value (ArgAction::Set),
    // defaulting to true, rather than acting as a bare flag.
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    Version,
    // --- nested subcommand groups ---
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    // --- project scaffolding ---
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    Quickstart {
        #[arg(default_value = "demo")]
        path: PathBuf,
        #[arg(long, default_value = "Quickstart frontier")]
        name: String,
        #[arg(long, default_value = "reviewer:you")]
        reviewer: String,
        #[arg(long)]
        assertion: Option<String>,
        #[arg(long)]
        keys_out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Lock {
        path: PathBuf,
        #[arg(long)]
        check: bool,
        #[arg(long)]
        json: bool,
    },
    Doc {
        path: PathBuf,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // --- per-finding curation commands (frontier + finding_id positionals,
    //     most gated behind --apply so the default run is a preview) ---
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // --- replication / evidence deposition ---
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // --- predictions & consensus ---
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // Ingest takes a String path (not PathBuf) — presumably so it can also
    // accept non-filesystem identifiers (e.g. URLs); confirm in the handler.
    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1791
// Subcommands of `vela atlas` (see `Commands::Atlas`): manage an atlas — a
// named collection built from multiple frontiers under `--atlases-root`.
#[derive(Subcommand)]
enum AtlasAction {
    // Create an atlas from one or more frontiers (comma-separated or repeated).
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Serve an atlas over HTTP; default port 3848, --no-open skips the browser
    // launch (presumably — confirm in the serve handler).
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    // Add/remove member frontiers of an existing atlas.
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1868
// Subcommands of `vela constellation` (see `Commands::Constellation`): one
// level above atlases — a constellation groups multiple atlases.
#[derive(Subcommand)]
enum ConstellationAction {
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Mirrors AtlasAction::Serve but on default port 3849.
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1913
// Subcommands of `vela carina` (see `Commands::Carina`); validation is
// implemented in the `carina_validate` module imported at the top of the file.
#[derive(Subcommand)]
enum CarinaAction {
    // Validate a file, optionally restricted to a single primitive kind.
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
    // Print the schema for one primitive.
    Schema { primitive: String },
}
1943
// Subcommands of `vela packet` (see `Commands::Packet`): inspect or validate
// a packet file (handled by the `packet` module).
#[derive(Subcommand)]
enum PacketAction {
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1959
// Subcommands of `vela sign` (see `Commands::Sign`): key generation, signing,
// and verification for a frontier (handled by the `sign` module).
#[derive(Subcommand)]
enum SignAction {
    // Generate a keypair; defaults to the project-local .vela/keys directory.
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Set the signature threshold for one finding to `--to` signatures.
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
2000
// Subcommands of `vela actor` (see `Commands::Actor`): register and list
// actors (identities with public keys) on a frontier.
#[derive(Subcommand)]
enum ActorAction {
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2037
// Subcommands of `vela causal` (see `Commands::Causal`): audit, query, and
// run counterfactuals over a frontier's causal claims.
#[derive(Subcommand)]
enum CausalAction {
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    // Effect of `source` on the node named by `--on`.
    Effect {
        frontier: PathBuf,
        source: String,
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Intervene on one node (set it to `--set-to`) and read off `--target`.
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2104
// Subcommands of `vela bridges` (see `Commands::Bridges`): derive and curate
// bridges between two frontiers (handled by the `bridge` module).
#[derive(Subcommand)]
enum BridgesAction {
    // Derive bridges between two frontiers; --label-a/--label-b name the sides.
    // Note the interleaved positional/flag order: frontier_a, then frontier_b.
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Same shape as Confirm; marks the bridge refuted instead.
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2172
// Subcommands of `vela federation` (see `Commands::Federation`): peer
// management and state synchronization between frontiers.
#[derive(Subcommand)]
enum FederationAction {
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    // Sync with a peer: --url overrides the stored peer URL, --via-hub routes
    // through the hub, and --allow-cross-vfr relaxes the VFR-id match guard.
    Sync {
        frontier: PathBuf,
        peer_id: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Push a conflict resolution to a peer; flag name is explicitly `--to`.
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2289
// Subcommands of `vela frontier` (see `Commands::Frontier`): create a
// frontier and manage its dependencies on other frontiers (VFR ids).
#[derive(Subcommand)]
enum FrontierAction {
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Record a dependency pinned to a specific snapshot.
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    // Re-pin dependencies against a registry; defaults to the hosted hub.
    RefreshDeps {
        frontier: PathBuf,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Diff a frontier over time: --since takes a point, --week a named week.
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2400
// Subcommands of `vela repo` (see `Commands::Repo`), handled by the `repo`
// module: report status and run diagnostics on a frontier repository.
#[derive(Subcommand)]
enum RepoAction {
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2420
// Subcommands of `vela queue` (see `Commands::Queue`): work with a pending
// signing queue; --queue-file overrides the default queue location.
#[derive(Subcommand)]
enum QueueAction {
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Sign queued items as --actor with --key; `--yes-to-all` (alias `--all`)
    // presumably skips per-item confirmation — confirm in the handler.
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2460
// Subcommands of `vela registry` (see `Commands::Registry`): publish, list,
// mirror, and pull frontiers from a registry/hub by VFR id.
#[derive(Subcommand)]
enum RegistryAction {
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Publish a frontier as --owner, signed with --key; --to picks the
    // registry, --locator overrides the advertised locator.
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Reverse-dependency query; defaults to the hosted hub.
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    // Pull a frontier into --out; --transitive follows dependencies up to
    // --depth levels (default 4).
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2553
// Subcommands of `vela gaps` (see `Commands::Gaps`); currently only ranking.
#[derive(Subcommand)]
enum GapsAction {
    // Rank gaps, showing the top N (default 10), optionally per --domain.
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2567
// Subcommands of `vela link` (see `Commands::Link`); currently only Add.
#[derive(Subcommand)]
enum LinkAction {
    // Add a typed link between two findings; --type defaults to "supports".
    // --no-check-target presumably skips validating that --to exists — confirm.
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2606
// Subcommands of `vela entity` (see `Commands::Entity`): bulk entity
// resolution over a frontier, and listing.
#[derive(Subcommand)]
enum EntityAction {
    Resolve {
        frontier: PathBuf,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
}
2629
#[derive(Subcommand)]
// Subcommands under `vela finding`: manual creation, supersession, and causal
// annotation of findings. Handled inline in `run_command`, which validates the
// enum-like string fields against `bundle::VALID_*` tables before applying.
// `//` comments are intentional: `///` would be picked up by clap as help text.
enum FindingCommands {
    // Add a single finding to a frontier by hand.
    Add {
        // Path to the frontier to modify (positional).
        frontier: PathBuf,
        // The assertion text of the finding.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        // Raw identifier because `type` is a Rust keyword.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Actor creating the finding (required).
        #[arg(long)]
        author: String,
        // Initial confidence in [0,1] — range not enforced by clap here;
        // default is deliberately low for manual assertions.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entities string, parsed by parse_entities() into (name, type) pairs;
        // each type is checked against bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Repeatable evidence-span specs, parsed by parse_evidence_spans().
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as describing a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space (absence-of-evidence).
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; split, trimmed, empties dropped.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split, trimmed, empties dropped.
        #[arg(long)]
        species: Option<String>,
        // Study-context flags recorded on the finding.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
        // Actually write the change (otherwise the handler reports what would
        // happen — mirrors the dry-run convention used elsewhere in this CLI).
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new one, recording the reason.
    // Same field semantics as `Add` except: entities_reviewed / evidence spans /
    // gap / negative_space are not accepted here (the handler passes defaults),
    // and the confidence/evidence-type defaults differ.
    Supersede {
        // Path to the frontier to modify (positional).
        frontier: PathBuf,
        // Identifier of the finding being superseded (positional).
        old_id: String,
        // Replacement assertion text.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Validated against bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Actor performing the supersession (required).
        #[arg(long)]
        author: String,
        // Why the old finding is being superseded (required).
        #[arg(long)]
        reason: String,
        // Higher default than `Add` (0.5 vs 0.3).
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        // Validated against bundle::VALID_EVIDENCE_TYPES; default differs
        // from `Add` ("experimental" vs "theoretical").
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        // Parsed by parse_entities(); types checked against
        // bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        // Optional bibliographic metadata for the replacement's source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated; split, trimmed, empties dropped.
        #[arg(long)]
        source_authors: Option<String>,
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated; split, trimmed, empties dropped.
        #[arg(long)]
        species: Option<String>,
        // Study-context flags recorded on the replacement finding.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
        // Actually write the change (see `Add::apply`).
        #[arg(long)]
        apply: bool,
    },
    // Set the causal-claim annotation on an existing finding.
    CausalSet {
        // Path to the frontier to modify (positional).
        frontier: PathBuf,
        // Identifier of the finding to annotate (positional).
        finding_id: String,
        // Causal claim; validated against bundle::VALID_CAUSAL_CLAIMS.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; validated against
        // bundle::VALID_CAUSAL_EVIDENCE_GRADES when present.
        #[arg(long)]
        grade: Option<String>,
        // Actor making the annotation (required).
        #[arg(long)]
        actor: String,
        // Why the annotation is being set (required).
        #[arg(long)]
        reason: String,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
    },
}
2820
#[derive(Subcommand)]
// Subcommands under `vela proposals` (dispatched by `cmd_proposals`):
// lifecycle operations on queued change proposals (`vpr_*` ids).
// `//` comments are intentional: `///` would be picked up by clap as help text.
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        // Path to the frontier (positional).
        frontier: PathBuf,
        // Optional status filter (e.g. pending/accepted — exact values are
        // defined by the handler; TODO confirm).
        #[arg(long)]
        status: Option<String>,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal in full.
    Show {
        frontier: PathBuf,
        // Proposal identifier (positional).
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of applying a proposal without applying it
    // (same machinery as `vela diff <vpr_*>` via proposals::preview_at_path).
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file into a frontier.
    Import {
        frontier: PathBuf,
        // File containing the proposals to import (positional).
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposals file without touching any frontier.
    Validate {
        // File to validate (positional).
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals from a frontier to a file, optionally by status.
    Export {
        frontier: PathBuf,
        // Destination file (positional).
        output: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal and apply it (see the top-level `Accept` handler,
    // which calls proposals::accept_at_path and reports the applied event id).
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded on the acceptance (required).
        #[arg(long)]
        reviewer: String,
        // Rationale recorded on the acceptance (required).
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal with a recorded reviewer and rationale.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2892
#[derive(Subcommand)]
// Subcommands under `vela source-adapter` (dispatched by the async
// `cmd_source_adapter`): run a named source-ingestion adapter over a frontier.
// `//` comments are intentional: `///` would be picked up by clap as help text.
enum SourceAdapterAction {
    // Run a source adapter.
    Run {
        // Path to the frontier to operate on (positional).
        frontier: PathBuf,
        // Name of the adapter to run (positional).
        adapter: String,
        // Actor identity recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Repeatable `--entry` flags selecting specific entries to process;
        // note the explicit long name differs from the field name.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority filter/selector — semantics defined by the
        // handler (TODO confirm).
        #[arg(long)]
        priority: Option<String>,
        // Also process entries that are otherwise excluded.
        #[arg(long)]
        include_excluded: bool,
        // Tolerate partial results instead of failing the whole run.
        #[arg(long)]
        allow_partial: bool,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched input files for the adapter.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Also apply artifacts produced by the adapter.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
    },
}
2930
#[derive(Subcommand)]
// Subcommands under `vela runtime-adapter` (dispatched by
// `cmd_runtime_adapter`): run a named adapter against a single input file.
// Unlike SourceAdapterAction::Run, `--input` is a required single file here.
// `//` comments are intentional: `///` would be picked up by clap as help text.
enum RuntimeAdapterAction {
    // Run a runtime adapter.
    Run {
        // Path to the frontier to operate on (positional).
        frontier: PathBuf,
        // Name of the adapter to run (positional).
        adapter: String,
        // Input file fed to the adapter (required).
        #[arg(long)]
        input: PathBuf,
        // Actor identity recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Also apply artifacts produced by the adapter.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
    },
}
2956
#[derive(Subcommand)]
// Subcommands under `vela bridge-kit` (dispatched by the async
// `cmd_bridge_kit`).
// `//` comments are intentional: `///` would be picked up by clap as help text.
enum BridgeKitAction {
    // Validate a bridge-kit source file.
    Validate {
        // File to validate (positional).
        source: PathBuf,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
    },
    // Verify the provenance of a packet file.
    VerifyProvenance {
        // Packet file to verify (positional).
        packet: PathBuf,
        // Emit machine-readable JSON instead of styled terminal output.
        #[arg(long)]
        json: bool,
    },
}
2981
2982pub async fn run_command() {
2983 dotenvy::dotenv().ok();
2984
2985 match Cli::parse().command {
2986 Commands::Scout {
2987 folder,
2988 frontier,
2989 backend,
2990 dry_run,
2991 json,
2992 } => {
2993 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
2994 }
2995 Commands::CompileNotes {
2996 vault,
2997 frontier,
2998 backend,
2999 max_files,
3000 max_items_per_category,
3001 dry_run,
3002 json,
3003 } => {
3004 cmd_compile_notes(
3005 &vault,
3006 &frontier,
3007 backend.as_deref(),
3008 max_files,
3009 max_items_per_category,
3010 dry_run,
3011 json,
3012 )
3013 .await;
3014 }
3015 Commands::CompileCode {
3016 root,
3017 frontier,
3018 backend,
3019 max_files,
3020 dry_run,
3021 json,
3022 } => {
3023 cmd_compile_code(
3024 &root,
3025 &frontier,
3026 backend.as_deref(),
3027 max_files,
3028 dry_run,
3029 json,
3030 )
3031 .await;
3032 }
3033 Commands::CompileData {
3034 root,
3035 frontier,
3036 backend,
3037 sample_rows,
3038 dry_run,
3039 json,
3040 } => {
3041 cmd_compile_data(
3042 &root,
3043 &frontier,
3044 backend.as_deref(),
3045 sample_rows,
3046 dry_run,
3047 json,
3048 )
3049 .await;
3050 }
3051 Commands::ReviewPending {
3052 frontier,
3053 backend,
3054 max_proposals,
3055 batch_size,
3056 dry_run,
3057 json,
3058 } => {
3059 cmd_review_pending(
3060 &frontier,
3061 backend.as_deref(),
3062 max_proposals,
3063 batch_size,
3064 dry_run,
3065 json,
3066 )
3067 .await;
3068 }
3069 Commands::FindTensions {
3070 frontier,
3071 backend,
3072 max_findings,
3073 dry_run,
3074 json,
3075 } => {
3076 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3077 }
3078 Commands::PlanExperiments {
3079 frontier,
3080 backend,
3081 max_findings,
3082 dry_run,
3083 json,
3084 } => {
3085 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3086 }
3087 Commands::Check {
3088 source,
3089 schema,
3090 stats,
3091 conformance,
3092 conformance_dir,
3093 all,
3094 schema_only,
3095 strict,
3096 fix,
3097 json,
3098 } => cmd_check(
3099 source.as_deref(),
3100 schema,
3101 stats,
3102 conformance,
3103 &conformance_dir,
3104 all,
3105 schema_only,
3106 strict,
3107 fix,
3108 json,
3109 ),
3110 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3111 Commands::Impact {
3112 frontier,
3113 finding_id,
3114 depth,
3115 json,
3116 } => cmd_impact(&frontier, &finding_id, depth, json),
3117 Commands::Discord {
3118 frontier,
3119 json,
3120 kind,
3121 } => cmd_discord(&frontier, json, kind.as_deref()),
3122 Commands::Normalize {
3123 source,
3124 out,
3125 write,
3126 dry_run,
3127 rewrite_ids,
3128 id_map,
3129 resync_provenance,
3130 json,
3131 } => cmd_normalize(
3132 &source,
3133 out.as_deref(),
3134 write,
3135 dry_run,
3136 rewrite_ids,
3137 id_map.as_deref(),
3138 resync_provenance,
3139 json,
3140 ),
3141 Commands::Proof {
3142 frontier,
3143 out,
3144 template,
3145 gold,
3146 record_proof_state,
3147 json,
3148 } => cmd_proof(
3149 &frontier,
3150 &out,
3151 &template,
3152 gold.as_deref(),
3153 record_proof_state,
3154 json,
3155 ),
3156 Commands::Repo { action } => cmd_repo(action),
3157 Commands::Serve {
3158 frontier,
3159 frontiers,
3160 backend,
3161 http,
3162 setup,
3163 check_tools,
3164 json,
3165 workbench,
3166 } => {
3167 if setup {
3168 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3169 } else if check_tools {
3170 let source =
3171 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3172 match serve::check_tools(source) {
3173 Ok(report) => {
3174 if json {
3175 println!(
3176 "{}",
3177 serde_json::to_string_pretty(&report)
3178 .expect("failed to serialize tool check report")
3179 );
3180 } else {
3181 print_tool_check_report(&report);
3182 }
3183 }
3184 Err(e) => fail(&format!("Tool check failed: {e}")),
3185 }
3186 } else {
3187 let source =
3188 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3189 let resolved_port = if workbench {
3191 Some(http.unwrap_or(3848))
3192 } else {
3193 http
3194 };
3195 if let Some(port) = resolved_port {
3196 serve::run_http(source, backend.as_deref(), port, workbench).await;
3197 } else {
3198 serve::run(source, backend.as_deref()).await;
3199 }
3200 }
3201 }
3202 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3203 Commands::Log {
3204 frontier,
3205 limit,
3206 kind,
3207 json,
3208 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3209 Commands::Inbox {
3210 frontier,
3211 kind,
3212 limit,
3213 json,
3214 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3215 Commands::Ask {
3216 frontier,
3217 question,
3218 json,
3219 } => cmd_ask(&frontier, &question.join(" "), json),
3220 Commands::Stats { frontier, json } => {
3221 if json {
3222 print_stats_json(&frontier);
3223 } else {
3224 cmd_stats(&frontier);
3225 }
3226 }
3227 Commands::Search {
3228 source,
3229 query,
3230 entity,
3231 r#type,
3232 all,
3233 limit,
3234 json,
3235 } => cmd_search(
3236 source.as_deref(),
3237 &query,
3238 entity.as_deref(),
3239 r#type.as_deref(),
3240 all.as_deref(),
3241 limit,
3242 json,
3243 ),
3244 Commands::Tensions {
3245 source,
3246 both_high,
3247 cross_domain,
3248 top,
3249 json,
3250 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3251 Commands::Gaps { action } => cmd_gaps(action),
3252 Commands::Bridge {
3253 inputs,
3254 novelty,
3255 top,
3256 } => cmd_bridge(&inputs, novelty, top).await,
3257 Commands::Export {
3258 frontier,
3259 format,
3260 output,
3261 } => export::run(&frontier, &format, output.as_deref()),
3262 Commands::Packet { action } => cmd_packet(action),
3263 Commands::Verify { path, json } => cmd_verify(&path, json),
3264 Commands::Bench {
3265 frontier,
3266 gold,
3267 candidate,
3268 sources,
3269 threshold,
3270 report,
3271 entity_gold,
3272 link_gold,
3273 suite,
3274 suite_ready,
3275 min_f1,
3276 min_precision,
3277 min_recall,
3278 no_thresholds,
3279 json,
3280 } => {
3281 if let Some(cand) = candidate.clone() {
3286 let Some(g) = gold.clone() else {
3287 eprintln!(
3288 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3289 style::err_prefix()
3290 );
3291 std::process::exit(2);
3292 };
3293 cmd_agent_bench(
3294 &g,
3295 &cand,
3296 sources.as_deref(),
3297 threshold,
3298 report.as_deref(),
3299 json,
3300 );
3301 } else {
3302 cmd_bench(BenchArgs {
3303 frontier,
3304 gold,
3305 entity_gold,
3306 link_gold,
3307 suite,
3308 suite_ready,
3309 min_f1,
3310 min_precision,
3311 min_recall,
3312 no_thresholds,
3313 json,
3314 });
3315 }
3316 }
3317 Commands::Conformance { dir } => {
3318 let _ = conformance::run(&dir);
3319 }
3320 Commands::Version => println!("vela 0.36.0"),
3321 Commands::Sign { action } => cmd_sign(action),
3322 Commands::Actor { action } => cmd_actor(action),
3323 Commands::Federation { action } => cmd_federation(action),
3324 Commands::Causal { action } => cmd_causal(action),
3325 Commands::Frontier { action } => cmd_frontier(action),
3326 Commands::Queue { action } => cmd_queue(action),
3327 Commands::Registry { action } => cmd_registry(action),
3328 Commands::Init {
3329 path,
3330 name,
3331 template,
3332 no_git,
3333 json,
3334 } => cmd_init(&path, &name, &template, !no_git, json),
3335 Commands::Quickstart {
3336 path,
3337 name,
3338 reviewer,
3339 assertion,
3340 keys_out,
3341 json,
3342 } => cmd_quickstart(
3343 &path,
3344 &name,
3345 &reviewer,
3346 assertion.as_deref(),
3347 keys_out.as_deref(),
3348 json,
3349 ),
3350 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3351 Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
3352 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3353 Commands::Diff {
3354 target,
3355 frontier_b,
3356 frontier,
3357 reviewer,
3358 json,
3359 quiet,
3360 } => {
3361 if target.starts_with("vpr_") {
3366 let frontier_root = frontier
3367 .clone()
3368 .or_else(|| frontier_b.clone())
3369 .unwrap_or_else(|| std::path::PathBuf::from("."));
3370 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3371 .unwrap_or_else(|e| fail_return(&e));
3372 let payload = json!({
3373 "ok": true,
3374 "command": "diff.proposal",
3375 "frontier": frontier_root.display().to_string(),
3376 "proposal_id": target,
3377 "preview": preview,
3378 });
3379 if json {
3380 println!(
3381 "{}",
3382 serde_json::to_string_pretty(&payload)
3383 .expect("failed to serialize diff preview")
3384 );
3385 } else {
3386 println!("vela diff · proposal preview");
3387 println!(" proposal: {}", target);
3388 println!(" kind: {}", preview.kind);
3389 println!(
3390 " findings: {} -> {}",
3391 preview.findings_before, preview.findings_after
3392 );
3393 println!(
3394 " artifacts: {} -> {}",
3395 preview.artifacts_before, preview.artifacts_after
3396 );
3397 println!(
3398 " events: {} -> {}",
3399 preview.events_before, preview.events_after
3400 );
3401 if !preview.changed_findings.is_empty() {
3402 println!(
3403 " findings changed: {}",
3404 preview.changed_findings.join(", ")
3405 );
3406 }
3407 }
3408 } else {
3409 let frontier_a = std::path::PathBuf::from(&target);
3410 let b = frontier_b.unwrap_or_else(|| {
3411 fail_return(
3412 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3413 )
3414 });
3415 diff::run(&frontier_a, &b, json, quiet);
3416 }
3417 }
3418 Commands::Proposals { action } => cmd_proposals(action),
3419 Commands::ArtifactToState {
3420 frontier,
3421 packet,
3422 actor,
3423 apply_artifacts,
3424 json,
3425 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3426 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
3427 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3428 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3429 Commands::Link { action } => cmd_link(action),
3430 Commands::Workbench {
3431 path,
3432 port,
3433 no_open,
3434 } => {
3435 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3436 fail(&e);
3437 }
3438 }
3439 Commands::Bridges { action } => cmd_bridges(action),
3440 Commands::Entity { action } => cmd_entity(action),
3441 Commands::Finding { command } => match command {
3442 FindingCommands::Add {
3443 frontier,
3444 assertion,
3445 r#type,
3446 source,
3447 source_type,
3448 author,
3449 confidence,
3450 evidence_type,
3451 entities,
3452 entities_reviewed,
3453 evidence_span,
3454 gap,
3455 negative_space,
3456 doi,
3457 pmid,
3458 year,
3459 journal,
3460 url,
3461 source_authors,
3462 conditions_text,
3463 species,
3464 in_vivo,
3465 in_vitro,
3466 human_data,
3467 clinical_trial,
3468 json,
3469 apply,
3470 } => {
3471 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3472 validate_enum_arg(
3473 "--evidence-type",
3474 &evidence_type,
3475 bundle::VALID_EVIDENCE_TYPES,
3476 );
3477 validate_enum_arg(
3478 "--source-type",
3479 &source_type,
3480 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3481 );
3482 let parsed_entities = parse_entities(&entities);
3483 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3484 for (name, etype) in &parsed_entities {
3485 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3486 fail(&format!(
3487 "invalid entity type '{}' for '{}'. Valid: {}",
3488 etype,
3489 name,
3490 bundle::VALID_ENTITY_TYPES.join(", "),
3491 ));
3492 }
3493 }
3494 let parsed_source_authors = source_authors
3495 .map(|s| {
3496 s.split(';')
3497 .map(|a| a.trim().to_string())
3498 .filter(|a| !a.is_empty())
3499 .collect()
3500 })
3501 .unwrap_or_default();
3502 let parsed_species = species
3503 .map(|s| {
3504 s.split(';')
3505 .map(|a| a.trim().to_string())
3506 .filter(|a| !a.is_empty())
3507 .collect()
3508 })
3509 .unwrap_or_default();
3510 let report = state::add_finding(
3511 &frontier,
3512 state::FindingDraftOptions {
3513 text: assertion,
3514 assertion_type: r#type,
3515 source,
3516 source_type,
3517 author,
3518 confidence,
3519 evidence_type,
3520 entities: parsed_entities,
3521 doi,
3522 pmid,
3523 year,
3524 journal,
3525 url,
3526 source_authors: parsed_source_authors,
3527 conditions_text,
3528 species: parsed_species,
3529 in_vivo,
3530 in_vitro,
3531 human_data,
3532 clinical_trial,
3533 entities_reviewed,
3534 evidence_spans: parsed_evidence_spans,
3535 gap,
3536 negative_space,
3537 },
3538 apply,
3539 )
3540 .unwrap_or_else(|e| fail_return(&e));
3541 print_state_report(&report, json);
3542 }
3543 FindingCommands::Supersede {
3544 frontier,
3545 old_id,
3546 assertion,
3547 r#type,
3548 source,
3549 source_type,
3550 author,
3551 reason,
3552 confidence,
3553 evidence_type,
3554 entities,
3555 doi,
3556 pmid,
3557 year,
3558 journal,
3559 url,
3560 source_authors,
3561 conditions_text,
3562 species,
3563 in_vivo,
3564 in_vitro,
3565 human_data,
3566 clinical_trial,
3567 json,
3568 apply,
3569 } => {
3570 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3571 validate_enum_arg(
3572 "--evidence-type",
3573 &evidence_type,
3574 bundle::VALID_EVIDENCE_TYPES,
3575 );
3576 validate_enum_arg(
3577 "--source-type",
3578 &source_type,
3579 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3580 );
3581 let parsed_entities = parse_entities(&entities);
3582 for (name, etype) in &parsed_entities {
3583 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3584 fail(&format!(
3585 "invalid entity type '{}' for '{}'. Valid: {}",
3586 etype,
3587 name,
3588 bundle::VALID_ENTITY_TYPES.join(", "),
3589 ));
3590 }
3591 }
3592 let parsed_source_authors = source_authors
3593 .map(|s| {
3594 s.split(';')
3595 .map(|a| a.trim().to_string())
3596 .filter(|a| !a.is_empty())
3597 .collect()
3598 })
3599 .unwrap_or_default();
3600 let parsed_species = species
3601 .map(|s| {
3602 s.split(';')
3603 .map(|a| a.trim().to_string())
3604 .filter(|a| !a.is_empty())
3605 .collect()
3606 })
3607 .unwrap_or_default();
3608 let report = state::supersede_finding(
3609 &frontier,
3610 &old_id,
3611 &reason,
3612 state::FindingDraftOptions {
3613 text: assertion,
3614 assertion_type: r#type,
3615 source,
3616 source_type,
3617 author,
3618 confidence,
3619 evidence_type,
3620 entities: parsed_entities,
3621 doi,
3622 pmid,
3623 year,
3624 journal,
3625 url,
3626 source_authors: parsed_source_authors,
3627 conditions_text,
3628 species: parsed_species,
3629 in_vivo,
3630 in_vitro,
3631 human_data,
3632 clinical_trial,
3633 entities_reviewed: false,
3634 evidence_spans: Vec::new(),
3635 gap: false,
3636 negative_space: false,
3637 },
3638 apply,
3639 )
3640 .unwrap_or_else(|e| fail_return(&e));
3641 print_state_report(&report, json);
3642 }
3643 FindingCommands::CausalSet {
3644 frontier,
3645 finding_id,
3646 claim,
3647 grade,
3648 actor,
3649 reason,
3650 json,
3651 } => {
3652 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3653 fail(&format!(
3654 "invalid --claim '{claim}'; valid: {:?}",
3655 bundle::VALID_CAUSAL_CLAIMS
3656 ));
3657 }
3658 if let Some(g) = grade.as_deref()
3659 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3660 {
3661 fail(&format!(
3662 "invalid --grade '{g}'; valid: {:?}",
3663 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3664 ));
3665 }
3666 let report = state::set_causal(
3667 &frontier,
3668 &finding_id,
3669 &claim,
3670 grade.as_deref(),
3671 &actor,
3672 &reason,
3673 )
3674 .unwrap_or_else(|e| fail_return(&e));
3675 print_state_report(&report, json);
3676 }
3677 },
3678 Commands::Review {
3679 frontier,
3680 finding_id,
3681 status,
3682 reason,
3683 reviewer,
3684 apply,
3685 json,
3686 } => {
3687 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3688 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3689 let report = state::review_finding(
3690 &frontier,
3691 &finding_id,
3692 state::ReviewOptions {
3693 status,
3694 reason,
3695 reviewer,
3696 },
3697 apply,
3698 )
3699 .unwrap_or_else(|e| fail_return(&e));
3700 print_state_report(&report, json);
3701 }
3702 Commands::Note {
3703 frontier,
3704 finding_id,
3705 text,
3706 author,
3707 apply,
3708 json,
3709 } => {
3710 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3711 .unwrap_or_else(|e| fail_return(&e));
3712 print_state_report(&report, json);
3713 }
3714 Commands::Caveat {
3715 frontier,
3716 finding_id,
3717 text,
3718 author,
3719 apply,
3720 json,
3721 } => {
3722 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3723 .unwrap_or_else(|e| fail_return(&e));
3724 print_state_report(&report, json);
3725 }
3726 Commands::Revise {
3727 frontier,
3728 finding_id,
3729 confidence,
3730 reason,
3731 reviewer,
3732 apply,
3733 json,
3734 } => {
3735 let report = state::revise_confidence(
3736 &frontier,
3737 &finding_id,
3738 state::ReviseOptions {
3739 confidence,
3740 reason,
3741 reviewer,
3742 },
3743 apply,
3744 )
3745 .unwrap_or_else(|e| fail_return(&e));
3746 print_state_report(&report, json);
3747 }
3748 Commands::Reject {
3749 frontier,
3750 finding_id,
3751 reason,
3752 reviewer,
3753 apply,
3754 json,
3755 } => {
3756 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3757 .unwrap_or_else(|e| fail_return(&e));
3758 print_state_report(&report, json);
3759 }
3760 Commands::History {
3761 frontier,
3762 finding_id,
3763 json,
3764 as_of,
3765 } => {
3766 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3767 .unwrap_or_else(|e| fail_return(&e));
3768 if json {
3769 println!(
3770 "{}",
3771 serde_json::to_string_pretty(&payload)
3772 .expect("failed to serialize history response")
3773 );
3774 } else {
3775 print_history(&payload);
3776 }
3777 }
3778 Commands::ImportEvents { source, into, json } => {
3779 let report =
3780 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3781 if json {
3782 println!(
3783 "{}",
3784 serde_json::to_string_pretty(&json!({
3785 "ok": true,
3786 "command": "import-events",
3787 "source": report.source,
3788 "target": into.display().to_string(),
3789 "summary": {
3790 "imported": report.imported,
3791 "new": report.new,
3792 "duplicate": report.duplicate,
3793 "canonical_events_imported": report.events_imported,
3794 "canonical_events_new": report.events_new,
3795 "canonical_events_duplicate": report.events_duplicate,
3796 }
3797 }))
3798 .expect("failed to serialize import-events response")
3799 );
3800 } else {
3801 println!("{report}");
3802 }
3803 }
3804 Commands::Retract {
3805 source,
3806 finding_id,
3807 reason,
3808 reviewer,
3809 apply,
3810 json,
3811 } => {
3812 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3813 .unwrap_or_else(|e| fail_return(&e));
3814 print_state_report(&report, json);
3815 }
3816 Commands::LocatorRepair {
3817 frontier,
3818 atom_id,
3819 locator,
3820 reviewer,
3821 reason,
3822 apply,
3823 json,
3824 } => {
3825 cmd_locator_repair(
3826 &frontier,
3827 &atom_id,
3828 locator.as_deref(),
3829 &reviewer,
3830 &reason,
3831 apply,
3832 json,
3833 );
3834 }
3835 Commands::SourceFetch {
3836 identifier,
3837 cache,
3838 out,
3839 refresh,
3840 json,
3841 } => {
3842 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3843 }
3844 Commands::SpanRepair {
3845 frontier,
3846 finding_id,
3847 section,
3848 text,
3849 reviewer,
3850 reason,
3851 apply,
3852 json,
3853 } => {
3854 cmd_span_repair(
3855 &frontier,
3856 &finding_id,
3857 §ion,
3858 &text,
3859 &reviewer,
3860 &reason,
3861 apply,
3862 json,
3863 );
3864 }
3865 Commands::EntityAdd {
3866 frontier,
3867 finding_id,
3868 entity,
3869 entity_type,
3870 reviewer,
3871 reason,
3872 apply,
3873 json,
3874 } => {
3875 let report = state::add_finding_entity(
3876 &frontier,
3877 &finding_id,
3878 &entity,
3879 &entity_type,
3880 &reviewer,
3881 &reason,
3882 apply,
3883 )
3884 .unwrap_or_else(|e| fail_return(&e));
3885 print_state_report(&report, json);
3886 }
3887 Commands::EntityResolve {
3888 frontier,
3889 finding_id,
3890 entity,
3891 source,
3892 id,
3893 confidence,
3894 matched_name,
3895 resolution_method,
3896 reviewer,
3897 reason,
3898 apply,
3899 json,
3900 } => {
3901 cmd_entity_resolve(
3902 &frontier,
3903 &finding_id,
3904 &entity,
3905 &source,
3906 &id,
3907 confidence,
3908 matched_name.as_deref(),
3909 &resolution_method,
3910 &reviewer,
3911 &reason,
3912 apply,
3913 json,
3914 );
3915 }
3916 Commands::Propagate {
3917 frontier,
3918 retract,
3919 reduce_confidence,
3920 to,
3921 output,
3922 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3923 Commands::Replicate {
3924 frontier,
3925 target,
3926 outcome,
3927 by,
3928 conditions,
3929 source_title,
3930 doi,
3931 pmid,
3932 sample_size,
3933 note,
3934 previous_attempt,
3935 no_cascade,
3936 json,
3937 } => cmd_replicate(
3938 &frontier,
3939 &target,
3940 &outcome,
3941 &by,
3942 &conditions,
3943 &source_title,
3944 doi.as_deref(),
3945 pmid.as_deref(),
3946 sample_size.as_deref(),
3947 ¬e,
3948 previous_attempt.as_deref(),
3949 no_cascade,
3950 json,
3951 ),
3952 Commands::Replications {
3953 frontier,
3954 target,
3955 json,
3956 } => cmd_replications(&frontier, target.as_deref(), json),
3957 Commands::DatasetAdd {
3958 frontier,
3959 name,
3960 version,
3961 content_hash,
3962 url,
3963 license,
3964 source_title,
3965 doi,
3966 row_count,
3967 json,
3968 } => cmd_dataset_add(
3969 &frontier,
3970 &name,
3971 version.as_deref(),
3972 &content_hash,
3973 url.as_deref(),
3974 license.as_deref(),
3975 &source_title,
3976 doi.as_deref(),
3977 row_count,
3978 json,
3979 ),
3980 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
3981 Commands::CodeAdd {
3982 frontier,
3983 language,
3984 repo_url,
3985 commit,
3986 path,
3987 content_hash,
3988 line_start,
3989 line_end,
3990 entry_point,
3991 json,
3992 } => cmd_code_add(
3993 &frontier,
3994 &language,
3995 repo_url.as_deref(),
3996 commit.as_deref(),
3997 &path,
3998 &content_hash,
3999 line_start,
4000 line_end,
4001 entry_point.as_deref(),
4002 json,
4003 ),
4004 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
4005 Commands::ArtifactAdd {
4006 frontier,
4007 kind,
4008 name,
4009 file,
4010 url,
4011 content_hash,
4012 media_type,
4013 license,
4014 source_title,
4015 source_url,
4016 doi,
4017 target,
4018 metadata,
4019 access_tier,
4020 deposited_by,
4021 reason,
4022 json,
4023 } => cmd_artifact_add(
4024 &frontier,
4025 &kind,
4026 &name,
4027 file.as_deref(),
4028 url.as_deref(),
4029 content_hash.as_deref(),
4030 media_type.as_deref(),
4031 license.as_deref(),
4032 source_title.as_deref(),
4033 source_url.as_deref(),
4034 doi.as_deref(),
4035 target,
4036 metadata,
4037 &access_tier,
4038 &deposited_by,
4039 &reason,
4040 json,
4041 ),
4042 Commands::Artifacts {
4043 frontier,
4044 target,
4045 json,
4046 } => cmd_artifacts(&frontier, target.as_deref(), json),
4047 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4048 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4049 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4050 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4051 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4052 Commands::ClinicalTrialImport {
4053 frontier,
4054 nct_id,
4055 input_json,
4056 target,
4057 deposited_by,
4058 reason,
4059 license,
4060 json,
4061 } => {
4062 cmd_clinical_trial_import(
4063 &frontier,
4064 &nct_id,
4065 input_json.as_deref(),
4066 target,
4067 &deposited_by,
4068 &reason,
4069 &license,
4070 json,
4071 )
4072 .await
4073 }
4074 Commands::NegativeResultAdd {
4075 frontier,
4076 kind,
4077 deposited_by,
4078 reason,
4079 conditions_text,
4080 notes,
4081 target,
4082 endpoint,
4083 intervention,
4084 comparator,
4085 population,
4086 n_enrolled,
4087 power,
4088 ci_lower,
4089 ci_upper,
4090 effect_size_threshold,
4091 registry_id,
4092 reagent,
4093 observation,
4094 attempts,
4095 source_title,
4096 doi,
4097 url,
4098 year,
4099 json,
4100 } => cmd_negative_result_add(
4101 &frontier,
4102 &kind,
4103 &deposited_by,
4104 &reason,
4105 &conditions_text,
4106 ¬es,
4107 target,
4108 endpoint.as_deref(),
4109 intervention.as_deref(),
4110 comparator.as_deref(),
4111 population.as_deref(),
4112 n_enrolled,
4113 power,
4114 ci_lower,
4115 ci_upper,
4116 effect_size_threshold,
4117 registry_id.as_deref(),
4118 reagent.as_deref(),
4119 observation.as_deref(),
4120 attempts,
4121 &source_title,
4122 doi.as_deref(),
4123 url.as_deref(),
4124 year,
4125 json,
4126 ),
4127 Commands::NegativeResults {
4128 frontier,
4129 target,
4130 json,
4131 } => cmd_negative_results(&frontier, target.as_deref(), json),
4132 Commands::TrajectoryCreate {
4133 frontier,
4134 deposited_by,
4135 reason,
4136 target,
4137 notes,
4138 json,
4139 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4140 Commands::TrajectoryStep {
4141 frontier,
4142 trajectory_id,
4143 kind,
4144 description,
4145 actor,
4146 reason,
4147 reference,
4148 json,
4149 } => cmd_trajectory_step(
4150 &frontier,
4151 &trajectory_id,
4152 &kind,
4153 &description,
4154 &actor,
4155 &reason,
4156 reference,
4157 json,
4158 ),
4159 Commands::Trajectories {
4160 frontier,
4161 target,
4162 json,
4163 } => cmd_trajectories(&frontier, target.as_deref(), json),
4164 Commands::TierSet {
4165 frontier,
4166 object_type,
4167 object_id,
4168 tier,
4169 actor,
4170 reason,
4171 json,
4172 } => cmd_tier_set(
4173 &frontier,
4174 &object_type,
4175 &object_id,
4176 &tier,
4177 &actor,
4178 &reason,
4179 json,
4180 ),
4181 Commands::Predict {
4182 frontier,
4183 by,
4184 claim,
4185 criterion,
4186 resolves_by,
4187 confidence,
4188 target,
4189 outcome,
4190 conditions,
4191 json,
4192 } => cmd_predict(
4193 &frontier,
4194 &by,
4195 &claim,
4196 &criterion,
4197 resolves_by.as_deref(),
4198 confidence,
4199 &target,
4200 &outcome,
4201 &conditions,
4202 json,
4203 ),
4204 Commands::Resolve {
4205 frontier,
4206 prediction,
4207 outcome,
4208 matched,
4209 by,
4210 confidence,
4211 source_title,
4212 doi,
4213 json,
4214 } => cmd_resolve(
4215 &frontier,
4216 &prediction,
4217 &outcome,
4218 matched,
4219 &by,
4220 confidence,
4221 &source_title,
4222 doi.as_deref(),
4223 json,
4224 ),
4225 Commands::Predictions {
4226 frontier,
4227 by,
4228 open,
4229 json,
4230 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4231 Commands::Calibration {
4232 frontier,
4233 actor,
4234 json,
4235 } => cmd_calibration(&frontier, actor.as_deref(), json),
4236 Commands::PredictionsExpire {
4237 frontier,
4238 now,
4239 dry_run,
4240 json,
4241 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4242 Commands::Consensus {
4243 frontier,
4244 target,
4245 weighting,
4246 causal_claim,
4247 causal_grade_min,
4248 json,
4249 } => cmd_consensus(
4250 &frontier,
4251 &target,
4252 &weighting,
4253 causal_claim.as_deref(),
4254 causal_grade_min.as_deref(),
4255 json,
4256 ),
4257
4258 Commands::Ingest {
4261 path,
4262 frontier,
4263 backend,
4264 actor,
4265 dry_run,
4266 json,
4267 } => {
4268 cmd_ingest(
4269 &path,
4270 &frontier,
4271 backend.as_deref(),
4272 actor.as_deref(),
4273 dry_run,
4274 json,
4275 )
4276 .await
4277 }
4278
4279 Commands::Propose {
4280 frontier,
4281 finding_id,
4282 status,
4283 reason,
4284 reviewer,
4285 apply,
4286 json,
4287 } => {
4288 let options = state::ReviewOptions {
4291 status: status.clone(),
4292 reason: reason.clone(),
4293 reviewer: reviewer.clone(),
4294 };
4295 let report = state::review_finding(&frontier, &finding_id, options, apply)
4296 .unwrap_or_else(|e| fail_return(&e));
4297 print_state_report(&report, json);
4298 }
4299
4300 Commands::Accept {
4301 frontier,
4302 proposal_id,
4303 reviewer,
4304 reason,
4305 json,
4306 } => {
4307 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4309 .unwrap_or_else(|e| fail_return(&e));
4310 let payload = json!({
4311 "ok": true,
4312 "command": "accept",
4313 "frontier": frontier.display().to_string(),
4314 "proposal_id": proposal_id,
4315 "reviewer": reviewer,
4316 "applied_event_id": event_id,
4317 });
4318 if json {
4319 println!(
4320 "{}",
4321 serde_json::to_string_pretty(&payload)
4322 .expect("failed to serialize accept response")
4323 );
4324 } else {
4325 println!(
4326 "{} accepted and applied proposal {}",
4327 style::ok("ok"),
4328 proposal_id
4329 );
4330 println!(" event: {}", event_id);
4331 }
4332 }
4333
4334 Commands::Attest {
4335 frontier,
4336 event,
4337 attester,
4338 scope_note,
4339 proof_id,
4340 signature,
4341 key,
4342 json,
4343 } => {
4344 if let Some(target_event_id) = event {
4348 let attester_id = attester.unwrap_or_else(|| {
4349 fail_return("attest: --attester is required in per-event mode")
4350 });
4351 let scope = scope_note.unwrap_or_else(|| {
4352 fail_return("attest: --scope-note is required in per-event mode")
4353 });
4354 let attestation_event_id = state::record_attestation(
4355 &frontier,
4356 &target_event_id,
4357 &attester_id,
4358 &scope,
4359 proof_id.as_deref(),
4360 signature.as_deref(),
4361 )
4362 .unwrap_or_else(|e| fail_return(&e));
4363 if json {
4364 let payload = json!({
4365 "ok": true,
4366 "command": "attest.event",
4367 "frontier": frontier.display().to_string(),
4368 "target_event_id": target_event_id,
4369 "attestation_event_id": attestation_event_id,
4370 "attester_id": attester_id,
4371 });
4372 println!(
4373 "{}",
4374 serde_json::to_string_pretty(&payload)
4375 .expect("failed to serialize attest.event response")
4376 );
4377 } else {
4378 println!(
4379 "{} attested {} by {} ({})",
4380 style::ok("ok"),
4381 target_event_id,
4382 attester_id,
4383 attestation_event_id
4384 );
4385 }
4386 return;
4387 }
4388 let key_path = key.unwrap_or_else(|| {
4390 fail_return(
4391 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4392 )
4393 });
4394 let count =
4395 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4396 let payload = json!({
4397 "ok": true,
4398 "command": "attest",
4399 "frontier": frontier.display().to_string(),
4400 "private_key": key_path.display().to_string(),
4401 "signed": count,
4402 });
4403 if json {
4404 println!(
4405 "{}",
4406 serde_json::to_string_pretty(&payload)
4407 .expect("failed to serialize attest response")
4408 );
4409 } else {
4410 println!(
4411 "{} {count} findings in {}",
4412 style::ok("attested"),
4413 frontier.display()
4414 );
4415 }
4416 }
4417
4418 Commands::Lineage {
4419 frontier,
4420 finding_id,
4421 as_of,
4422 json,
4423 } => {
4424 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4426 .unwrap_or_else(|e| fail_return(&e));
4427 if json {
4428 println!(
4429 "{}",
4430 serde_json::to_string_pretty(&payload)
4431 .expect("failed to serialize lineage response")
4432 );
4433 } else {
4434 print_history(&payload);
4435 }
4436 }
4437
4438 Commands::Carina { action } => cmd_carina(action),
4439
4440 Commands::Atlas { action } => cmd_atlas(action).await,
4441
4442 Commands::Constellation { action } => cmd_constellation(action).await,
4443 }
4444}
4445
4446async fn cmd_atlas(action: AtlasAction) {
4451 match action {
4452 AtlasAction::Init {
4453 name,
4454 frontiers,
4455 domain,
4456 scope_note,
4457 atlases_root,
4458 json,
4459 } => match ATLAS_INIT_HANDLER.get() {
4460 Some(handler) => {
4461 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4462 }
4463 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4464 },
4465 AtlasAction::Materialize {
4466 name,
4467 atlases_root,
4468 json,
4469 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4470 Some(handler) => handler(atlases_root, name, json).await,
4471 None => fail("vela atlas materialize: handler not registered"),
4472 },
4473 AtlasAction::Serve {
4474 name,
4475 atlases_root,
4476 port,
4477 no_open,
4478 } => {
4479 match ATLAS_SERVE_HANDLER.get() {
4483 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4484 None => fail("vela atlas serve: handler not registered"),
4485 }
4486 }
4487 AtlasAction::Update {
4488 name,
4489 add_frontier,
4490 remove_vfr_id,
4491 atlases_root,
4492 json,
4493 } => match ATLAS_UPDATE_HANDLER.get() {
4494 Some(handler) => {
4495 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4496 }
4497 None => fail("vela atlas update: handler not registered"),
4498 },
4499 }
4500}
4501
4502async fn cmd_constellation(action: ConstellationAction) {
4506 match action {
4507 ConstellationAction::Init {
4508 name,
4509 atlases,
4510 scope_note,
4511 constellations_root,
4512 json,
4513 } => match CONSTELLATION_INIT_HANDLER.get() {
4514 Some(handler) => {
4515 handler(constellations_root, name, scope_note, atlases, json).await;
4516 }
4517 None => fail(
4518 "vela constellation init: handler not registered (built without vela-constellation)",
4519 ),
4520 },
4521 ConstellationAction::Materialize {
4522 name,
4523 constellations_root,
4524 json,
4525 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4526 Some(handler) => handler(constellations_root, name, json).await,
4527 None => fail("vela constellation materialize: handler not registered"),
4528 },
4529 ConstellationAction::Serve {
4530 name,
4531 constellations_root,
4532 port,
4533 no_open,
4534 } => match CONSTELLATION_SERVE_HANDLER.get() {
4535 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4536 None => fail("vela constellation serve: handler not registered"),
4537 },
4538 }
4539}
4540
4541fn cmd_carina(action: CarinaAction) {
4544 match action {
4545 CarinaAction::List { json } => {
4546 if json {
4547 println!(
4548 "{}",
4549 serde_json::to_string_pretty(&json!({
4550 "ok": true,
4551 "command": "carina.list",
4552 "primitives": carina_validate::PRIMITIVE_NAMES,
4553 }))
4554 .expect("failed to serialize carina.list")
4555 );
4556 } else {
4557 println!("Carina primitives bundled with this build:");
4558 for name in carina_validate::PRIMITIVE_NAMES {
4559 println!(" · {name}");
4560 }
4561 }
4562 }
4563 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4564 Some(text) => print!("{text}"),
4565 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4566 },
4567 CarinaAction::Validate {
4568 path,
4569 primitive,
4570 json,
4571 } => {
4572 let text = std::fs::read_to_string(&path)
4573 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4574 let value: Value = serde_json::from_str(&text)
4575 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4576 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4582 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4583 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4584 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4585 for (key, child) in primitives {
4586 let outcome = carina_validate::validate(key, child)
4587 .map(|()| carina_validate::detect_primitive(child));
4588 report.push((key.clone(), outcome));
4589 }
4590 } else {
4591 let outcome = match primitive.as_deref() {
4592 Some(name) => carina_validate::validate(name, &value).map(|()| {
4593 carina_validate::PRIMITIVE_NAMES
4594 .iter()
4595 .copied()
4596 .find(|p| *p == name)
4597 }),
4598 None => carina_validate::validate_auto(&value).map(Some),
4599 };
4600 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4601 report.push((label, outcome));
4602 }
4603
4604 let total = report.len();
4605 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4606 let fail = total - pass;
4607
4608 if json {
4609 let entries: Vec<Value> = report
4610 .iter()
4611 .map(|(label, r)| match r {
4612 Ok(name) => json!({
4613 "key": label,
4614 "primitive": name,
4615 "ok": true,
4616 }),
4617 Err(errs) => json!({
4618 "key": label,
4619 "ok": false,
4620 "errors": errs,
4621 }),
4622 })
4623 .collect();
4624 println!(
4625 "{}",
4626 serde_json::to_string_pretty(&json!({
4627 "ok": fail == 0,
4628 "command": "carina.validate",
4629 "file": path.display().to_string(),
4630 "total": total,
4631 "passed": pass,
4632 "failed": fail,
4633 "entries": entries,
4634 }))
4635 .expect("failed to serialize carina.validate")
4636 );
4637 } else {
4638 for (label, r) in &report {
4639 match r {
4640 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4641 Ok(None) => println!(" {} {label}", style::ok("ok")),
4642 Err(errs) => {
4643 println!(" {} {label}", style::lost("fail"));
4644 for e in errs {
4645 println!(" {e}");
4646 }
4647 }
4648 }
4649 }
4650 println!();
4651 if fail == 0 {
4652 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4653 } else {
4654 println!(
4655 "{} {pass}/{total} valid · {fail} failed",
4656 style::lost("carina.validate")
4657 );
4658 }
4659 }
4660
4661 if fail > 0 {
4662 std::process::exit(1);
4663 }
4664 }
4665 }
4666}
4667
4668fn cmd_consensus(
4671 frontier: &Path,
4672 target: &str,
4673 weighting_str: &str,
4674 causal_claim: Option<&str>,
4675 causal_grade_min: Option<&str>,
4676 json: bool,
4677) {
4678 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4679
4680 if !target.starts_with("vf_") {
4681 fail(&format!("target `{target}` is not a vf_ finding id"));
4682 }
4683 let scheme =
4684 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4685
4686 let parsed_claim = match causal_claim {
4687 None => None,
4688 Some("correlation") => Some(CausalClaim::Correlation),
4689 Some("mediation") => Some(CausalClaim::Mediation),
4690 Some("intervention") => Some(CausalClaim::Intervention),
4691 Some(other) => fail_return(&format!(
4692 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4693 )),
4694 };
4695 let parsed_grade = match causal_grade_min {
4696 None => None,
4697 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4698 Some("observational") => Some(CausalEvidenceGrade::Observational),
4699 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4700 Some("rct") => Some(CausalEvidenceGrade::Rct),
4701 Some(other) => fail_return(&format!(
4702 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4703 )),
4704 };
4705 let filter = crate::aggregate::AggregateFilter {
4706 causal_claim: parsed_claim,
4707 causal_grade_min: parsed_grade,
4708 };
4709 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4710
4711 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4712 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4713
4714 if json {
4715 println!(
4716 "{}",
4717 serde_json::to_string_pretty(&result).expect("serialize consensus")
4718 );
4719 return;
4720 }
4721
4722 println!();
4723 println!(
4724 " {}",
4725 format!(
4726 "VELA · CONSENSUS · {} ({})",
4727 result.target, result.weighting
4728 )
4729 .to_uppercase()
4730 .dimmed()
4731 );
4732 println!(" {}", style::tick_row(60));
4733 println!(
4734 " target: {}",
4735 truncate(&result.target_assertion, 80)
4736 );
4737 println!(" similar findings: {}", result.n_findings);
4738 println!(
4739 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
4740 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
4741 );
4742 println!();
4743 println!(" constituents (sorted by weight):");
4744 let mut sorted = result.constituents.clone();
4745 sorted.sort_by(|a, b| {
4746 b.weight
4747 .partial_cmp(&a.weight)
4748 .unwrap_or(std::cmp::Ordering::Equal)
4749 });
4750 for c in sorted.iter().take(10) {
4751 let repls = if c.n_replications > 0 {
4752 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
4753 } else {
4754 String::new()
4755 };
4756 println!(
4757 " · w={:.2} raw={:.2} adj={:.2}{}",
4758 c.weight, c.raw_score, c.adjusted_score, repls
4759 );
4760 println!(" {}", truncate(&c.assertion_text, 88));
4761 }
4762 if result.constituents.len() > 10 {
4763 println!(" ... ({} more)", result.constituents.len() - 10);
4764 }
4765}
4766
4767fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4773 let trimmed = s.trim();
4774 if trimmed.eq_ignore_ascii_case("affirmed") {
4775 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4776 }
4777 if trimmed.eq_ignore_ascii_case("falsified") {
4778 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4779 }
4780 if let Some(rest) = trimmed.strip_prefix("cat:") {
4781 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4782 value: rest.to_string(),
4783 });
4784 }
4785 if let Some(rest) = trimmed.strip_prefix("quant:") {
4786 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4787 let (val_s, tol_s) = vt
4788 .split_once('±')
4789 .or_else(|| vt.split_once("+/-"))
4790 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4791 let value: f64 = val_s
4792 .parse()
4793 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4794 let tolerance: f64 = tol_s
4795 .parse()
4796 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4797 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4798 value,
4799 tolerance,
4800 units: units.to_string(),
4801 });
4802 }
4803 Err(format!(
4804 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4805 ))
4806}
4807
/// Record a new prediction against one or more `vf_` findings.
///
/// Validates the confidence range, parses the `--outcome` syntax, checks
/// that every comma-separated target is a `vf_` id present in the
/// frontier, then appends a `Prediction` to the frontier and saves it.
/// Duplicate prediction ids are reported and skipped, not overwritten.
/// Aborts the process (via `fail`/`fail_return`) on any validation or
/// I/O error.
#[allow(clippy::too_many_arguments)]
fn cmd_predict(
    frontier: &Path,
    by: &str,
    claim: &str,
    criterion: &str,
    resolves_by: Option<&str>,
    confidence: f64,
    target_csv: &str,
    outcome: &str,
    conditions_text: &str,
    json: bool,
) {
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Targets arrive as a comma-separated list; blanks are dropped.
    let targets: Vec<String> = target_csv
        .split(',')
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
        .collect();
    for t in &targets {
        if !t.starts_with("vf_") {
            fail(&format!("target `{t}` is not a vf_ id"));
        }
        if !project.findings.iter().any(|f| f.id == *t) {
            fail(&format!("target `{t}` not present in frontier"));
        }
    }

    // Heuristic condition flags inferred from keywords in the free-text
    // conditions (e.g. "in vitro", "clinical trial", "phase ").
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro"),
        in_vivo: lower.contains("in vivo"),
        human_data: lower.contains("human") || lower.contains("clinical"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let prediction = crate::bundle::Prediction::new(
        claim.to_string(),
        targets,
        None,
        resolves_by.map(|s| s.to_string()),
        criterion.to_string(),
        expected,
        by.to_string(),
        confidence,
        conditions,
    );

    // NOTE(review): a duplicate id presumably means an identical
    // prediction was already recorded (id derivation happens inside
    // Prediction::new) — skip rather than overwrite.
    if project.predictions.iter().any(|p| p.id == prediction.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "predict",
                    "reason": "prediction_already_exists",
                    "id": prediction.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} prediction {} already exists in {}; skipping.",
                style::warn("predict"),
                prediction.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = prediction.id.clone();
    project.predictions.push(prediction);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "predict",
                "id": new_id,
                "made_by": by,
                "confidence": confidence,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize predict result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · PREDICT · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  by:          {by}");
        println!("  confidence:  {confidence:.3}");
        if let Some(d) = resolves_by {
            println!("  resolves by: {d}");
        }
        println!("  outcome:     {outcome}");
        println!("  claim:       {}", truncate(claim, 88));
        println!();
        println!(
            "  {} prediction recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
4934
/// Record the resolution of an existing `vpred_` prediction.
///
/// Validates the prediction id and confidence, builds a synthetic
/// `Evidence` record (method "prediction_resolution") referencing
/// `source_title` when provided, appends a `Resolution` to the frontier
/// and saves it. Duplicate resolution ids are reported and skipped.
/// Aborts the process on any validation or I/O error.
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        // The source title (when given) is attached as a single span.
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // NOTE(review): `doi` is accepted on the CLI but explicitly discarded
    // here — the Resolution record has nowhere to carry it yet. Confirm
    // whether it should be folded into the evidence/provenance.
    let _ = doi; let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Duplicate resolution ids are skipped rather than overwritten.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  prediction: {prediction_id}");
        println!(
            "  matched:    {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!("  by:         {by}");
        println!("  outcome:    {}", truncate(actual_outcome, 80));
        println!();
        println!(
            "  {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5061
5062fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5064 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5065
5066 let resolved_ids: std::collections::HashSet<&str> = project
5067 .resolutions
5068 .iter()
5069 .map(|r| r.prediction_id.as_str())
5070 .collect();
5071
5072 let mut filtered: Vec<&crate::bundle::Prediction> = project
5073 .predictions
5074 .iter()
5075 .filter(|p| by.is_none_or(|b| p.made_by == b))
5076 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5077 .collect();
5078 filtered.sort_by(|a, b| {
5079 a.resolves_by
5080 .as_deref()
5081 .unwrap_or("9999")
5082 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5083 });
5084
5085 if json {
5086 let payload: Vec<serde_json::Value> = filtered
5087 .iter()
5088 .map(|p| {
5089 json!({
5090 "id": p.id,
5091 "claim_text": p.claim_text,
5092 "made_by": p.made_by,
5093 "confidence": p.confidence,
5094 "predicted_at": p.predicted_at,
5095 "resolves_by": p.resolves_by,
5096 "expected_outcome": p.expected_outcome,
5097 "resolved": resolved_ids.contains(p.id.as_str()),
5098 })
5099 })
5100 .collect();
5101 println!(
5102 "{}",
5103 serde_json::to_string_pretty(&json!({
5104 "ok": true,
5105 "command": "predictions",
5106 "frontier": frontier.display().to_string(),
5107 "count": payload.len(),
5108 "predictions": payload,
5109 }))
5110 .expect("serialize predictions")
5111 );
5112 return;
5113 }
5114
5115 println!();
5116 println!(
5117 " {}",
5118 format!("VELA · PREDICTIONS · {}", frontier.display())
5119 .to_uppercase()
5120 .dimmed()
5121 );
5122 println!(" {}", style::tick_row(60));
5123 if filtered.is_empty() {
5124 println!(" (no predictions matching filters)");
5125 return;
5126 }
5127 for p in &filtered {
5128 let resolved = resolved_ids.contains(p.id.as_str());
5129 let chip = if resolved {
5130 style::ok("resolved")
5131 } else {
5132 style::warn("open")
5133 };
5134 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5135 println!(
5136 " · {} {} by {} → {}",
5137 p.id.dimmed(),
5138 chip,
5139 p.made_by,
5140 deadline,
5141 );
5142 println!(" claim: {}", truncate(&p.claim_text, 90));
5143 println!(" confidence: {:.2}", p.confidence);
5144 }
5145}
5146
5147fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5152 use chrono::DateTime;
5153
5154 let now_dt = match now_override {
5155 Some(s) => DateTime::parse_from_rfc3339(s)
5156 .map(|dt| dt.with_timezone(&chrono::Utc))
5157 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5158 None => chrono::Utc::now(),
5159 };
5160
5161 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5162 if dry_run {
5163 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5165 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5166 if json {
5167 println!(
5168 "{}",
5169 serde_json::to_string_pretty(&json!({
5170 "ok": true,
5171 "command": "predictions.expire",
5172 "dry_run": true,
5173 "report": report,
5174 }))
5175 .expect("serialize predictions.expire (dry-run)")
5176 );
5177 } else {
5178 println!(
5179 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5180 style::ok("ok"),
5181 report.now,
5182 report.newly_expired.len(),
5183 report.already_expired.len(),
5184 report.already_resolved.len(),
5185 report.still_open.len(),
5186 );
5187 for id in &report.newly_expired {
5188 println!(" · {id}");
5189 }
5190 }
5191 return;
5192 }
5193
5194 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5195 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5196
5197 if json {
5198 println!(
5199 "{}",
5200 serde_json::to_string_pretty(&json!({
5201 "ok": true,
5202 "command": "predictions.expire",
5203 "report": report,
5204 }))
5205 .expect("serialize predictions.expire")
5206 );
5207 } else {
5208 println!(
5209 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5210 style::ok("expired"),
5211 report.now,
5212 report.newly_expired.len(),
5213 report.already_expired.len(),
5214 report.already_resolved.len(),
5215 report.still_open.len(),
5216 );
5217 for id in &report.newly_expired {
5218 println!(" · {id}");
5219 }
5220 }
5221}
5222
5223fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5224 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5225 let records = match actor {
5226 Some(a) => {
5227 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5228 .map(|r| vec![r])
5229 .unwrap_or_default()
5230 }
5231 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5232 };
5233
5234 if json {
5235 println!(
5236 "{}",
5237 serde_json::to_string_pretty(&json!({
5238 "ok": true,
5239 "command": "calibration",
5240 "frontier": frontier.display().to_string(),
5241 "filter_actor": actor,
5242 "records": records,
5243 }))
5244 .expect("serialize calibration")
5245 );
5246 return;
5247 }
5248
5249 println!();
5250 println!(
5251 " {}",
5252 format!("VELA · CALIBRATION · {}", frontier.display())
5253 .to_uppercase()
5254 .dimmed()
5255 );
5256 println!(" {}", style::tick_row(60));
5257 if records.is_empty() {
5258 println!(" (no calibration records)");
5259 return;
5260 }
5261 for r in &records {
5262 println!(" · {}", r.actor);
5263 println!(
5264 " predictions: {} resolved: {} hits: {}",
5265 r.n_predictions, r.n_resolved, r.n_hit
5266 );
5267 match r.hit_rate {
5268 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5269 None => println!(" hit rate: n/a"),
5270 }
5271 match r.brier_score {
5272 Some(b) => println!(
5273 " brier: {:.4} (lower is better; 0.25 = chance)",
5274 b
5275 ),
5276 None => println!(" brier: n/a"),
5277 }
5278 match r.log_score {
5279 Some(l) => println!(
5280 " log score: {:.4} (higher is better; 0 = perfect)",
5281 l
5282 ),
5283 None => println!(" log score: n/a"),
5284 }
5285 }
5286}
5287
/// Register an external dataset in the frontier.
///
/// Builds a `Provenance` record (source type "data_release", extraction
/// method "manual_curation", stamped with the current time and this
/// binary's version), wraps it in a `Dataset` keyed by name/version/
/// content hash, and appends it to the frontier. Duplicate dataset ids
/// are reported and skipped. Aborts the process on any I/O error.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            // Version of this binary, recorded for traceability.
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    dataset.row_count = row_count;

    // Duplicate dataset ids are skipped rather than overwritten.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  name:         {name}");
        if let Some(v) = version {
            println!("  version:      {v}");
        }
        println!("  content_hash: {content_hash}");
        if let Some(u) = url {
            println!("  url:          {u}");
        }
        println!("  source:       {source_title}");
        println!();
        println!(
            "  {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5405
/// Deposit a negative result into the frontier and print a confirmation.
///
/// `--kind` selects the schema: `registered_trial` requires the trial-design
/// flags (endpoint, intervention, comparator, population, n-enrolled, power,
/// CI bounds), while `exploratory` requires reagent, observation and
/// attempts. A missing required flag or an unknown kind aborts via
/// `fail_return` before any state is touched.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Validate the kind-specific flags up front and build the typed payload.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Minimal conditions record: only the free-text and the clinical-trial
    // flag (derived from kind) are populated from the CLI.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance is stamped as manual curation by this CLI version; the
    // source_type mirrors the kind distinction above.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5568
/// List the frontier's negative results, optionally filtered to those
/// targeting a specific finding id. Emits pretty JSON with `--json`,
/// otherwise a human-readable summary (kind label, retraction/review
/// status, target findings).
fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No --target means "keep everything"; otherwise require a match.
    let filtered: Vec<&crate::bundle::NegativeResult> = project
        .negative_results
        .iter()
        .filter(|nr| {
            target
                .map(|t| nr.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "negative_results",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "negative_results": filtered,
            }))
            .expect("serialize negative_results")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no negative_results in {}", frontier.display());
        return;
    }

    println!();
    println!(
        " {} ({})",
        "VELA · NEGATIVE RESULTS".dimmed(),
        filtered.len()
    );
    println!(" {}", style::tick_row(60));
    for nr in &filtered {
        // One-line label summarizing the kind-specific payload.
        let kind_label = match &nr.kind {
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint, power, ..
            } => format!("trial · {endpoint} · power {power:.2}"),
            crate::bundle::NegativeResultKind::Exploratory {
                reagent, attempts, ..
            } => format!("exploratory · {reagent} · {attempts} attempts"),
        };
        let retracted = if nr.retracted { " [retracted]" } else { "" };
        let review = nr
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", nr.id, retracted, review);
        println!(" {kind_label}");
        if !nr.target_findings.is_empty() {
            println!(" targets: {}", nr.target_findings.join(", "));
        }
    }
    println!();
}
5633
/// Set the access tier of an object (identified by type and id) in the
/// frontier. The tier string is validated before any state change; the
/// applied event id, when present, is echoed in the human-readable output.
#[allow(clippy::too_many_arguments)]
fn cmd_tier_set(
    frontier: &Path,
    object_type: &str,
    object_id: &str,
    tier: &str,
    actor: &str,
    reason: &str,
    json: bool,
) {
    let parsed_tier =
        crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
    let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize tier-set report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · TIER · {}", object_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" object_type: {object_type}");
        // Echo the canonical spelling, not the raw user input.
        println!(" new_tier: {}", parsed_tier.canonical());
        println!(" actor: {actor}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(" {} tier set in {}", style::ok("ok"), frontier.display());
    }
}
5673
/// Open a new (empty) trajectory in the frontier, optionally targeting
/// existing findings, and print the new trajectory's id and applied event.
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_create(
    frontier: &Path,
    deposited_by: &str,
    reason: &str,
    targets: Vec<String>,
    notes: &str,
    json: bool,
) {
    let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trajectory report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · TRAJECTORY · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} trajectory opened in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5712
/// Append a step to an existing trajectory. `--kind` must be one of
/// hypothesis|tried|ruled_out|observed|refined; anything else aborts via
/// `fail_return` before any state change.
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_step(
    frontier: &Path,
    trajectory_id: &str,
    kind: &str,
    description: &str,
    actor: &str,
    reason: &str,
    references: Vec<String>,
    json: bool,
) {
    // Map the CLI string onto the typed step kind.
    let parsed_kind = match kind {
        "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
        "tried" => crate::bundle::TrajectoryStepKind::Tried,
        "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
        "observed" => crate::bundle::TrajectoryStepKind::Observed,
        "refined" => crate::bundle::TrajectoryStepKind::Refined,
        other => fail_return(&format!(
            "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
        )),
    };
    let report = state::append_trajectory_step(
        frontier,
        trajectory_id,
        parsed_kind,
        description,
        actor,
        references,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize step report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · STEP · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" trajectory: {trajectory_id}");
        println!(" kind: {kind}");
        println!(" actor: {actor}");
        println!(
            " {} step appended in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5770
/// List trajectories in the frontier, optionally filtered to those
/// targeting a given finding id. Human output shows each trajectory's
/// status flags, step count, targets, and an 80-char preview of each step.
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No --target means "keep everything"; otherwise require a match.
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!(" {}", style::tick_row(60));
    for t in &filtered {
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", t.id, retracted, review);
        println!(
            " {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            // char-based truncation keeps multi-byte descriptions safe.
            let preview: String = step.description.chars().take(80).collect();
            println!(" [{label}] {preview}");
        }
    }
    println!();
}
5838
/// List registered datasets in the frontier: id, name, optional version,
/// optional URL, and (truncated) content hash.
fn cmd_datasets(frontier: &Path, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "datasets",
                "frontier": frontier.display().to_string(),
                "count": project.datasets.len(),
                "datasets": project.datasets,
            }))
            .expect("serialize datasets")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DATASETS · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if project.datasets.is_empty() {
        println!(" (no datasets registered)");
        return;
    }
    for ds in &project.datasets {
        // Render the version as a "@vX" suffix when present.
        let v = ds
            .version
            .as_deref()
            .map(|s| format!("@{s}"))
            .unwrap_or_default();
        println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
        if let Some(u) = &ds.url {
            println!(" url: {}", truncate(u, 80));
        }
        println!(" hash: {}", truncate(&ds.content_hash, 80));
    }
}
5881
/// Register a code artifact (file/region of a repository) in the frontier.
/// Duplicate ids (same derived identity) are skipped with a warning rather
/// than treated as an error.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Normalize the optional range: a start without an end becomes a
    // single-line range. NOTE(review): --line-end without --line-start is
    // silently ignored here — confirm that is intentional.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Idempotent add: skip (with a warning) when the derived id already exists.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" language: {language}");
        if let Some(r) = repo_url {
            println!(" repo: {r}");
        }
        if let Some(c) = commit {
            println!(" commit: {c}");
        }
        println!(" path: {path}");
        if let Some((a, b)) = line_range {
            println!(" lines: {a}-{b}");
        }
        println!(" content_hash: {content_hash}");
        println!();
        println!(
            " {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5981
/// List registered code artifacts in the frontier: id, language, path with
/// optional ":start-end" line suffix, plus repo URL and commit when known.
fn cmd_code_artifacts(frontier: &Path, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code-artifacts",
                "frontier": frontier.display().to_string(),
                "count": project.code_artifacts.len(),
                "code_artifacts": project.code_artifacts,
            }))
            .expect("serialize code-artifacts")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · CODE · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if project.code_artifacts.is_empty() {
        println!(" (no code artifacts registered)");
        return;
    }
    for c in &project.code_artifacts {
        // Optional ":start-end" suffix when a line range was recorded.
        let lr = c
            .line_range
            .map(|(a, b)| format!(":{a}-{b}"))
            .unwrap_or_default();
        println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
        if let Some(r) = &c.repo_url {
            println!(" repo: {}", truncate(r, 80));
        }
        if let Some(g) = &c.git_commit {
            println!(" commit: {g}");
        }
    }
}
6025
6026fn sha256_for_bytes(bytes: &[u8]) -> String {
6027 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6028}
6029
/// Return the hex portion of a `sha256:`-prefixed content hash; inputs
/// without the prefix are returned unchanged.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6033
6034fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
6035 let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
6036 return None;
6037 };
6038 let hex = sha256_hex_part(content_hash);
6039 let rel = format!(".vela/artifact-blobs/sha256/{hex}");
6040 let path = root.join(&rel);
6041 if let Some(parent) = path.parent() {
6042 std::fs::create_dir_all(parent).unwrap_or_else(|e| {
6043 fail(&format!(
6044 "Failed to create artifact blob directory {}: {e}",
6045 parent.display()
6046 ))
6047 });
6048 }
6049 if !path.is_file() {
6050 std::fs::write(&path, bytes)
6051 .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
6052 }
6053 Some(rel)
6054}
6055
6056fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6057 let mut out = BTreeMap::new();
6058 for pair in pairs {
6059 let Some((key, value)) = pair.split_once('=') else {
6060 fail(&format!("--metadata must be key=value, got {pair:?}"));
6061 };
6062 let key = key.trim();
6063 if key.is_empty() {
6064 fail("--metadata key must be non-empty");
6065 }
6066 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6067 }
6068 out
6069}
6070
/// Map a CLI artifact `--kind` onto the provenance `source_type`
/// vocabulary; unrecognized kinds default to "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    if kind == "clinical_trial_record" || kind == "protocol" {
        "clinical_trial"
    } else if kind == "dataset" {
        "data_release"
    } else if kind == "model_output" {
        "model_output"
    } else if kind == "registry_record" {
        "database_record"
    } else if kind == "lab_file" {
        "lab_notebook"
    } else {
        "database_record"
    }
}
6081
6082fn artifact_provenance(
6083 kind: &str,
6084 title: &str,
6085 url: Option<&str>,
6086 doi: Option<&str>,
6087 license: Option<&str>,
6088) -> crate::bundle::Provenance {
6089 crate::bundle::Provenance {
6090 source_type: artifact_source_type(kind).to_string(),
6091 doi: doi.map(str::to_string),
6092 pmid: None,
6093 pmc: None,
6094 openalex_id: None,
6095 url: url.map(str::to_string),
6096 title: title.to_string(),
6097 authors: Vec::new(),
6098 year: None,
6099 journal: None,
6100 license: license.map(str::to_string),
6101 publisher: None,
6102 funders: Vec::new(),
6103 extraction: crate::bundle::Extraction {
6104 method: "artifact_deposit".to_string(),
6105 model: None,
6106 model_version: None,
6107 extracted_at: chrono::Utc::now().to_rfc3339(),
6108 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6109 },
6110 review: None,
6111 citation_count: None,
6112 }
6113}
6114
/// Deposit a generic artifact into the frontier.
///
/// Storage mode is derived from the inputs: a `--file` is hashed and stored
/// as a repo blob ("local_blob") or referenced in place ("local_file");
/// a bare `--url` becomes "remote"; otherwise the default "pointer" mode is
/// kept. Without `--file`, `--content-hash` is mandatory; with `--file`,
/// a supplied hash must match the file bytes or the command aborts.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    // Defaults for the pointer-only case; overwritten below when a file or
    // URL is supplied.
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // A user-supplied hash must agree with the actual file bytes.
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer a repo-local content-addressed blob; fall back to a plain
        // file path when the frontier is not a vela repo.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // `fail` aborts here, so below this point a hash is always present.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // --source-url falls back to --url; source title falls back to name.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" name: {name}");
        println!(" hash: {content_hash_for_print}");
        println!(
            " {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6230
/// List artifacts in the frontier, optionally filtered to those targeting
/// a given finding id; shows id/kind/name plus locator and targets.
fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No --target means "keep everything"; otherwise require a match.
    let filtered: Vec<&crate::bundle::Artifact> = project
        .artifacts
        .iter()
        .filter(|artifact| {
            target
                .map(|t| artifact.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifacts",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "artifacts": filtered,
            }))
            .expect("serialize artifacts")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACTS · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if filtered.is_empty() {
        println!(" (no artifacts registered)");
        return;
    }
    for artifact in filtered {
        println!(
            " · {} {} · {}",
            artifact.id.dimmed(),
            artifact.kind,
            artifact.name
        );
        if let Some(locator) = &artifact.locator {
            println!(" locator: {}", truncate(locator, 88));
        }
        if !artifact.target_findings.is_empty() {
            println!(" targets: {}", artifact.target_findings.join(", "));
        }
    }
}
6285
/// Run the artifact integrity audit and print its summary. Exits with
/// status 1 (after printing issues) when the audit fails, in both JSON and
/// human-readable modes.
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" artifacts: {}", audit.artifact_count);
    println!(" checked local blobs: {}", audit.checked_local_blobs);
    println!(" local blob bytes: {}", audit.local_blob_bytes);
    if !audit.by_kind.is_empty() {
        // "kind:count" pairs, comma-separated.
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!(" kinds: {kinds}");
    }
    if audit.ok {
        println!(" {} artifact audit passed.", style::ok("ok"));
        return;
    }
    for issue in &audit.issues {
        println!(
            " {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
6335
/// Print the decision-brief projection: each question with its short
/// answer, caveat, supporting/tension/gap findings, artifacts, and what
/// would change the answer. Exits 1 when the projection fails to load.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection attached.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!(" · {} · {}", question.id.dimmed(), question.title);
        println!(" answer: {}", wrap_line(&question.short_answer, 82));
        println!(" caveat: {}", wrap_line(&question.caveat, 82));
        println!(" support: {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!(" tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!(" gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!(" artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            " would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6385
/// Print the trial-outcomes projection: one section per trial row with
/// population, endpoint, cognition/biomarker/safety results, regulatory
/// status, and linked finding/artifact ids. Exits 1 on a failed projection.
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_trial_outcomes(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trial summary report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · TRIAL SUMMARY · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection attached.
    let outcomes = report
        .projection
        .as_ref()
        .expect("ok trial report carries projection");
    for row in &outcomes.rows {
        println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
        println!(" population: {}", wrap_line(&row.population, 82));
        println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
        println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
        println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
        println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
        println!(" status: {}", wrap_line(&row.regulatory_status, 82));
        if !row.finding_ids.is_empty() {
            println!(" findings: {}", row.finding_ids.join(", "));
        }
        if !row.artifact_ids.is_empty() {
            println!(" artifacts: {}", row.artifact_ids.join(", "));
        }
    }
}
6431
/// Print the source-verification projection: verification timestamp plus
/// each source's agency, URL and current status. Exits 1 when the
/// projection fails to load.
fn cmd_source_verification(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_verification(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source verification report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection attached.
    let verification = report
        .projection
        .as_ref()
        .expect("ok source verification report carries projection");
    println!(" verified_at: {}", verification.verified_at);
    for source in &verification.sources {
        println!(" · {} · {}", source.id.dimmed(), source.title);
        println!(" agency: {}", source.agency);
        println!(" url: {}", truncate(&source.url, 88));
        println!(" status: {}", wrap_line(&source.current_status, 82));
    }
}
6469
/// Print the source-ingest-plan projection: verification timestamp, entry
/// count, and one section per entry (category/priority/status, name,
/// locator, intended use, linked artifact and findings). Exits 1 when the
/// projection fails to load.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection attached.
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!(" verified_at: {}", plan.verified_at);
    println!(" entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            " · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!(" name: {}", wrap_line(&entry.name, 82));
        println!(" locator: {}", truncate(&entry.locator, 88));
        println!(" use: {}", wrap_line(&entry.target_use, 82));
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!(" artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!(" findings: {}", entry.target_findings.join(", "));
        }
    }
}
6520
/// Print a projection failure for the human-readable reports: an optional
/// top-level error line followed by one "invalid" line per issue
/// (path and message). Callers decide whether to exit afterwards.
fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
    if let Some(error) = error {
        println!(" {} {error}", style::lost("unavailable"));
    }
    for issue in issues {
        println!(
            " {} {}: {}",
            style::lost("invalid"),
            issue.path,
            issue.message
        );
    }
}
6534
/// Word-wrap `text` to at most `max_chars` characters per line for the
/// indented CLI output. Text that already fits is returned unchanged
/// (including its whitespace); longer text is re-tokenized on whitespace
/// and continuation lines are prefixed with a fixed indent literal.
/// Counts are in `char`s, so multi-byte text wraps correctly.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut out = String::new();
    // Length of the current output line, excluding the continuation indent
    // (so every line gets max_chars of words after its indent).
    let mut line_len = 0usize;
    for word in text.split_whitespace() {
        let word_len = word.chars().count();
        // +1 accounts for the separating space before the word.
        if line_len > 0 && line_len + 1 + word_len > max_chars {
            out.push('\n');
            out.push_str(" ");
            out.push_str(word);
            line_len = word_len;
        } else {
            if line_len > 0 {
                out.push(' ');
                line_len += 1;
            }
            out.push_str(word);
            line_len += word_len;
        }
    }
    out
}
6559
6560fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6561 study.pointer(pointer).and_then(Value::as_str)
6562}
6563
6564fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6565 study
6566 .pointer(pointer)
6567 .and_then(Value::as_array)
6568 .map(|items| {
6569 items
6570 .iter()
6571 .filter_map(Value::as_str)
6572 .map(str::to_string)
6573 .collect()
6574 })
6575 .unwrap_or_default()
6576}
6577
6578fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6579 study
6580 .pointer(pointer)
6581 .and_then(Value::as_array)
6582 .map(|items| {
6583 items
6584 .iter()
6585 .filter_map(|item| item.get(field).and_then(Value::as_str))
6586 .map(str::to_string)
6587 .collect()
6588 })
6589 .unwrap_or_default()
6590}
6591
6592fn insert_string_vec_metadata(
6593 metadata: &mut BTreeMap<String, Value>,
6594 key: &str,
6595 values: Vec<String>,
6596) {
6597 if values.is_empty() {
6598 return;
6599 }
6600 metadata.insert(
6601 key.to_string(),
6602 Value::Array(values.into_iter().map(Value::String).collect()),
6603 );
6604}
6605
/// Import a ClinicalTrials.gov study as a `clinical_trial_record` artifact.
///
/// The study JSON is read from `input_json` when given, otherwise fetched
/// from the ClinicalTrials.gov v2 API. The JSON is canonicalized and
/// content-hashed, selected protocol fields are lifted into artifact
/// metadata, and the artifact is appended to the frontier state. Any
/// fetch/parse/store failure aborts the process via `fail`/`fail_return`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a local JSON file when provided; otherwise fetch over HTTP,
    // treating any transport error or non-success status as fatal.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    // Canonical bytes give a content hash that is stable across key order.
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Store the blob locally when possible; otherwise reference the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id embedded in the record; fall back to the argument.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Brief title preferred, official title next, bare NCT id as last resort.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    // Lift selected protocol fields into flat artifact metadata.
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Optional scalar fields: only inserted when present in the record.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Optional array fields: empty vecs are dropped by the helper.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    // Build the artifact and append it to frontier state as one event.
    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        // Human-readable summary banner plus the key identifying fields.
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6782
/// Record a replication attempt against an existing `vf_` finding.
///
/// Validates the outcome and target id, heuristically derives condition
/// flags from `conditions_text`, appends a `Replication` record, and —
/// unless `no_cascade` — propagates the outcome to dependent findings
/// before saving the frontier back to disk. Duplicate replications (same
/// derived id) are reported and skipped. Validation and I/O errors abort
/// the process via `fail`/`fail_return`.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    // Reject outcomes outside the canonical vocabulary up front.
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Keyword heuristics over the free-text conditions populate the
    // structured boolean flags (in vitro / in vivo / human / clinical).
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    // Manually-curated provenance stamped with this binary's version.
    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotence: a replication with the same derived id is a no-op.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Propagate the outcome to dependent findings unless suppressed; the
    // cascade's review events are appended and stats recomputed before save.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        // Human-readable summary, then the cascade result (or skip note).
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" target: {target}");
        println!(" outcome: {outcome}");
        println!(" attempted by: {attempted_by}");
        println!(" conditions: {conditions_text}");
        println!(" source: {source_title}");
        if let Some(d) = doi {
            println!(" doi: {d}");
        }
        println!();
        println!(
            " {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
7004
7005fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
7007 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
7008 let filtered: Vec<&crate::bundle::Replication> = project
7009 .replications
7010 .iter()
7011 .filter(|r| target.is_none_or(|t| r.target_finding == t))
7012 .collect();
7013
7014 if json {
7015 let payload = json!({
7016 "ok": true,
7017 "command": "replications",
7018 "frontier": frontier.display().to_string(),
7019 "filter_target": target,
7020 "count": filtered.len(),
7021 "replications": filtered,
7022 });
7023 println!(
7024 "{}",
7025 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
7026 );
7027 return;
7028 }
7029
7030 println!();
7031 let header = match target {
7032 Some(t) => format!("VELA · REPLICATIONS · {t}"),
7033 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
7034 };
7035 println!(" {}", header.to_uppercase().dimmed());
7036 println!(" {}", style::tick_row(60));
7037 if filtered.is_empty() {
7038 println!(" (no replications recorded)");
7039 return;
7040 }
7041 for rep in &filtered {
7042 let outcome_chip = match rep.outcome.as_str() {
7043 "replicated" => style::ok(&rep.outcome),
7044 "failed" => style::lost(&rep.outcome),
7045 "partial" => style::warn(&rep.outcome),
7046 _ => rep.outcome.clone().normal().to_string(),
7047 };
7048 println!(
7049 " · {} {} by {}",
7050 rep.id.dimmed(),
7051 outcome_chip,
7052 rep.attempted_by
7053 );
7054 println!(" target: {}", rep.target_finding);
7055 if !rep.conditions.text.is_empty() {
7056 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7057 }
7058 if !rep.provenance.title.is_empty() {
7059 println!(" source: {}", truncate(&rep.provenance.title, 80));
7060 }
7061 }
7062}
7063
/// One-stop ingest: dispatch a path or citation URI to the right importer.
///
/// Routing, in order:
/// - `doi:` / `pmid:` / `nct:` URIs → metadata fetch only (no state write);
/// - a single file → by extension (pdf → scout, md/markdown → notes
///   compiler, csv/tsv → data compiler, json → artifact-to-state);
/// - a directory → count handleable file types at the top level and run
///   each matching importer in sequence, falling back to the code compiler
///   when none match;
/// - anything else aborts via `fail`.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    // Citation URIs are metadata-only fetches; warn that no state changes.
    let lowered = path.trim().to_lowercase();
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                " vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                " next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    // Single file: dispatch on the lowercased extension.
    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                // JSON imports under a default bot identity unless an
                // explicit actor was supplied.
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count handleable file types (top level only) and
        // remember extensions nothing here can process.
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are expected noise; don't report them.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing recognizable: treat the folder as a code repository.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                " vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                " pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each importer whose file type was present.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            // JSON files are imported one by one; re-walk the directory.
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                " vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7263
7264#[allow(clippy::too_many_arguments)]
7265async fn cmd_compile_data(
7267 root: &Path,
7268 frontier: &Path,
7269 backend: Option<&str>,
7270 sample_rows: Option<usize>,
7271 dry_run: bool,
7272 json_out: bool,
7273) {
7274 match DATASETS_HANDLER.get() {
7275 Some(handler) => {
7276 handler(
7277 root.to_path_buf(),
7278 frontier.to_path_buf(),
7279 backend.map(String::from),
7280 sample_rows,
7281 dry_run,
7282 json_out,
7283 )
7284 .await;
7285 }
7286 None => {
7287 eprintln!(
7288 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7289 style::err_prefix()
7290 );
7291 std::process::exit(1);
7292 }
7293 }
7294}
7295
7296async fn cmd_review_pending(
7299 frontier: &Path,
7300 backend: Option<&str>,
7301 max_proposals: Option<usize>,
7302 batch_size: usize,
7303 dry_run: bool,
7304 json_out: bool,
7305) {
7306 match REVIEWER_HANDLER.get() {
7307 Some(handler) => {
7308 handler(
7309 frontier.to_path_buf(),
7310 backend.map(String::from),
7311 max_proposals,
7312 batch_size,
7313 dry_run,
7314 json_out,
7315 )
7316 .await;
7317 }
7318 None => {
7319 eprintln!(
7320 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7321 style::err_prefix()
7322 );
7323 std::process::exit(1);
7324 }
7325 }
7326}
7327
7328async fn cmd_find_tensions(
7331 frontier: &Path,
7332 backend: Option<&str>,
7333 max_findings: Option<usize>,
7334 dry_run: bool,
7335 json_out: bool,
7336) {
7337 match TENSIONS_HANDLER.get() {
7338 Some(handler) => {
7339 handler(
7340 frontier.to_path_buf(),
7341 backend.map(String::from),
7342 max_findings,
7343 dry_run,
7344 json_out,
7345 )
7346 .await;
7347 }
7348 None => {
7349 eprintln!(
7350 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7351 style::err_prefix()
7352 );
7353 std::process::exit(1);
7354 }
7355 }
7356}
7357
7358async fn cmd_plan_experiments(
7361 frontier: &Path,
7362 backend: Option<&str>,
7363 max_findings: Option<usize>,
7364 dry_run: bool,
7365 json_out: bool,
7366) {
7367 match EXPERIMENTS_HANDLER.get() {
7368 Some(handler) => {
7369 handler(
7370 frontier.to_path_buf(),
7371 backend.map(String::from),
7372 max_findings,
7373 dry_run,
7374 json_out,
7375 )
7376 .await;
7377 }
7378 None => {
7379 eprintln!(
7380 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7381 style::err_prefix()
7382 );
7383 std::process::exit(1);
7384 }
7385 }
7386}
7387
7388async fn cmd_compile_code(
7391 root: &Path,
7392 frontier: &Path,
7393 backend: Option<&str>,
7394 max_files: Option<usize>,
7395 dry_run: bool,
7396 json_out: bool,
7397) {
7398 match CODE_HANDLER.get() {
7399 Some(handler) => {
7400 handler(
7401 root.to_path_buf(),
7402 frontier.to_path_buf(),
7403 backend.map(String::from),
7404 max_files,
7405 dry_run,
7406 json_out,
7407 )
7408 .await;
7409 }
7410 None => {
7411 eprintln!(
7412 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7413 style::err_prefix()
7414 );
7415 std::process::exit(1);
7416 }
7417 }
7418}
7419
7420async fn cmd_compile_notes(
7425 vault: &Path,
7426 frontier: &Path,
7427 backend: Option<&str>,
7428 max_files: Option<usize>,
7429 max_items_per_category: Option<usize>,
7430 dry_run: bool,
7431 json_out: bool,
7432) {
7433 match NOTES_HANDLER.get() {
7434 Some(handler) => {
7435 handler(
7436 vault.to_path_buf(),
7437 frontier.to_path_buf(),
7438 backend.map(String::from),
7439 max_files,
7440 max_items_per_category,
7441 dry_run,
7442 json_out,
7443 )
7444 .await;
7445 }
7446 None => {
7447 eprintln!(
7448 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7449 style::err_prefix()
7450 );
7451 std::process::exit(1);
7452 }
7453 }
7454}
7455
7456async fn cmd_scout(
7463 folder: &Path,
7464 frontier: &Path,
7465 backend: Option<&str>,
7466 dry_run: bool,
7467 json_out: bool,
7468) {
7469 match SCOUT_HANDLER.get() {
7470 Some(handler) => {
7471 handler(
7472 folder.to_path_buf(),
7473 frontier.to_path_buf(),
7474 backend.map(String::from),
7475 dry_run,
7476 json_out,
7477 )
7478 .await;
7479 }
7480 None => {
7481 eprintln!(
7482 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7483 style::err_prefix()
7484 );
7485 std::process::exit(1);
7486 }
7487 }
7488}
7489
/// Recursively scan `root` for files whose name or extension suggests
/// private-key or credential material.
///
/// Skips common build/VCS directories and whitelists public-key files
/// (`*.pub`, `*.pubkey`, `public.key`). Unreadable directories are silently
/// skipped (best-effort audit). Returns the matching paths sorted for
/// deterministic output.
//
// Fixes: dropped the stray `#[allow(clippy::too_many_arguments)]` (this fn
// takes one argument); lowercasing is now only done for file entries.
pub fn scan_for_sensitive_paths(root: &Path) -> Vec<PathBuf> {
    const SKIP_DIRS: &[&str] = &[".git", "target", "node_modules", "dist", "build"];
    const BAD_EXTS: &[&str] = &["key", "pem", "p12", "pfx"];
    const BAD_SUBSTRINGS: &[&str] = &["private", "secret", "credential"];

    let mut hits: Vec<PathBuf> = Vec::new();
    // Iterative DFS; visit order doesn't matter since results are sorted.
    let mut stack: Vec<PathBuf> = vec![root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        let Ok(entries) = std::fs::read_dir(&dir) else {
            continue;
        };
        for entry in entries.flatten() {
            let path = entry.path();
            let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
                continue;
            };
            if path.is_dir() {
                if !SKIP_DIRS.contains(&name) {
                    stack.push(path);
                }
                continue;
            }
            let lower = name.to_lowercase();
            // Public-key material is safe to publish; never flag it.
            if lower.ends_with(".pub") || lower.ends_with(".pubkey") || lower == "public.key" {
                continue;
            }
            let ext = path
                .extension()
                .and_then(|e| e.to_str())
                .map(str::to_lowercase)
                .unwrap_or_default();
            if BAD_EXTS.contains(&ext.as_str()) || BAD_SUBSTRINGS.iter().any(|s| lower.contains(s))
            {
                hits.push(path);
            }
        }
    }
    hits.sort();
    hits
}
7551
/// Run the `vela check` validation suite over a frontier source.
///
/// Depending on flags this runs: schema validation, lint/stats (including
/// event replay, signature verification, and signal analysis), and the
/// conformance suite. `--json` emits a machine-readable report instead.
/// Exits non-zero on failures; `strict` additionally enforces the
/// secret-path audit, an empty review queue, and proof readiness.
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits: build the payload and exit 1 unless "ok".
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Strict mode: refuse to proceed if key/credential-looking files are
    // present under the source tree (secret audit; see THREAT_MODEL.md A17).
    if strict && let Some(src) = source {
        let hits = scan_for_sensitive_paths(src);
        if !hits.is_empty() {
            eprintln!(
                "{} secret-audit: {} sensitive path(s) found under {}",
                style::err_prefix(),
                hits.len(),
                src.display()
            );
            for hit in &hits {
                eprintln!(" - {}", hit.display());
            }
            eprintln!(
                " hint: add `keys/` and `*.key` to .gitignore so these never reach a public repo (see THREAT_MODEL.md A17)"
            );
            std::process::exit(1);
        }
    }

    // With no explicit check selected, run the whole suite.
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        // Replay the event log and surface any conflicts.
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!(" - {conflict}");
            }
        }
        // Only print signature stats when something is actually signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Fail on replay conflicts; in strict mode also fail on a non-empty
        // review queue or a frontier that is not proof-ready.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        // Conformance runs when explicitly requested or when the default
        // directory exists; otherwise explain how to point at it.
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                " conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    // `fix` is accepted by the CLI but not acted on in this code path.
    let _ = fix;
}
7668
7669fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
7670 let report = validate::validate(src);
7671 let loaded = repo::load_from_path(src).ok();
7672 let (method_report, graph_report) = if schema_only {
7673 (None, None)
7674 } else if let Some(frontier) = loaded.as_ref() {
7675 (
7676 Some(lint::lint(frontier, None, None)),
7677 Some(lint::lint_frontier(frontier)),
7678 )
7679 } else {
7680 (None, None)
7681 };
7682 let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
7683 let mut diagnostics = Vec::new();
7684 diagnostics.extend(report.errors.iter().map(|e| {
7685 json!({
7686 "severity": "error",
7687 "rule_id": "schema",
7688 "finding_id": null,
7689 "file": &e.file,
7690 "field_path": null,
7691 "message": &e.error,
7692 "suggestion": schema_error_suggestion(&e.error),
7693 "fixable": schema_error_fix(&e.error),
7694 "normalize_action": schema_error_action(&e.error),
7695 })
7696 }));
7697 for (check_id, lint_report) in [
7698 ("methodology", method_report.as_ref()),
7699 ("frontier_graph", graph_report.as_ref()),
7700 ] {
7701 if let Some(lint_report) = lint_report {
7702 diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
7703 json!({
7704 "severity": d.severity.to_string(),
7705 "rule_id": &d.rule_id,
7706 "check": check_id,
7707 "finding_id": &d.finding_id,
7708 "field_path": null,
7709 "message": &d.message,
7710 "suggestion": &d.suggestion,
7711 "fixable": false,
7712 "normalize_action": null,
7713 })
7714 }));
7715 }
7716 }
7717 let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
7718 let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
7719 let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
7720 let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
7721 let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
7722 let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
7723 let replay_report = loaded.as_ref().map(events::replay_report);
7724 let state_integrity_report = if schema_only {
7725 loaded.as_ref().map(state_integrity::analyze)
7726 } else {
7727 state_integrity::analyze_path(src).ok()
7728 };
7729 if let Some(replay) = replay_report.as_ref()
7730 && !replay.ok
7731 {
7732 diagnostics.extend(replay.conflicts.iter().map(|conflict| {
7733 json!({
7734 "severity": "error",
7735 "rule_id": "event_replay",
7736 "check": "events",
7737 "finding_id": null,
7738 "field_path": null,
7739 "message": conflict,
7740 "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
7741 "fixable": false,
7742 "normalize_action": null,
7743 })
7744 }));
7745 }
7746 let event_errors = replay_report
7747 .as_ref()
7748 .map_or(0, |replay| usize::from(!replay.ok));
7749 let state_integrity_errors = state_integrity_report
7750 .as_ref()
7751 .map_or(0, |report| report.structural_errors.len());
7752 let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
7753 .as_ref()
7754 .map(|frontier| {
7755 (
7756 sources::source_summary(frontier),
7757 sources::evidence_summary(frontier),
7758 sources::condition_summary(frontier),
7759 proposals::summary(frontier),
7760 proposals::proof_state_json(&frontier.proof_state),
7761 )
7762 })
7763 .unwrap_or_else(|| {
7764 (
7765 sources::SourceRegistrySummary::default(),
7766 sources::EvidenceAtomSummary::default(),
7767 sources::ConditionSummary::default(),
7768 proposals::ProposalSummary::default(),
7769 Value::Null,
7770 )
7771 });
7772 let signature_report = loaded
7773 .as_ref()
7774 .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
7775 if let Some(frontier) = loaded.as_ref()
7776 && !schema_only
7777 {
7778 let projection = sources::derive_projection(frontier);
7779 let existing_sources = frontier
7780 .sources
7781 .iter()
7782 .map(|source| source.id.as_str())
7783 .collect::<std::collections::BTreeSet<_>>();
7784 let existing_atoms = frontier
7785 .evidence_atoms
7786 .iter()
7787 .map(|atom| atom.id.as_str())
7788 .collect::<std::collections::BTreeSet<_>>();
7789 let existing_conditions = frontier
7790 .condition_records
7791 .iter()
7792 .map(|record| record.id.as_str())
7793 .collect::<std::collections::BTreeSet<_>>();
7794 for source in projection
7795 .sources
7796 .iter()
7797 .filter(|source| !existing_sources.contains(source.id.as_str()))
7798 {
7799 diagnostics.push(json!({
7800 "severity": "warning",
7801 "rule_id": "missing_source_record",
7802 "check": "source_registry",
7803 "finding_id": source.finding_ids.first(),
7804 "field_path": "sources",
7805 "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
7806 "suggestion": "Run `vela normalize` to materialize source records before proof export.",
7807 "fixable": true,
7808 "normalize_action": "materialize_source_record",
7809 }));
7810 }
7811 for atom in projection
7812 .evidence_atoms
7813 .iter()
7814 .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
7815 {
7816 diagnostics.push(json!({
7817 "severity": "warning",
7818 "rule_id": "missing_evidence_atom",
7819 "check": "evidence_atoms",
7820 "finding_id": atom.finding_id,
7821 "field_path": "evidence_atoms",
7822 "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
7823 "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
7824 "fixable": true,
7825 "normalize_action": "materialize_evidence_atom",
7826 }));
7827 }
7828 for atom in projection
7829 .evidence_atoms
7830 .iter()
7831 .filter(|atom| atom.locator.is_none())
7832 {
7833 diagnostics.push(json!({
7834 "severity": "warning",
7835 "rule_id": "missing_evidence_locator",
7836 "check": "evidence_atoms",
7837 "finding_id": atom.finding_id,
7838 "field_path": "evidence_atoms[].locator",
7839 "message": format!("Evidence atom {} has no source locator.", atom.id),
7840 "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
7841 "fixable": false,
7842 "normalize_action": null,
7843 }));
7844 }
7845 for condition in projection
7846 .condition_records
7847 .iter()
7848 .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
7849 {
7850 diagnostics.push(json!({
7851 "severity": "warning",
7852 "rule_id": "condition_record_missing",
7853 "check": "conditions",
7854 "finding_id": condition.finding_id,
7855 "field_path": "condition_records",
7856 "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
7857 "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
7858 "fixable": true,
7859 "normalize_action": "materialize_condition_record",
7860 }));
7861 }
7862 for proposal in frontier.proposals.iter().filter(|proposal| {
7863 matches!(proposal.status.as_str(), "accepted" | "applied")
7864 && proposal
7865 .reviewed_by
7866 .as_deref()
7867 .is_none_or(proposals::is_placeholder_reviewer)
7868 }) {
7869 diagnostics.push(json!({
7870 "severity": "error",
7871 "rule_id": "reviewer_identity_missing",
7872 "check": "proposals",
7873 "finding_id": proposal.target.id,
7874 "field_path": "proposals[].reviewed_by",
7875 "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
7876 "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
7877 "fixable": false,
7878 "normalize_action": null,
7879 }));
7880 }
7881 }
7882 let signal_report = loaded
7883 .as_ref()
7884 .map(|frontier| signals::analyze(frontier, &diagnostics))
7885 .unwrap_or_else(empty_signal_report);
7886 let errors =
7887 report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
7888 let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
7889 let infos = method_infos + graph_infos;
7890 let strict_blockers = signal_report
7891 .signals
7892 .iter()
7893 .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
7894 .count();
7895 let fixable = diagnostics
7896 .iter()
7897 .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
7898 .count();
7899 let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
7900
7901 json!({
7902 "ok": ok,
7903 "command": "check",
7904 "schema_version": project::VELA_SCHEMA_VERSION,
7905 "source": {
7906 "path": src.display().to_string(),
7907 "hash": format!("sha256:{source_hash}"),
7908 },
7909 "summary": {
7910 "status": if ok { "pass" } else { "fail" },
7911 "checked_findings": report.total_files,
7912 "valid_findings": report.valid,
7913 "invalid_findings": report.invalid,
7914 "errors": errors,
7915 "warnings": warnings,
7916 "info": infos,
7917 "fixable": fixable,
7918 "strict": strict,
7919 "schema_only": schema_only,
7920 },
7921 "checks": [
7922 {
7923 "id": "schema",
7924 "status": if report.invalid == 0 { "pass" } else { "fail" },
7925 "checked": report.total_files,
7926 "failed": report.invalid,
7927 "errors": report.errors.iter().map(|e| json!({
7928 "file": e.file,
7929 "message": e.error,
7930 })).collect::<Vec<_>>(),
7931 },
7932 {
7933 "id": "methodology",
7934 "status": if method_errors == 0 { "pass" } else { "fail" },
7935 "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
7936 "failed": method_errors,
7937 "warnings": method_warnings,
7938 "info": method_infos,
7939 "skipped": schema_only,
7940 },
7941 {
7942 "id": "frontier_graph",
7943 "status": if graph_errors == 0 { "pass" } else { "fail" },
7944 "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
7945 "failed": graph_errors,
7946 "warnings": graph_warnings,
7947 "info": graph_infos,
7948 "skipped": schema_only,
7949 },
7950 {
7951 "id": "signals",
7952 "status": if strict_blockers == 0 { "pass" } else { "fail" },
7953 "checked": signal_report.signals.len(),
7954 "failed": strict_blockers,
7955 "warnings": signal_report.proof_readiness.warnings,
7956 "skipped": loaded.is_none(),
7957 "blockers": signal_report.signals.iter()
7958 .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
7959 .map(|s| json!({
7960 "id": s.id,
7961 "kind": s.kind,
7962 "severity": s.severity,
7963 "reason": s.reason,
7964 }))
7965 .collect::<Vec<_>>(),
7966 },
7967 {
7968 "id": "events",
7969 "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
7970 "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
7971 "failed": event_errors,
7972 "skipped": schema_only || loaded.is_none(),
7973 },
7974 {
7975 "id": "state_integrity",
7976 "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
7977 "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
7978 "failed": state_integrity_errors,
7979 "skipped": schema_only || loaded.is_none(),
7980 }
7981 ],
7982 "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
7983 "replay": replay_report,
7984 "state_integrity": state_integrity_report,
7985 "source_registry": source_registry,
7986 "evidence_atoms": evidence_atoms,
7987 "conditions": conditions,
7988 "proposals": proposal_summary,
7989 "proof_state": proof_state,
7990 "signatures": signature_report,
7991 "diagnostics": diagnostics,
7992 "signals": signal_report.signals,
7993 "review_queue": signal_report.review_queue,
7994 "proof_readiness": signal_report.proof_readiness,
7995 "repair_plan": build_repair_plan(&diagnostics),
7996 })
7997}
7998
/// Normalize a frontier: canonicalize entity types/names, recompute
/// confidence, optionally rewrite finding ids to their content addresses,
/// and materialize derived source / evidence-atom / condition records.
///
/// Write modes are mutually exclusive: at most one of `--write` (in place)
/// or `--out` (new file); `--dry-run` excludes both. `--id-map` only makes
/// sense together with `--rewrite-ids`.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Reject contradictory flag combinations up front (fail exits).
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packets are immutable exports; normalizing one would desync it
    // from the frontier state it was derived from.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Anything beyond the initial "frontier.created" event means the state
    // has a canonical event history; refuse destructive writes then.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutating so we can report whether they changed.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Record collection sizes before materialization so the deltas below
    // count only newly materialized records.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Map old finding id -> expected content address, for findings
        // whose stored id has drifted from their content.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // A collision would silently merge two distinct findings; abort.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Apply the rewrites, preserving the old id as previous_version.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Second pass: retarget links that pointed at rewritten ids.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old->new id mapping for external tooling.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    // Materialize derivable source / evidence / condition records, then
    // compute how many were added relative to the pre-materialization counts.
    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist only when a write mode was chosen; otherwise this is a dry run.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    // Total change count across all normalization categories.
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            " entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            " would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8186
/// Export a proof packet for `frontier` into `out` using the named
/// `template`, optionally running a gold-standard benchmark, then validate
/// the packet and (optionally) record the proof export back into the
/// frontier state.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    // Only one proof template is currently supported.
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // If a gold file was given, run the benchmark suite, attach its summary
    // to the packet, and abort unless the suite reports ok=true.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // Validate the exported packet before touching the frontier state.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export (hashes + timestamp) in the in-memory frontier.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Only persist the updated proof state when explicitly requested.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8290
8291fn cmd_status(path: &Path, json: bool) {
8295 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8296
8297 let mut pending_total = 0usize;
8299 let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
8300 std::collections::BTreeMap::new();
8301 for p in &project.proposals {
8302 if p.status == "pending_review" {
8303 pending_total += 1;
8304 *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
8305 }
8306 }
8307
8308 let audit = crate::causal_reasoning::audit_frontier(&project);
8310 let audit_summary = crate::causal_reasoning::summarize_audit(&audit);
8311
8312 let mut last_sync: Option<&crate::events::StateEvent> = None;
8314 let mut last_conflict: Option<&crate::events::StateEvent> = None;
8315 let mut total_conflicts = 0usize;
8316 for e in &project.events {
8317 match e.kind.as_str() {
8318 "frontier.synced_with_peer" => {
8319 if last_sync
8320 .map(|prev| e.timestamp > prev.timestamp)
8321 .unwrap_or(true)
8322 {
8323 last_sync = Some(e);
8324 }
8325 }
8326 "frontier.conflict_detected" => {
8327 total_conflicts += 1;
8328 if last_conflict
8329 .map(|prev| e.timestamp > prev.timestamp)
8330 .unwrap_or(true)
8331 {
8332 last_conflict = Some(e);
8333 }
8334 }
8335 _ => {}
8336 }
8337 }
8338
8339 let mut targets_with_success = std::collections::HashSet::new();
8341 let mut failed_replications = 0usize;
8342 for r in &project.replications {
8343 if r.outcome == "replicated" {
8344 targets_with_success.insert(r.target_finding.clone());
8345 } else if r.outcome == "failed" {
8346 failed_replications += 1;
8347 }
8348 }
8349
8350 if json {
8351 println!(
8352 "{}",
8353 serde_json::to_string_pretty(&json!({
8354 "ok": true,
8355 "command": "status",
8356 "frontier": frontier_label(&project),
8357 "vfr_id": project.frontier_id(),
8358 "findings": project.findings.len(),
8359 "events": project.events.len(),
8360 "actors": project.actors.len(),
8361 "peers": project.peers.len(),
8362 "inbox": {
8363 "pending_total": pending_total,
8364 "pending_by_kind": pending_by_kind,
8365 },
8366 "causal_audit": {
8367 "identified": audit_summary.identified,
8368 "conditional": audit_summary.conditional,
8369 "underidentified": audit_summary.underidentified,
8370 "underdetermined": audit_summary.underdetermined,
8371 },
8372 "replications": {
8373 "total": project.replications.len(),
8374 "findings_with_success": targets_with_success.len(),
8375 "failed": failed_replications,
8376 },
8377 "federation": {
8378 "peers": project.peers.len(),
8379 "last_sync": last_sync.map(|e| e.timestamp.clone()),
8380 "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
8381 "total_conflicts": total_conflicts,
8382 },
8383 }))
8384 .expect("serialize status")
8385 );
8386 return;
8387 }
8388
8389 println!();
8390 println!(
8391 " {}",
8392 format!("VELA · STATUS · {}", path.display())
8393 .to_uppercase()
8394 .dimmed()
8395 );
8396 println!(" {}", style::tick_row(60));
8397 println!();
8398 println!(" frontier: {}", frontier_label(&project));
8399 println!(" vfr_id: {}", project.frontier_id());
8400 println!(
8401 " findings: {} events: {} peers: {} actors: {}",
8402 project.findings.len(),
8403 project.events.len(),
8404 project.peers.len(),
8405 project.actors.len(),
8406 );
8407 println!();
8408 if pending_total > 0 {
8409 println!(
8410 " {} {pending_total} pending proposals",
8411 style::warn("inbox")
8412 );
8413 for (k, n) in &pending_by_kind {
8414 println!(" · {n:>3} {k}");
8415 }
8416 } else {
8417 println!(" {} inbox clean", style::ok("ok"));
8418 }
8419 println!();
8420 if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
8421 let chip = if audit_summary.underidentified > 0 {
8422 style::lost("audit")
8423 } else {
8424 style::warn("audit")
8425 };
8426 println!(
8427 " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
8428 chip,
8429 audit_summary.identified,
8430 audit_summary.conditional,
8431 audit_summary.underidentified,
8432 audit_summary.underdetermined,
8433 );
8434 if audit_summary.underidentified > 0 {
8435 println!(
8436 " next: vela causal audit {} --problems-only",
8437 path.display()
8438 );
8439 }
8440 } else if audit_summary.underdetermined == 0 {
8441 println!(
8442 " {} causal audit: all {} identified",
8443 style::ok("ok"),
8444 audit_summary.identified
8445 );
8446 } else {
8447 println!(
8448 " {} causal audit: {} identified, {} ungraded",
8449 style::warn("audit"),
8450 audit_summary.identified,
8451 audit_summary.underdetermined,
8452 );
8453 }
8454 println!();
8455 if !project.replications.is_empty() {
8456 println!(
8457 " {} {} records · {} findings replicated · {} failed",
8458 style::ok("replications"),
8459 project.replications.len(),
8460 targets_with_success.len(),
8461 failed_replications,
8462 );
8463 }
8464 if project.peers.is_empty() {
8465 println!(
8466 " {} no federation peers registered",
8467 style::warn("federation")
8468 );
8469 } else {
8470 let last = last_sync
8471 .map(|e| fmt_timestamp(&e.timestamp))
8472 .unwrap_or_else(|| "never".to_string());
8473 let chip = if total_conflicts > 0 {
8474 style::warn("federation")
8475 } else {
8476 style::ok("federation")
8477 };
8478 println!(
8479 " {} {} peer(s) · last sync {} · {} conflict events",
8480 chip,
8481 project.peers.len(),
8482 last,
8483 total_conflicts,
8484 );
8485 }
8486 println!();
8487}
8488
8489fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8491 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8492 let mut events: Vec<&crate::events::StateEvent> = project
8493 .events
8494 .iter()
8495 .filter(|e| match kind_filter {
8496 Some(k) => e.kind.contains(k),
8497 None => true,
8498 })
8499 .collect();
8500 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8501 events.truncate(limit);
8502
8503 if json {
8504 let payload: Vec<_> = events
8505 .iter()
8506 .map(|e| {
8507 json!({
8508 "id": e.id,
8509 "kind": e.kind,
8510 "actor": e.actor.id,
8511 "target": &e.target.id,
8512 "target_type": &e.target.r#type,
8513 "timestamp": e.timestamp,
8514 "reason": e.reason,
8515 })
8516 })
8517 .collect();
8518 println!(
8519 "{}",
8520 serde_json::to_string_pretty(&json!({
8521 "ok": true,
8522 "command": "log",
8523 "events": payload,
8524 }))
8525 .expect("serialize log")
8526 );
8527 return;
8528 }
8529
8530 println!();
8531 println!(
8532 " {}",
8533 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8534 .to_uppercase()
8535 .dimmed()
8536 );
8537 println!(" {}", style::tick_row(60));
8538 if events.is_empty() {
8539 println!(" (no events)");
8540 return;
8541 }
8542 for e in &events {
8543 let when = fmt_timestamp(&e.timestamp);
8544 let target_short = if e.target.id.len() > 22 {
8545 format!("{}…", &e.target.id[..21])
8546 } else {
8547 e.target.id.clone()
8548 };
8549 let reason: String = e.reason.chars().take(70).collect();
8550 println!(
8551 " {:<19} {:<32} {:<24} {}",
8552 when, e.kind, target_short, reason
8553 );
8554 }
8555 println!();
8556}
8557
8558fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8560 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8561
8562 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8565 std::collections::HashMap::new();
8566 for p in &project.proposals {
8567 if p.kind != "finding.note" {
8568 continue;
8569 }
8570 if p.actor.id != "agent:reviewer-agent" {
8571 continue;
8572 }
8573 let reason = &p.reason;
8574 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8575 continue;
8576 };
8577 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8578 let extract = |k: &str| -> f64 {
8579 let pat = format!("{k} ");
8580 text.find(&pat)
8581 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8582 .and_then(|t| t.parse::<f64>().ok())
8583 .unwrap_or(0.0)
8584 };
8585 score_map.insert(
8586 target.to_string(),
8587 (
8588 extract("plausibility"),
8589 extract("evidence"),
8590 extract("scope"),
8591 extract("duplicate-risk"),
8592 ),
8593 );
8594 }
8595
8596 let mut pending: Vec<&crate::proposals::StateProposal> = project
8597 .proposals
8598 .iter()
8599 .filter(|p| {
8600 p.status == "pending_review"
8601 && match kind_filter {
8602 Some(k) => p.kind.contains(k),
8603 None => true,
8604 }
8605 })
8606 .collect();
8607 pending.sort_by(|a, b| {
8609 let sa = score_map
8610 .get(&a.id)
8611 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8612 let sb = score_map
8613 .get(&b.id)
8614 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8615 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8616 });
8617 pending.truncate(limit);
8618
8619 if json {
8620 let payload: Vec<_> = pending
8621 .iter()
8622 .map(|p| {
8623 let assertion_text = p
8624 .payload
8625 .get("finding")
8626 .and_then(|f| f.get("assertion"))
8627 .and_then(|a| a.get("text"))
8628 .and_then(|t| t.as_str());
8629 let assertion_type = p
8630 .payload
8631 .get("finding")
8632 .and_then(|f| f.get("assertion"))
8633 .and_then(|a| a.get("type"))
8634 .and_then(|t| t.as_str());
8635 let composite = score_map
8636 .get(&p.id)
8637 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8638 json!({
8639 "proposal_id": p.id,
8640 "kind": p.kind,
8641 "actor": p.actor,
8642 "reason": p.reason,
8643 "assertion_text": assertion_text,
8644 "assertion_type": assertion_type,
8645 "reviewer_composite": composite,
8646 })
8647 })
8648 .collect();
8649 println!(
8650 "{}",
8651 serde_json::to_string_pretty(&json!({
8652 "ok": true,
8653 "command": "inbox",
8654 "shown": pending.len(),
8655 "proposals": payload,
8656 }))
8657 .expect("serialize inbox")
8658 );
8659 return;
8660 }
8661
8662 println!();
8663 println!(
8664 " {}",
8665 format!(
8666 "VELA · INBOX · {} ({} pending shown)",
8667 path.display(),
8668 pending.len()
8669 )
8670 .to_uppercase()
8671 .dimmed()
8672 );
8673 println!(" {}", style::tick_row(60));
8674 if pending.is_empty() {
8675 println!(" (inbox clean)");
8676 return;
8677 }
8678 for p in &pending {
8679 let assertion_text = p
8680 .payload
8681 .get("finding")
8682 .and_then(|f| f.get("assertion"))
8683 .and_then(|a| a.get("text"))
8684 .and_then(|t| t.as_str())
8685 .unwrap_or("");
8686 let assertion_type = p
8687 .payload
8688 .get("finding")
8689 .and_then(|f| f.get("assertion"))
8690 .and_then(|a| a.get("type"))
8691 .and_then(|t| t.as_str())
8692 .unwrap_or("");
8693 let composite = score_map
8694 .get(&p.id)
8695 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8696 let score_str = composite
8697 .map(|c| format!("[{:.2}]", c))
8698 .unwrap_or_else(|| "[—] ".to_string());
8699 let kind_short = if p.kind.len() > 12 {
8700 format!("{}…", &p.kind[..11])
8701 } else {
8702 p.kind.clone()
8703 };
8704 let summary: String = if !assertion_text.is_empty() {
8705 assertion_text.chars().take(80).collect()
8706 } else {
8707 p.reason.chars().take(80).collect()
8708 };
8709 println!(
8710 " {} {} {:<13} {:<18} {}",
8711 score_str, p.id, kind_short, assertion_type, summary
8712 );
8713 }
8714 println!();
8715}
8716
8717fn cmd_ask(path: &Path, question: &str, json: bool) {
8722 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8723
8724 if question.trim().is_empty() {
8725 use std::io::{BufRead, Write};
8727 println!();
8728 println!(
8729 " {}",
8730 format!("VELA · ASK · {}", path.display())
8731 .to_uppercase()
8732 .dimmed()
8733 );
8734 println!(" {}", style::tick_row(60));
8735 println!(" Ask a question. Type `exit` to quit.");
8736 println!(" Examples:");
8737 println!(" · what's pending?");
8738 println!(" · what's underidentified?");
8739 println!(" · how many findings?");
8740 println!(" · what changed recently?");
8741 println!(" · who has what calibration?");
8742 println!();
8743 let stdin = std::io::stdin();
8744 let mut stdout = std::io::stdout();
8745 loop {
8746 print!(" ask> ");
8747 stdout.flush().ok();
8748 let mut line = String::new();
8749 if stdin.lock().read_line(&mut line).is_err() {
8750 break;
8751 }
8752 let q = line.trim();
8753 if q.is_empty() {
8754 continue;
8755 }
8756 if matches!(q, "exit" | "quit" | "q") {
8757 break;
8758 }
8759 answer(&project, q, false);
8760 }
8761 return;
8762 }
8763
8764 answer(&project, question, json);
8765}
8766
8767fn answer(project: &crate::project::Project, q: &str, json: bool) {
8768 let lower = q.to_lowercase();
8769
8770 if lower.contains("pending")
8772 || lower.contains("inbox")
8773 || lower.contains("queue")
8774 || lower.contains("to review")
8775 {
8776 let pending: Vec<&crate::proposals::StateProposal> = project
8777 .proposals
8778 .iter()
8779 .filter(|p| p.status == "pending_review")
8780 .collect();
8781 let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
8782 for p in &pending {
8783 *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
8784 }
8785 if json {
8786 println!(
8787 "{}",
8788 serde_json::to_string_pretty(&json!({
8789 "answer": "pending",
8790 "total": pending.len(),
8791 "by_kind": by_kind,
8792 }))
8793 .unwrap()
8794 );
8795 } else {
8796 println!(" {} pending proposals.", pending.len());
8797 for (k, n) in &by_kind {
8798 println!(" · {n:>3} {k}");
8799 }
8800 if pending.is_empty() {
8801 println!(" Inbox is clean.");
8802 } else {
8803 println!(" Run `vela inbox <frontier>` to triage.");
8804 }
8805 }
8806 return;
8807 }
8808
8809 if lower.contains("underident")
8811 || lower.contains("audit")
8812 || lower.contains("identif")
8813 || lower.contains("causal")
8814 {
8815 let entries = crate::causal_reasoning::audit_frontier(project);
8816 let summary = crate::causal_reasoning::summarize_audit(&entries);
8817 if json {
8818 println!(
8819 "{}",
8820 serde_json::to_string_pretty(&json!({
8821 "answer": "audit",
8822 "summary": {
8823 "identified": summary.identified,
8824 "conditional": summary.conditional,
8825 "underidentified": summary.underidentified,
8826 "underdetermined": summary.underdetermined,
8827 },
8828 }))
8829 .unwrap()
8830 );
8831 } else {
8832 println!(
8833 " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
8834 summary.identified,
8835 summary.conditional,
8836 summary.underidentified,
8837 summary.underdetermined,
8838 );
8839 if summary.underidentified > 0 {
8840 println!(
8841 " The {} underidentified findings are concrete review items:",
8842 summary.underidentified
8843 );
8844 for e in entries
8845 .iter()
8846 .filter(|e| {
8847 matches!(
8848 e.verdict,
8849 crate::causal_reasoning::Identifiability::Underidentified
8850 )
8851 })
8852 .take(8)
8853 {
8854 let txt: String = e.assertion_text.chars().take(70).collect();
8855 println!(" · {} {}", e.finding_id, txt);
8856 }
8857 }
8858 }
8859 return;
8860 }
8861
8862 if lower.contains("recent")
8864 || lower.contains("changed")
8865 || lower.contains("latest")
8866 || lower.contains("happen")
8867 {
8868 let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
8869 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8870 events.truncate(8);
8871 if json {
8872 println!(
8873 "{}",
8874 serde_json::to_string_pretty(&json!({
8875 "answer": "recent_events",
8876 "events": events.iter().map(|e| json!({
8877 "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
8878 "actor": e.actor.id, "target": e.target.id,
8879 })).collect::<Vec<_>>(),
8880 }))
8881 .unwrap()
8882 );
8883 } else {
8884 println!(" Most recent {} events:", events.len());
8885 for e in &events {
8886 let when = fmt_timestamp(&e.timestamp);
8887 println!(" · {when} {:<28} {}", e.kind, e.target.id);
8888 }
8889 }
8890 return;
8891 }
8892
8893 if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
8895 let n = project.findings.len();
8896 let evs = project.events.len();
8897 let peers = project.peers.len();
8898 let actors = project.actors.len();
8899 if json {
8900 println!(
8901 "{}",
8902 serde_json::to_string_pretty(&json!({
8903 "answer": "counts",
8904 "findings": n,
8905 "events": evs,
8906 "peers": peers,
8907 "actors": actors,
8908 "replications": project.replications.len(),
8909 "predictions": project.predictions.len(),
8910 }))
8911 .unwrap()
8912 );
8913 } else {
8914 println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
8915 println!(
8916 " {} replications · {} predictions · {} datasets · {} code artifacts.",
8917 project.replications.len(),
8918 project.predictions.len(),
8919 project.datasets.len(),
8920 project.code_artifacts.len(),
8921 );
8922 }
8923 return;
8924 }
8925
8926 if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
8928 let records =
8929 crate::calibration::calibration_records(&project.predictions, &project.resolutions);
8930 if json {
8931 println!("{}", serde_json::to_string_pretty(&records).unwrap());
8932 } else if records.is_empty() {
8933 println!(" No predictions yet. The calibration ledger is empty.");
8934 } else {
8935 println!(" Calibration over {} actor(s):", records.len());
8936 for r in &records {
8937 let brier = r
8938 .brier_score
8939 .map(|b| format!("{:.3}", b))
8940 .unwrap_or_else(|| "—".into());
8941 println!(
8942 " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
8943 r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
8944 );
8945 }
8946 }
8947 return;
8948 }
8949
8950 if lower.contains("peer")
8952 || lower.contains("federat")
8953 || lower.contains("sync")
8954 || lower.contains("conflict")
8955 {
8956 let mut total_conflicts = 0usize;
8957 for e in &project.events {
8958 if e.kind == "frontier.conflict_detected" {
8959 total_conflicts += 1;
8960 }
8961 }
8962 if json {
8963 println!(
8964 "{}",
8965 serde_json::to_string_pretty(&json!({
8966 "answer": "federation",
8967 "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
8968 "total_conflicts": total_conflicts,
8969 }))
8970 .unwrap()
8971 );
8972 } else {
8973 println!(" {} peer(s) registered:", project.peers.len());
8974 for p in &project.peers {
8975 println!(" · {:<24} {}", p.id, p.url);
8976 }
8977 println!(" {total_conflicts} conflict events on the canonical log.");
8978 }
8979 return;
8980 }
8981
8982 if json {
8984 println!(
8985 "{}",
8986 serde_json::to_string_pretty(&json!({
8987 "answer": "unknown_question",
8988 "question": q,
8989 "hint": "Try: pending, audit, recent, how many, calibration, peers."
8990 }))
8991 .unwrap()
8992 );
8993 } else {
8994 println!(" Don't know how to route that question yet.");
8995 println!(" Try: pending · audit · recent · how many · calibration · peers");
8996 }
8997}
8998
8999fn frontier_label(p: &crate::project::Project) -> String {
9000 if p.project.name.trim().is_empty() {
9001 "(unnamed)".to_string()
9002 } else {
9003 p.project.name.clone()
9004 }
9005}
9006
9007fn fmt_timestamp(ts: &str) -> String {
9008 chrono::DateTime::parse_from_rfc3339(ts)
9011 .map(|dt| dt.format("%m-%d %H:%M").to_string())
9012 .unwrap_or_else(|_| ts.chars().take(16).collect())
9013}
9014
9015fn cmd_stats(path: &Path) {
9016 let frontier = load_frontier_or_fail(path);
9017 let s = &frontier.stats;
9018 println!();
9019 println!(" {}", "FRONTIER · V0.36.0".dimmed());
9020 println!(" {}", frontier.project.name.bold());
9021 println!(" {}", style::tick_row(60));
9022 println!(" id: {}", frontier.frontier_id());
9023 println!(" compiled: {}", frontier.project.compiled_at);
9024 println!(" papers: {}", frontier.project.papers_processed);
9025 println!(" findings: {}", s.findings);
9026 println!(" links: {}", s.links);
9027 println!(" replicated: {}", s.replicated);
9028 println!(" avg confidence: {}", s.avg_confidence);
9029 println!(" gaps: {}", s.gaps);
9030 println!(" contested: {}", s.contested);
9031 println!(" reviewed: {}", s.human_reviewed);
9032 println!(" proposals: {}", s.proposal_count);
9033 println!(
9034 " recorded proof: {}",
9035 frontier.proof_state.latest_packet.status
9036 );
9037 if frontier.proof_state.latest_packet.status != "never_exported" {
9038 println!(
9039 " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
9040 );
9041 }
9042 if !s.categories.is_empty() {
9043 println!();
9044 println!(" {}", "categories".dimmed());
9045 let mut categories = s.categories.iter().collect::<Vec<_>>();
9046 categories.sort_by(|a, b| b.1.cmp(a.1));
9047 for (category, count) in categories {
9048 println!(" {category}: {}", count);
9049 }
9050 }
9051 println!();
9052 println!(" {}", style::tick_row(60));
9053 println!();
9054}
9055
/// Dispatch for `vela proposals <subcommand>`.
///
/// Each arm delegates to the `proposals` module and renders either a
/// pretty-printed JSON payload (`--json`) or a short text summary.
/// Errors from the proposals layer abort the process via `fail_return`.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals in a frontier, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    // Count comes from the already-built payload array.
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report before/after counts without persisting.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // The three sections below are only shown when non-empty.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external source file into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposals file without touching any frontier.
        // Exits with status 1 if any proposal is invalid.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Non-zero exit so CI / scripts can detect validation failure.
                std::process::exit(1);
            }
        }
        // Export (optionally status-filtered) proposals to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the resulting event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal; no event id is produced, only a status change.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9330
/// Import an artifact packet into a frontier and print the resulting
/// import report (JSON or text). Import errors abort via `fail_return`.
///
/// `apply_artifacts` is forwarded to the importer; when set, artifact
/// events are applied directly rather than left as pending proposals.
fn cmd_artifact_to_state(
    frontier: &Path,
    packet: &Path,
    actor: &str,
    apply_artifacts: bool,
    json: bool,
) {
    let report =
        crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
            .unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report)
                .expect("failed to serialize artifact-to-state report")
        );
    } else {
        println!("vela artifact-to-state");
        println!(" packet: {}", report.packet_id);
        println!(" frontier: {}", report.frontier);
        println!(" artifact proposals: {}", report.artifact_proposals);
        println!(" finding proposals: {}", report.finding_proposals);
        println!(" gap proposals: {}", report.gap_proposals);
        println!(
            " applied artifact events: {}",
            report.applied_artifact_events
        );
        println!(
            " pending truth proposals: {}",
            report.pending_truth_proposals
        );
    }
}
9364
/// Dispatch for `vela bridge-kit <subcommand>`.
///
/// `Validate` checks packet files structurally; `VerifyProvenance`
/// resolves DOIs/PMIDs over the network. Both exit with status 1 on
/// failure (invalid report / any unresolved identifier).
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        // Structural validation of one or more bridge-kit packets.
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            // packet_id is optional; show a placeholder when absent.
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Exit non-zero after printing so the report is still visible.
            if !report.ok {
                std::process::exit(1);
            }
        }
        // Network check that every identifier in the packet resolves.
        BridgeKitAction::VerifyProvenance { packet, json } => {
            let report = verify_packet_provenance(&packet).await;
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Fixed-width status chip for aligned output.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        // Prefer the explanatory note; fall back to the kind.
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            // Unresolved identifiers are failures; skipped ones are not.
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
9441
/// JSON-serializable summary of a `bridge-kit verify-provenance` run.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    // Command tag embedded in the output ("bridge-kit.verify-provenance").
    command: String,
    // Display path of the packet file that was checked.
    packet: String,
    // Per-identifier resolution results, in deterministic (sorted) order.
    identifiers: Vec<ProvenanceVerificationEntry>,
    // Number of entries with status "resolved".
    resolved_count: usize,
    // Number of entries with status "unresolved".
    unresolved_count: usize,
    // Number of entries with any other status (e.g. "skipped").
    skipped_count: usize,
}
9451
/// Resolution result for a single identifier extracted from a packet.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    // Canonical identifier, e.g. "doi:10.1000/x" or "pmid:12345".
    identifier: String,
    // Identifier class: "doi", "pmid", or "unknown".
    kind: String,
    // One of "resolved", "unresolved", or "skipped".
    status: String,
    // Optional human-readable explanation; omitted from JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
9460
/// Check every DOI/PMID referenced by an artifact packet against the public
/// registries (Crossref for DOIs, NCBI eutils for PMIDs) and build a report
/// of per-identifier status plus aggregate counts.
///
/// Aborts the process via `fail_return` if the packet cannot be read,
/// parsed, or validated, or if the HTTP client fails to build.
async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
    use crate::artifact_to_state::ArtifactPacket;
    // Load and structurally validate the packet before any network work.
    let raw = std::fs::read_to_string(packet_path)
        .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
    let parsed: ArtifactPacket =
        serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
    let packet = parsed
        .validate()
        .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));

    // Collect candidate identifiers from artifact locators and claim
    // source refs. BTreeSet both dedupes and gives deterministic order.
    let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    for artifact in &packet.artifacts {
        if let Some(ident) = extract_identifier(&artifact.locator) {
            candidates.insert(ident);
        }
    }
    for claim in &packet.candidate_claims {
        for source_ref in &claim.source_refs {
            if let Some(ident) = extract_identifier(source_ref) {
                candidates.insert(ident);
            }
        }
    }

    // One shared client with an explicit UA and a per-request timeout.
    let client = reqwest::Client::builder()
        .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));

    // Resolve each identifier sequentially and tally by status.
    let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
    let mut resolved = 0usize;
    let mut unresolved = 0usize;
    let mut skipped = 0usize;
    for candidate in &candidates {
        let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
            verify_doi(&client, doi).await
        } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
            verify_pmid(&client, pmid).await
        } else {
            // Unrecognized scheme: recorded, but never counted as a failure.
            ProvenanceVerificationEntry {
                identifier: candidate.clone(),
                kind: "unknown".to_string(),
                status: "skipped".to_string(),
                note: Some("no recognized identifier prefix".to_string()),
            }
        };
        match entry.status.as_str() {
            "resolved" => resolved += 1,
            "unresolved" => unresolved += 1,
            _ => skipped += 1,
        }
        entries.push(entry);
    }

    ProvenanceVerificationReport {
        command: "bridge-kit.verify-provenance".to_string(),
        packet: packet_path.display().to_string(),
        identifiers: entries,
        resolved_count: resolved,
        unresolved_count: unresolved,
        skipped_count: skipped,
    }
}
9530
/// Normalize a free-form locator or source reference into a canonical
/// `doi:` / `pmid:` identifier, if one can be recognized.
///
/// Accepts already-prefixed identifiers, doi.org and dx.doi.org resolver
/// URLs (http or https), PubMed article URLs, and bare DOIs that start
/// with the "10." registrant prefix. Returns `None` for anything else.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Already in canonical form: pass through unchanged.
    if trimmed.starts_with("doi:") || trimmed.starts_with("pmid:") {
        return Some(trimmed.to_string());
    }
    // DOI resolver URLs. Fix: the original list handled http://doi.org/
    // and https://dx.doi.org/ but omitted http://dx.doi.org/, silently
    // dropping valid legacy DOI links.
    for prefix in [
        "https://doi.org/",
        "http://doi.org/",
        "https://dx.doi.org/",
        "http://dx.doi.org/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    // PubMed article URLs; a trailing slash is not part of the PMID.
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let pmid = rest.trim_end_matches('/');
            return Some(format!("pmid:{pmid}"));
        }
    }
    // Bare DOI heuristic: registrant prefix "10.", contains a suffix
    // separator, and has no whitespace.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
9566
9567async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
9568 let url = format!("https://api.crossref.org/works/{doi}");
9569 match client.get(&url).send().await {
9570 Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
9571 identifier: format!("doi:{doi}"),
9572 kind: "doi".to_string(),
9573 status: "resolved".to_string(),
9574 note: None,
9575 },
9576 Ok(resp) => ProvenanceVerificationEntry {
9577 identifier: format!("doi:{doi}"),
9578 kind: "doi".to_string(),
9579 status: "unresolved".to_string(),
9580 note: Some(format!("crossref returned {}", resp.status())),
9581 },
9582 Err(e) => ProvenanceVerificationEntry {
9583 identifier: format!("doi:{doi}"),
9584 kind: "doi".to_string(),
9585 status: "skipped".to_string(),
9586 note: Some(format!("crossref unreachable: {e}")),
9587 },
9588 }
9589}
9590
9591async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
9592 let url = format!(
9593 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
9594 );
9595 match client.get(&url).send().await {
9596 Ok(resp) if resp.status().is_success() => {
9597 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9600 let result = body.get("result");
9601 let uids = result
9602 .and_then(|r| r.get("uids"))
9603 .and_then(|u| u.as_array());
9604 let resolved = uids.is_some_and(|a| !a.is_empty());
9605 if resolved {
9606 ProvenanceVerificationEntry {
9607 identifier: format!("pmid:{pmid}"),
9608 kind: "pmid".to_string(),
9609 status: "resolved".to_string(),
9610 note: None,
9611 }
9612 } else {
9613 ProvenanceVerificationEntry {
9614 identifier: format!("pmid:{pmid}"),
9615 kind: "pmid".to_string(),
9616 status: "unresolved".to_string(),
9617 note: Some("eutils returned empty uids".to_string()),
9618 }
9619 }
9620 }
9621 Ok(resp) => ProvenanceVerificationEntry {
9622 identifier: format!("pmid:{pmid}"),
9623 kind: "pmid".to_string(),
9624 status: "unresolved".to_string(),
9625 note: Some(format!("eutils returned {}", resp.status())),
9626 },
9627 Err(e) => ProvenanceVerificationEntry {
9628 identifier: format!("pmid:{pmid}"),
9629 kind: "pmid".to_string(),
9630 status: "skipped".to_string(),
9631 note: Some(format!("eutils unreachable: {e}")),
9632 },
9633 }
9634}
9635
/// Dispatch for `vela source-adapter <subcommand>`.
///
/// `Run` forwards all CLI flags into a `SourceAdapterRunOptions` bundle,
/// awaits the adapter run, and prints the resulting report (JSON or
/// text). Run errors abort the process via `fail_return`.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            // All options are passed through verbatim; the adapter layer
            // owns interpretation of dry_run / apply_artifacts semantics.
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // A packet is only produced on some runs.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
9693
/// Dispatch for `vela runtime-adapter <subcommand>`.
///
/// `Run` bundles the CLI flags into `RuntimeAdapterRunOptions`, runs the
/// adapter synchronously, and prints the report (JSON or text). Run
/// errors abort the process via `fail_return`.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                // A packet is only produced on some runs.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
9746
/// Dispatch for `vela sign <subcommand>`: keypair generation, frontier
/// signing, signature verification, and per-finding signature thresholds.
///
/// Errors from the sign/repo layers abort the process via `fail_return`;
/// bad CLI values abort via `fail`.
fn cmd_sign(action: SignAction) {
    match action {
        // Create a new keypair on disk and report the public half.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        // Sign every finding in a frontier with the given private key.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures across the frontier; public key is optional.
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold details only matter when thresholds are in use.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set a finding's required-signature count and recompute joint
        // acceptance across the frontier.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // `fail` never returns here (required by the let-else form).
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute before reading `jointly_accepted`, then persist.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
9875
/// Dispatch for `vela actor <subcommand>`: register or list signing
/// actors on a frontier.
///
/// Validation failures abort via `fail`; repo/sign-layer errors abort
/// via `fail_return`.
fn cmd_actor(action: ActorAction) {
    match action {
        ActorAction::Add {
            frontier,
            id,
            pubkey,
            tier,
            orcid,
            clearance,
            json,
        } => {
            // Require exactly 64 hex chars (a 32-byte Ed25519 public key).
            let trimmed = pubkey.trim();
            if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
                fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
            }
            // Optional fields are validated/normalized before any load.
            let orcid_normalized = orcid
                .as_deref()
                .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
            let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
                crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
            });

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Actor ids are unique per frontier.
            if project.actors.iter().any(|actor| actor.id == id) {
                fail(&format!(
                    "Actor '{id}' already registered in this frontier."
                ));
            }
            project.actors.push(sign::ActorRecord {
                id: id.clone(),
                public_key: trimmed.to_string(),
                algorithm: "ed25519".to_string(),
                created_at: chrono::Utc::now().to_rfc3339(),
                tier: tier.clone(),
                orcid: orcid_normalized.clone(),
                access_clearance: clearance,
            });
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "actor.add",
                "frontier": frontier.display().to_string(),
                "actor_id": id,
                "public_key": trimmed,
                "tier": tier,
                "orcid": orcid_normalized,
                "registered_count": project.actors.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
                );
            } else {
                let tier_suffix = tier
                    .as_deref()
                    .map_or_else(String::new, |t| format!(" tier={t}"));
                println!(
                    "{} actor {} (pubkey {}{tier_suffix})",
                    style::ok("registered"),
                    id,
                    // Safe slice: length was checked to be exactly 64 above.
                    &trimmed[..16]
                );
            }
        }
        ActorAction::List { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "actor.list",
                    "frontier": frontier.display().to_string(),
                    "actors": project.actors,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · ACTOR · LIST · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.actors.is_empty() {
                    println!(" (no actors registered)");
                } else {
                    for actor in &project.actors {
                        println!(
                            " {:<28} {}… registered {}",
                            actor.id,
                            // NOTE(review): assumes stored keys are >= 16
                            // chars (they are 64 hex when added via Add);
                            // panics on a hand-edited shorter key.
                            &actor.public_key[..16],
                            actor.created_at
                        );
                    }
                }
            }
        }
    }
}
9983
9984fn cmd_causal(action: CausalAction) {
9986 use crate::causal_reasoning;
9987
9988 match action {
9989 CausalAction::Audit {
9990 frontier,
9991 problems_only,
9992 json,
9993 } => {
9994 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9995 let mut entries = causal_reasoning::audit_frontier(&project);
9996 if problems_only {
9997 entries.retain(|e| e.verdict.needs_reviewer_attention());
9998 }
9999 let summary = causal_reasoning::summarize_audit(&entries);
10000
10001 if json {
10002 println!(
10003 "{}",
10004 serde_json::to_string_pretty(&json!({
10005 "ok": true,
10006 "command": "causal.audit",
10007 "frontier": frontier.display().to_string(),
10008 "summary": summary,
10009 "entries": entries,
10010 }))
10011 .expect("serialize causal.audit")
10012 );
10013 return;
10014 }
10015
10016 println!();
10017 println!(
10018 " {}",
10019 format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
10020 .to_uppercase()
10021 .dimmed()
10022 );
10023 println!(" {}", style::tick_row(60));
10024 println!(
10025 " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
10026 summary.total,
10027 summary.identified,
10028 summary.conditional,
10029 summary.underidentified,
10030 summary.underdetermined,
10031 );
10032 if entries.is_empty() {
10033 println!(" (no entries to report)");
10034 return;
10035 }
10036 for e in &entries {
10037 let chip = match e.verdict {
10038 crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
10039 crate::causal_reasoning::Identifiability::Conditional => {
10040 style::warn("conditional")
10041 }
10042 crate::causal_reasoning::Identifiability::Underidentified => {
10043 style::lost("underidentified")
10044 }
10045 crate::causal_reasoning::Identifiability::Underdetermined => {
10046 style::warn("underdetermined")
10047 }
10048 };
10049 let claim = e
10050 .causal_claim
10051 .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
10052 let grade = e
10053 .causal_evidence_grade
10054 .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
10055 println!();
10056 println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
10057 let assertion_short: String = e.assertion_text.chars().take(78).collect();
10058 println!(" {assertion_short}");
10059 println!(" {} {}", style::ok("why:"), e.rationale);
10060 if e.verdict.needs_reviewer_attention()
10061 || matches!(
10062 e.verdict,
10063 crate::causal_reasoning::Identifiability::Underdetermined
10064 )
10065 {
10066 println!(" {} {}", style::ok("fix:"), e.remediation);
10067 }
10068 }
10069 }
10070 CausalAction::Effect {
10071 frontier,
10072 source,
10073 on: target,
10074 json,
10075 } => {
10076 use crate::causal_graph::{CausalEffectVerdict, identify_effect};
10077
10078 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10079 let verdict = identify_effect(&project, &source, &target);
10080
10081 if json {
10082 println!(
10083 "{}",
10084 serde_json::to_string_pretty(&json!({
10085 "ok": true,
10086 "command": "causal.effect",
10087 "frontier": frontier.display().to_string(),
10088 "source": source,
10089 "target": target,
10090 "verdict": verdict,
10091 }))
10092 .expect("serialize causal.effect")
10093 );
10094 return;
10095 }
10096
10097 println!();
10098 println!(
10099 " {}",
10100 format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
10101 .to_uppercase()
10102 .dimmed()
10103 );
10104 println!(" {}", style::tick_row(60));
10105 match verdict {
10106 CausalEffectVerdict::Identified {
10107 adjustment_set,
10108 back_door_paths_considered,
10109 } => {
10110 if adjustment_set.is_empty() {
10111 println!(
10112 " {} no back-door adjustment needed",
10113 style::ok("identified")
10114 );
10115 } else {
10116 println!(" {} identified by adjusting on:", style::ok("identified"));
10117 for z in &adjustment_set {
10118 println!(" · {z}");
10119 }
10120 }
10121 println!(
10122 " back-door paths considered: {}",
10123 back_door_paths_considered
10124 );
10125 }
10126 CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
10127 println!(
10128 " {} identified via front-door criterion (Pearl 1995 §3.3)",
10129 style::ok("identified")
10130 );
10131 println!(" mediators that intercept all directed paths:");
10132 for m in &mediator_set {
10133 println!(" · {m}");
10134 }
10135 println!(
10136 " applies when source-target confounders are unobserved but the mediator chain is."
10137 );
10138 }
10139 CausalEffectVerdict::NoCausalPath { reason } => {
10140 println!(" {} no causal path: {reason}", style::warn("no_path"));
10141 }
10142 CausalEffectVerdict::Underidentified {
10143 unblocked_back_door_paths,
10144 candidates_tried,
10145 } => {
10146 println!(
10147 " {} no observational adjustment set found ({} candidates tried)",
10148 style::lost("underidentified"),
10149 candidates_tried
10150 );
10151 println!(" open back-door paths:");
10152 for path in unblocked_back_door_paths.iter().take(5) {
10153 println!(" · {}", path.join(" — "));
10154 }
10155 println!(
10156 " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
10157 );
10158 }
10159 CausalEffectVerdict::UnknownNode { which } => {
10160 fail(&which);
10161 }
10162 }
10163 println!();
10164 }
10165 CausalAction::Graph {
10166 frontier,
10167 node,
10168 json,
10169 } => {
10170 use crate::causal_graph::CausalGraph;
10171 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10172 let graph = CausalGraph::from_project(&project);
10173
10174 let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
10177 if !graph.contains(n) {
10178 fail(&format!("node not in frontier: {n}"));
10179 }
10180 vec![n]
10181 } else {
10182 project.findings.iter().map(|f| f.id.as_str()).collect()
10183 };
10184
10185 if json {
10186 let payload: Vec<_> = nodes
10187 .iter()
10188 .map(|n| {
10189 let parents: Vec<&str> = graph.parents_of(n).collect();
10190 let children: Vec<&str> = graph.children_of(n).collect();
10191 json!({
10192 "node": n,
10193 "parents": parents,
10194 "children": children,
10195 })
10196 })
10197 .collect();
10198 println!(
10199 "{}",
10200 serde_json::to_string_pretty(&json!({
10201 "ok": true,
10202 "command": "causal.graph",
10203 "node_count": graph.node_count(),
10204 "edge_count": graph.edge_count(),
10205 "nodes": payload,
10206 }))
10207 .expect("serialize causal.graph")
10208 );
10209 return;
10210 }
10211
10212 println!();
10213 println!(
10214 " {}",
10215 format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
10216 .to_uppercase()
10217 .dimmed()
10218 );
10219 println!(" {}", style::tick_row(60));
10220 println!(
10221 " {} nodes · {} edges",
10222 graph.node_count(),
10223 graph.edge_count()
10224 );
10225 println!();
10226 for n in &nodes {
10227 let parents: Vec<&str> = graph.parents_of(n).collect();
10228 let children: Vec<&str> = graph.children_of(n).collect();
10229 if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
10230 continue; }
10232 println!(" {n}");
10233 if !parents.is_empty() {
10234 println!(" parents: {}", parents.join(", "));
10235 }
10236 if !children.is_empty() {
10237 println!(" children: {}", children.join(", "));
10238 }
10239 }
10240 }
10241 CausalAction::Counterfactual {
10242 frontier,
10243 intervene_on,
10244 set_to,
10245 target,
10246 json,
10247 } => {
10248 use crate::counterfactual::{
10249 CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
10250 };
10251
10252 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10253 let query = CounterfactualQuery {
10254 intervene_on: intervene_on.clone(),
10255 set_to,
10256 target: target.clone(),
10257 };
10258 let verdict = answer_counterfactual(&project, &query);
10259
10260 if json {
10261 println!(
10262 "{}",
10263 serde_json::to_string_pretty(&json!({
10264 "ok": true,
10265 "command": "causal.counterfactual",
10266 "frontier": frontier.display().to_string(),
10267 "query": query,
10268 "verdict": verdict,
10269 }))
10270 .expect("serialize causal.counterfactual")
10271 );
10272 return;
10273 }
10274
10275 println!();
10276 println!(
10277 " {}",
10278 format!(
10279 "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
10280 )
10281 .to_uppercase()
10282 .dimmed()
10283 );
10284 println!(" {}", style::tick_row(72));
10285 match verdict {
10286 CounterfactualVerdict::Resolved {
10287 factual,
10288 counterfactual,
10289 delta,
10290 paths_used,
10291 } => {
10292 println!(
10293 " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
10294 style::ok("resolved")
10295 );
10296 println!(
10297 " twin-network propagation through {} causal path(s):",
10298 paths_used.len()
10299 );
10300 for p in paths_used.iter().take(5) {
10301 println!(" · {}", p.join(" → "));
10302 }
10303 println!(
10304 " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
10305 instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
10306 );
10307 }
10308 CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
10309 println!(
10310 " {} causal path exists but {} edge(s) lack a mechanism annotation",
10311 style::warn("mechanism_unspecified"),
10312 unspecified_edges.len()
10313 );
10314 for (parent, child) in unspecified_edges.iter().take(8) {
10315 println!(" · {parent} → {child}");
10316 }
10317 println!(
10318 " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
10319 );
10320 }
10321 CounterfactualVerdict::NoCausalPath { factual } => {
10322 println!(
10323 " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
10324 style::warn("no_path")
10325 );
10326 }
10327 CounterfactualVerdict::UnknownNode { which } => {
10328 fail(&format!("node not in frontier: {which}"));
10329 }
10330 CounterfactualVerdict::InvalidIntervention { reason } => {
10331 fail(&reason);
10332 }
10333 }
10334 println!();
10335 }
10336 }
10337}
10338
10339fn cmd_bridges(action: BridgesAction) {
10342 use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
10343 use std::collections::HashMap;
10344
10345 fn bridges_dir(frontier: &Path) -> PathBuf {
10346 frontier.join(".vela/bridges")
10347 }
10348
10349 fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
10350 let path = bridges_dir(frontier).join(format!("{id}.json"));
10351 if !path.is_file() {
10352 return Err(format!("bridge not found: {id}"));
10353 }
10354 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
10355 serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
10356 }
10357
10358 fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
10359 let dir = bridges_dir(frontier);
10360 std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
10361 let path = dir.join(format!("{}.json", b.id));
10362 let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
10363 std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
10364 }
10365
10366 fn default_reviewer_id() -> String {
10369 std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
10370 }
10371
10372 fn emit_bridge_reviewed_event(
10383 frontier: &Path,
10384 bridge_id: &str,
10385 status: &str,
10386 reviewer_id: &str,
10387 note: Option<&str>,
10388 ) -> Result<(), String> {
10389 let mut payload = serde_json::json!({
10390 "bridge_id": bridge_id,
10391 "status": status,
10392 });
10393 if let Some(n) = note
10394 && !n.trim().is_empty()
10395 {
10396 payload["note"] = serde_json::Value::String(n.to_string());
10397 }
10398 let known_ids: Vec<String> = list_bridges(frontier)
10400 .unwrap_or_default()
10401 .into_iter()
10402 .map(|b| b.id)
10403 .collect();
10404 crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
10405 let event = crate::events::new_bridge_reviewed_event(
10406 bridge_id,
10407 reviewer_id,
10408 "human",
10409 &format!("Bridge {status} by {reviewer_id}"),
10410 payload,
10411 Vec::new(),
10412 );
10413 let events_dir = frontier.join(".vela/events");
10414 std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
10415 let event_path = events_dir.join(format!("{}.json", event.id));
10416 let data =
10417 serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
10418 std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
10419 }
10420
10421 fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
10422 let dir = bridges_dir(frontier);
10423 if !dir.is_dir() {
10424 return Ok(Vec::new());
10425 }
10426 let mut out = Vec::new();
10427 for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
10428 let entry = entry.map_err(|e| format!("read entry: {e}"))?;
10429 let path = entry.path();
10430 if path.extension().and_then(|s| s.to_str()) != Some("json") {
10431 continue;
10432 }
10433 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
10434 let b: Bridge =
10435 serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
10436 out.push(b);
10437 }
10438 out.sort_by(|a, b| {
10439 b.finding_refs
10440 .len()
10441 .cmp(&a.finding_refs.len())
10442 .then(a.entity_name.cmp(&b.entity_name))
10443 });
10444 Ok(out)
10445 }
10446
10447 match action {
10448 BridgesAction::Derive {
10449 frontier_a,
10450 label_a,
10451 frontier_b,
10452 label_b,
10453 json,
10454 } => {
10455 let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
10456 let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
10457 let now = chrono::Utc::now().to_rfc3339();
10458 let new_bridges =
10459 derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
10460
10461 let existing = list_bridges(&frontier_a).unwrap_or_default();
10465 let existing_by_id: HashMap<String, Bridge> =
10466 existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
10467 let mut written = 0;
10468 let mut preserved = 0;
10469 let mut new_ids = Vec::new();
10470 for mut bridge in new_bridges {
10471 if let Some(prev) = existing_by_id.get(&bridge.id)
10472 && prev.status != BridgeStatus::Derived
10473 {
10474 bridge.status = prev.status;
10476 bridge.derived_at = prev.derived_at.clone();
10477 preserved += 1;
10478 }
10479 save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
10480 new_ids.push(bridge.id.clone());
10481 written += 1;
10482 }
10483
10484 if json {
10485 println!(
10486 "{}",
10487 serde_json::to_string_pretty(&json!({
10488 "ok": true,
10489 "command": "bridges.derive",
10490 "frontier_a": frontier_a.display().to_string(),
10491 "frontier_b": frontier_b.display().to_string(),
10492 "bridges_written": written,
10493 "reviewer_judgments_preserved": preserved,
10494 "ids": new_ids,
10495 }))
10496 .expect("serialize bridges.derive")
10497 );
10498 return;
10499 }
10500
10501 println!();
10502 println!(
10503 " {}",
10504 format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
10505 .to_uppercase()
10506 .dimmed()
10507 );
10508 println!(" {}", style::tick_row(60));
10509 println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
10510 if preserved > 0 {
10511 println!(
10512 " {} {} reviewer judgment(s) preserved",
10513 style::ok("kept"),
10514 preserved
10515 );
10516 }
10517 for id in new_ids.iter().take(10) {
10518 println!(" · {id}");
10519 }
10520 if new_ids.len() > 10 {
10521 println!(" … and {} more", new_ids.len() - 10);
10522 }
10523 println!();
10524 }
10525 BridgesAction::List {
10526 frontier,
10527 status,
10528 json,
10529 } => {
10530 let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
10531 if let Some(s) = status.as_deref() {
10532 let want = match s.to_lowercase().as_str() {
10533 "derived" => BridgeStatus::Derived,
10534 "confirmed" => BridgeStatus::Confirmed,
10535 "refuted" => BridgeStatus::Refuted,
10536 other => fail_return(&format!(
10537 "unknown bridge status '{other}' (try derived|confirmed|refuted)"
10538 )),
10539 };
10540 bridges.retain(|b| b.status == want);
10541 }
10542 if json {
10543 println!(
10544 "{}",
10545 serde_json::to_string_pretty(&json!({
10546 "ok": true,
10547 "command": "bridges.list",
10548 "frontier": frontier.display().to_string(),
10549 "count": bridges.len(),
10550 "bridges": bridges,
10551 }))
10552 .expect("serialize bridges.list")
10553 );
10554 return;
10555 }
10556 println!();
10557 println!(
10558 " {}",
10559 format!("VELA · BRIDGES · LIST · {}", frontier.display())
10560 .to_uppercase()
10561 .dimmed()
10562 );
10563 println!(" {}", style::tick_row(60));
10564 println!(" {} bridge(s)", bridges.len());
10565 for b in &bridges {
10566 let chip = match b.status {
10567 BridgeStatus::Derived => style::warn("derived"),
10568 BridgeStatus::Confirmed => style::ok("confirmed"),
10569 BridgeStatus::Refuted => style::lost("refuted"),
10570 };
10571 println!();
10572 println!(
10573 " {chip} {} {} ↔ findings:{}",
10574 b.id,
10575 b.entity_name,
10576 b.finding_refs.len()
10577 );
10578 println!(" frontiers: {}", b.frontiers.join(", "));
10579 if let Some(t) = &b.tension {
10580 println!(" tension: {t}");
10581 }
10582 }
10583 println!();
10584 }
10585 BridgesAction::Show {
10586 frontier,
10587 bridge_id,
10588 json,
10589 } => {
10590 let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10591 if json {
10592 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10593 return;
10594 }
10595 println!();
10596 println!(
10597 " {}",
10598 format!("VELA · BRIDGES · SHOW · {}", b.id)
10599 .to_uppercase()
10600 .dimmed()
10601 );
10602 println!(" {}", style::tick_row(60));
10603 println!(" entity: {}", b.entity_name);
10604 println!(" status: {:?}", b.status);
10605 println!(" frontiers: {}", b.frontiers.join(", "));
10606 if !b.frontier_ids.is_empty() {
10607 println!(" frontier_ids: {}", b.frontier_ids.join(", "));
10608 }
10609 if let Some(t) = &b.tension {
10610 println!(" tension: {t}");
10611 }
10612 println!(" derived_at: {}", b.derived_at);
10613 println!(" finding refs ({}):", b.finding_refs.len());
10614 for r in &b.finding_refs {
10615 let dir = r.direction.as_deref().unwrap_or("—");
10616 let truncated: String = r.assertion_text.chars().take(72).collect();
10617 println!(
10618 " · [{}] {} (conf={:.2}, dir={})",
10619 r.frontier, r.finding_id, r.confidence, dir
10620 );
10621 println!(" {truncated}");
10622 }
10623 println!();
10624 }
10625 BridgesAction::Confirm {
10626 frontier,
10627 bridge_id,
10628 reviewer,
10629 note,
10630 json,
10631 } => {
10632 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10633 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10634 b.status = BridgeStatus::Confirmed;
10635 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10636 let _ = emit_bridge_reviewed_event(
10640 &frontier,
10641 &bridge_id,
10642 "confirmed",
10643 &reviewer_id,
10644 note.as_deref(),
10645 );
10646 if json {
10647 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10648 return;
10649 }
10650 println!();
10651 println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
10652 println!();
10653 }
10654 BridgesAction::Refute {
10655 frontier,
10656 bridge_id,
10657 reviewer,
10658 note,
10659 json,
10660 } => {
10661 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10662 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10663 b.status = BridgeStatus::Refuted;
10664 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10665 let _ = emit_bridge_reviewed_event(
10666 &frontier,
10667 &bridge_id,
10668 "refuted",
10669 &reviewer_id,
10670 note.as_deref(),
10671 );
10672 if json {
10673 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10674 return;
10675 }
10676 println!();
10677 println!(" {} {} now refuted", style::lost("refuted"), b.id);
10678 println!();
10679 }
10680 }
10681}
10682
10683fn cmd_federation(action: FederationAction) {
10685 use crate::federation::PeerHub;
10686
10687 match action {
10688 FederationAction::PeerAdd {
10689 frontier,
10690 id,
10691 url,
10692 pubkey,
10693 note,
10694 json,
10695 } => {
10696 let peer = PeerHub {
10697 id: id.clone(),
10698 url: url.clone(),
10699 public_key: pubkey.trim().to_string(),
10700 added_at: chrono::Utc::now().to_rfc3339(),
10701 note: note.clone(),
10702 };
10703 peer.validate().unwrap_or_else(|e| fail_return(&e));
10704
10705 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10706 if project.peers.iter().any(|p| p.id == id) {
10707 fail(&format!("peer '{id}' already in registry"));
10708 }
10709 project.peers.push(peer.clone());
10710 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10711
10712 if json {
10713 println!(
10714 "{}",
10715 serde_json::to_string_pretty(&json!({
10716 "ok": true,
10717 "command": "federation.peer-add",
10718 "frontier": frontier.display().to_string(),
10719 "peer": peer,
10720 "registered_count": project.peers.len(),
10721 }))
10722 .expect("serialize federation.peer-add")
10723 );
10724 } else {
10725 println!(
10726 "{} peer {} (pubkey {}…) at {}",
10727 style::ok("registered"),
10728 id,
10729 &peer.public_key[..16],
10730 peer.url
10731 );
10732 }
10733 }
10734 FederationAction::PeerList { frontier, json } => {
10735 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10736 if json {
10737 println!(
10738 "{}",
10739 serde_json::to_string_pretty(&json!({
10740 "ok": true,
10741 "command": "federation.peer-list",
10742 "frontier": frontier.display().to_string(),
10743 "peers": project.peers,
10744 }))
10745 .expect("serialize federation.peer-list")
10746 );
10747 } else {
10748 println!();
10749 println!(
10750 " {}",
10751 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
10752 .to_uppercase()
10753 .dimmed()
10754 );
10755 println!(" {}", style::tick_row(60));
10756 if project.peers.is_empty() {
10757 println!(" (no peers registered)");
10758 } else {
10759 for p in &project.peers {
10760 let note_suffix = if p.note.is_empty() {
10761 String::new()
10762 } else {
10763 format!(" · {}", p.note)
10764 };
10765 println!(
10766 " {:<24} {} {}…{note_suffix}",
10767 p.id,
10768 p.url,
10769 &p.public_key[..16]
10770 );
10771 }
10772 }
10773 }
10774 }
10775 FederationAction::Sync {
10776 frontier,
10777 peer_id,
10778 url,
10779 via_hub,
10780 vfr_id,
10781 allow_cross_vfr,
10782 dry_run,
10783 json,
10784 } => {
10785 use crate::federation::{self, DiscoveryResult};
10786
10787 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10788 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
10789 fail(&format!(
10790 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
10791 ));
10792 };
10793 let local_frontier_id = project.frontier_id();
10794
10795 if via_hub
10802 && let Some(target) = vfr_id.as_deref()
10803 && target != local_frontier_id
10804 && !allow_cross_vfr
10805 {
10806 fail(&format!(
10807 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
10808 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
10809 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
10810 ));
10811 }
10812
10813 #[derive(Debug)]
10815 enum SyncOutcome {
10816 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
10820 }
10821
10822 let outcome = if via_hub {
10823 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
10824 match federation::discover_peer_frontier(
10825 &peer.url,
10826 &target_vfr,
10827 Some(&peer.public_key),
10828 ) {
10829 DiscoveryResult::Resolved(p) => {
10830 let src =
10831 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
10832 SyncOutcome::Resolved(p, src)
10833 }
10834 DiscoveryResult::BrokenLocator {
10835 vfr_id,
10836 locator,
10837 status,
10838 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
10839 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
10840 SyncOutcome::UnverifiedEntry(vfr_id, reason)
10841 }
10842 DiscoveryResult::EntryNotFound { vfr_id, status } => {
10843 SyncOutcome::EntryNotFound(vfr_id, status)
10844 }
10845 DiscoveryResult::Unreachable { url, error } => {
10846 fail(&format!("peer hub unreachable ({url}): {error}"));
10847 }
10848 }
10849 } else {
10850 let resolved_url = url.unwrap_or_else(|| {
10851 let base = peer.url.trim_end_matches('/');
10852 format!("{base}/manifest/{local_frontier_id}.json")
10853 });
10854 match federation::fetch_peer_frontier(&resolved_url) {
10855 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
10856 Err(e) => fail(&format!("direct fetch failed: {e}")),
10857 }
10858 };
10859
10860 let peer_source: String;
10863 let peer_state = match outcome {
10864 SyncOutcome::Resolved(p, src) => {
10865 if !json {
10866 println!(" · resolved via {src}");
10867 }
10868 peer_source = src;
10869 p
10870 }
10871 SyncOutcome::BrokenLocator(vfr, locator, status) => {
10872 if dry_run {
10873 if json {
10874 println!(
10875 "{}",
10876 serde_json::to_string_pretty(&json!({
10877 "ok": true,
10878 "command": "federation.sync",
10879 "dry_run": true,
10880 "outcome": "broken_locator",
10881 "vfr_id": vfr,
10882 "locator": locator,
10883 "http_status": status,
10884 }))
10885 .expect("serialize")
10886 );
10887 } else {
10888 println!(
10889 "{} dry-run: peer entry resolved but locator dead",
10890 style::warn("broken_locator")
10891 );
10892 println!(" vfr_id: {vfr}");
10893 println!(" locator: {locator} (HTTP {status})");
10894 }
10895 return;
10896 }
10897 let report = federation::record_locator_failure(
10898 &mut project,
10899 &peer_id,
10900 &vfr,
10901 &locator,
10902 status,
10903 );
10904 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10905 if json {
10906 println!(
10907 "{}",
10908 serde_json::to_string_pretty(&json!({
10909 "ok": true,
10910 "command": "federation.sync",
10911 "outcome": "broken_locator",
10912 "report": report,
10913 }))
10914 .expect("serialize")
10915 );
10916 } else {
10917 println!(
10918 "{} sync recorded broken-locator conflict against {peer_id}",
10919 style::warn("broken_locator")
10920 );
10921 println!(" vfr_id: {vfr}");
10922 println!(" locator: {locator} (HTTP {status})");
10923 println!(" events appended: {}", report.events_appended);
10924 }
10925 return;
10926 }
10927 SyncOutcome::UnverifiedEntry(vfr, reason) => {
10928 if dry_run {
10929 if json {
10930 println!(
10931 "{}",
10932 serde_json::to_string_pretty(&json!({
10933 "ok": true,
10934 "command": "federation.sync",
10935 "dry_run": true,
10936 "outcome": "unverified_peer_entry",
10937 "vfr_id": vfr,
10938 "reason": reason,
10939 }))
10940 .expect("serialize")
10941 );
10942 } else {
10943 println!(
10944 "{} dry-run: peer entry signature did not verify",
10945 style::lost("unverified_peer_entry")
10946 );
10947 println!(" vfr_id: {vfr}");
10948 println!(" reason: {reason}");
10949 }
10950 return;
10951 }
10952 let report =
10953 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
10954 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10955 if json {
10956 println!(
10957 "{}",
10958 serde_json::to_string_pretty(&json!({
10959 "ok": true,
10960 "command": "federation.sync",
10961 "outcome": "unverified_peer_entry",
10962 "report": report,
10963 }))
10964 .expect("serialize")
10965 );
10966 } else {
10967 println!(
10968 "{} sync halted; peer's registry entry signature did not verify",
10969 style::lost("unverified_peer_entry")
10970 );
10971 println!(" vfr_id: {vfr}");
10972 println!(" reason: {reason}");
10973 }
10974 return;
10975 }
10976 SyncOutcome::EntryNotFound(vfr, status) => {
10977 if json {
10978 println!(
10979 "{}",
10980 serde_json::to_string_pretty(&json!({
10981 "ok": false,
10982 "command": "federation.sync",
10983 "outcome": "entry_not_found",
10984 "vfr_id": vfr,
10985 "http_status": status,
10986 }))
10987 .expect("serialize")
10988 );
10989 } else {
10990 println!(
10991 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
10992 style::warn("entry_not_found")
10993 );
10994 }
10995 return;
10996 }
10997 };
10998
10999 if dry_run {
11000 let conflicts = federation::diff_frontiers(&project, &peer_state);
11001 if json {
11002 println!(
11003 "{}",
11004 serde_json::to_string_pretty(&json!({
11005 "ok": true,
11006 "command": "federation.sync",
11007 "dry_run": true,
11008 "peer_id": peer_id,
11009 "peer_source": peer_source,
11010 "conflicts": conflicts,
11011 }))
11012 .expect("serialize federation.sync (dry-run)")
11013 );
11014 } else {
11015 println!(
11016 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
11017 style::ok("ok"),
11018 peer_source,
11019 conflicts.len()
11020 );
11021 for c in &conflicts {
11022 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
11023 }
11024 }
11025 return;
11026 }
11027
11028 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
11029 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11030
11031 if json {
11032 println!(
11033 "{}",
11034 serde_json::to_string_pretty(&json!({
11035 "ok": true,
11036 "command": "federation.sync",
11037 "peer_id": peer_id,
11038 "peer_source": peer_source,
11039 "report": report,
11040 }))
11041 .expect("serialize federation.sync")
11042 );
11043 } else {
11044 println!(
11045 "{} synced with {} ({})",
11046 style::ok("ok"),
11047 peer_id,
11048 peer_source
11049 );
11050 println!(
11051 " our: {}",
11052 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
11053 );
11054 println!(
11055 " peer: {}",
11056 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
11057 );
11058 println!(
11059 " conflicts: {} events appended: {}",
11060 report.conflicts.len(),
11061 report.events_appended
11062 );
11063 for c in &report.conflicts {
11064 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
11065 }
11066 }
11067 }
11068 FederationAction::PushResolution {
11069 frontier,
11070 conflict_event_id,
11071 to,
11072 key,
11073 vfr_id,
11074 json,
11075 } => {
11076 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
11077 }
11078 FederationAction::PeerRemove { frontier, id, json } => {
11079 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11080 let before = project.peers.len();
11081 project.peers.retain(|p| p.id != id);
11082 if project.peers.len() == before {
11083 fail(&format!("peer '{id}' not found in registry"));
11084 }
11085 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11086
11087 if json {
11088 println!(
11089 "{}",
11090 serde_json::to_string_pretty(&json!({
11091 "ok": true,
11092 "command": "federation.peer-remove",
11093 "frontier": frontier.display().to_string(),
11094 "removed": id,
11095 "remaining": project.peers.len(),
11096 }))
11097 .expect("serialize federation.peer-remove")
11098 );
11099 } else {
11100 println!(
11101 "{} peer {} ({} remaining)",
11102 style::ok("removed"),
11103 id,
11104 project.peers.len()
11105 );
11106 }
11107 }
11108 }
11109}
11110
11111fn cmd_federation_push_resolution(
11123 frontier: PathBuf,
11124 conflict_event_id: String,
11125 to: String,
11126 key: Option<PathBuf>,
11127 vfr_id: Option<String>,
11128 json: bool,
11129) {
11130 use crate::canonical;
11131 use crate::sign;
11132
11133 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11134
11135 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
11136 fail(&format!(
11137 "peer '{to}' not in registry; run `vela federation peer-add` first"
11138 ));
11139 };
11140
11141 let Some(resolution) = project
11143 .events
11144 .iter()
11145 .find(|e| {
11146 e.kind == "frontier.conflict_resolved"
11147 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
11148 == Some(conflict_event_id.as_str())
11149 })
11150 .cloned()
11151 else {
11152 fail(&format!(
11153 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
11154 frontier.display()
11155 ));
11156 };
11157
11158 let actor_id = resolution.actor.id.clone();
11161 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
11162 fail(&format!(
11163 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
11164 register the reviewer with `vela actor add` before pushing"
11165 ));
11166 };
11167
11168 let key_path = key.unwrap_or_else(|| {
11171 let home = std::env::var("HOME").unwrap_or_default();
11172 let base = PathBuf::from(home)
11173 .join(".config")
11174 .join("vela")
11175 .join("keys");
11176 let safe_id = actor.id.replace([':', '/'], "_");
11177 let by_actor = base.join(format!("{safe_id}.key"));
11178 if by_actor.exists() {
11179 by_actor
11180 } else {
11181 base.join("private.key")
11182 }
11183 });
11184
11185 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
11186 fail_return(&format!(
11187 "load private key from {}: {e}",
11188 key_path.display()
11189 ))
11190 });
11191 let pubkey_hex = sign::pubkey_hex(&signing_key);
11192 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
11193 fail(&format!(
11194 "private key at {} does not match actor {}'s registered public key. \
11195 Loaded pubkey {}, expected {}.",
11196 key_path.display(),
11197 actor.id,
11198 &pubkey_hex[..16],
11199 &actor.public_key[..16]
11200 ));
11201 }
11202
11203 let signature_hex = sign::sign_event(&resolution, &signing_key)
11206 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
11207
11208 let mut body = resolution.clone();
11213 body.signature = None;
11214 let body_value =
11215 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
11216 let _canonical_check = canonical::to_canonical_bytes(&body_value)
11217 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
11218
11219 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
11220 let url = format!(
11221 "{}/entries/{}/events",
11222 peer.url.trim_end_matches('/'),
11223 target_vfr
11224 );
11225
11226 let url_owned = url.clone();
11228 let pubkey_owned = pubkey_hex.clone();
11229 let signature_owned = signature_hex.clone();
11230 let body_owned = body_value.clone();
11231 let response: Result<(u16, String), String> = std::thread::spawn(move || {
11232 let client = reqwest::blocking::Client::new();
11233 let resp = client
11234 .post(&url_owned)
11235 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
11236 .header("X-Vela-Signature", &signature_owned)
11237 .json(&body_owned)
11238 .send()
11239 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
11240 let status = resp.status().as_u16();
11241 let text = resp.text().unwrap_or_default();
11242 Ok((status, text))
11243 })
11244 .join()
11245 .map_err(|_| "push thread panicked".to_string())
11246 .unwrap_or_else(|e| fail_return(&e));
11247
11248 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
11249 let parsed: serde_json::Value =
11250 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
11251
11252 let accepted = matches!(status, 200..=202);
11253 if json {
11254 println!(
11255 "{}",
11256 serde_json::to_string_pretty(&json!({
11257 "ok": accepted,
11258 "command": "federation.push-resolution",
11259 "frontier": frontier.display().to_string(),
11260 "peer_id": to,
11261 "url": url,
11262 "conflict_event_id": conflict_event_id,
11263 "event_id": resolution.id,
11264 "actor_id": actor.id,
11265 "http_status": status,
11266 "response": parsed,
11267 }))
11268 .expect("serialize federation.push-resolution")
11269 );
11270 } else if accepted {
11271 println!(
11272 "{} resolution {} pushed to {} (HTTP {})",
11273 style::ok("ok"),
11274 &resolution.id[..16.min(resolution.id.len())],
11275 to,
11276 status
11277 );
11278 println!(" url: {url}");
11279 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
11280 } else {
11281 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
11282 println!(" url: {url}");
11283 println!(" response: {text}");
11284 std::process::exit(1);
11285 }
11286}
11287
/// Handle `vela queue …` subcommands: list queued actions, clear the
/// queue, and batch-sign queued actions with a local private key.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        // `queue list` — display the queue file contents without mutating it.
        QueueAction::List { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        // `queue clear` — drop every queued action and report how many.
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        // `queue sign` — sign and apply each queued action, prompting per
        // action unless --yes-to-all. Declined or failed actions stay queued.
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file holds a hex-encoded private key; trim surrounding
            // whitespace/newlines before decoding.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                // A declined action is kept for a later `queue sign` run.
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Failures are non-fatal: the action stays queued so
                        // it can be retried after the underlying issue is fixed.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Rewrite the queue file with only the unsigned leftovers.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11429
11430fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11431 let bytes = hex::decode(hex_str)
11432 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11433 let key_bytes: [u8; 32] = bytes
11434 .try_into()
11435 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11436 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11437}
11438
11439fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11440 use std::io::{self, BufRead, Write};
11441 let mut stdout = io::stdout().lock();
11442 let _ = writeln!(
11443 stdout,
11444 " sign {} on {}? [y/N] ",
11445 action.kind,
11446 action.frontier.display()
11447 );
11448 let _ = stdout.flush();
11449 drop(stdout);
11450 let stdin = io::stdin();
11451 let mut line = String::new();
11452 if stdin.lock().read_line(&mut line).is_err() {
11453 return false;
11454 }
11455 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11456}
11457
/// Sign a single queued action with `signing_key` and apply it to the
/// frontier file recorded on the action.
///
/// Supported kinds: the four `propose_*` kinds (create a proposal) and
/// `accept_proposal` / `reject_proposal` (review an existing one).
/// Returns a short human-readable report on success, or an error string;
/// the caller keeps the action queued when this returns `Err`.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queued kind onto the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Each kind carries a different payload shape pulled from args.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued created_at if present; otherwise stamp "now".
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Recompute the id after overriding created_at — proposal_id is
            // derived from the proposal contents.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature value is discarded here — signing
            // appears to serve only as a fail-fast key check (errors still
            // propagate via `?`) before the proposal is applied; confirm
            // this is intentional.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision that gets signed.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): this signature is also computed but never stored
            // or transmitted anywhere visible in this function — verify.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11579
11580fn cmd_entity(action: EntityAction) {
11592 use crate::entity_resolve;
11593 match action {
11594 EntityAction::Resolve {
11595 frontier,
11596 force,
11597 json,
11598 } => {
11599 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11600 let report = entity_resolve::resolve_frontier(&mut p, force);
11601 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11602 if json {
11603 println!(
11604 "{}",
11605 serde_json::to_string_pretty(&serde_json::json!({
11606 "ok": true,
11607 "command": "entity.resolve",
11608 "frontier_path": frontier.display().to_string(),
11609 "report": report,
11610 }))
11611 .expect("serialize")
11612 );
11613 } else {
11614 println!(
11615 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11616 style::ok("entity"),
11617 report.resolved,
11618 report.total_entities,
11619 report.already_resolved,
11620 report.unresolved_count,
11621 report.findings_touched,
11622 );
11623 let unresolved_summary: std::collections::BTreeSet<&str> = report
11624 .per_finding
11625 .iter()
11626 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11627 .collect();
11628 if !unresolved_summary.is_empty() {
11629 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11630 println!(
11631 " unresolved (first {}): {}",
11632 take.len(),
11633 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11634 );
11635 }
11636 }
11637 }
11638 EntityAction::List { json } => {
11639 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11640 .map(|(name, etype, source, id)| {
11641 serde_json::json!({
11642 "canonical_name": name,
11643 "entity_type": etype,
11644 "source": source,
11645 "id": id,
11646 })
11647 })
11648 .collect();
11649 if json {
11650 println!(
11651 "{}",
11652 serde_json::to_string_pretty(&serde_json::json!({
11653 "ok": true,
11654 "command": "entity.list",
11655 "count": entries.len(),
11656 "entries": entries,
11657 }))
11658 .expect("serialize")
11659 );
11660 } else {
11661 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11662 for e in &entries {
11663 println!(
11664 " {:32} {:18} {} {}",
11665 e["canonical_name"].as_str().unwrap_or("?"),
11666 e["entity_type"].as_str().unwrap_or("?"),
11667 e["source"].as_str().unwrap_or("?"),
11668 e["id"].as_str().unwrap_or("?"),
11669 );
11670 }
11671 }
11672 }
11673 }
11674}
11675
/// Handle `vela link …` subcommands — currently only `link add`, which
/// records a typed link from a local finding to a local or cross-frontier
/// target and persists the frontier.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Validate enum-ish CLI arguments up front.
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            // `--to` is either vf_<hex> (local) or vf_<hex>@vfr_<hex> (cross-frontier).
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // The source finding must exist in this frontier.
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must also exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target requires a declared dep on that vfr.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check of a cross-frontier target: fetch the
            // dep's project from its HTTP(S) locator and warn (never fail)
            // when the target is missing or superseded. Skipped with
            // --no-check-target, for non-HTTP locators, or on any fetch or
            // parse error.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Record the link on the source finding and persist.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Attach the warning (when present) to the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
11821
/// Handle `vela frontier …` subcommands: scaffold a new frontier file,
/// materialize its repo layout, manage cross-frontier dependencies
/// (add / list / remove / refresh), and diff against a past state.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new` — write an empty frontier scaffold to `path`.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // Fresh project with every collection empty.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            // NOTE(review): the payload reports the CLI crate version while
            // the file itself stores VELA_SCHEMA_VERSION — confirm intended.
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize` — delegate to frontier_repo.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep` — declare a pinned cross-frontier dependency.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // One dep per vfr_id; duplicates must be removed explicitly first.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        // `frontier list-deps` — print all declared dependencies.
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep` — drop a cross-frontier dep, but only when
        // no finding still links into that frontier.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Refuse removal while any cross-frontier link still targets this
            // vfr — removing the dep would orphan those links.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps` — re-pin each cross-frontier dep to the
        // latest snapshot hash reported by the hub at `from`.
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to refresh — report an empty summary and stop early.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal view of the hub's entry response; only the latest
            // snapshot hash is needed here.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                // Classify each dep as missing / unreachable / unchanged /
                // refreshed, bump the matching counter, and record a JSON
                // outcome row for the report.
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by vfr_id before mutating it.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Only mutate the pin outside dry-run;
                                        // the counter still reflects what a
                                        // real run would change.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Persist only when something actually changed on a real run.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        // `frontier diff` — delegate to the diff implementation.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
12257
12258fn cmd_repo(action: RepoAction) {
12259 match action {
12260 RepoAction::Status { frontier, json } => {
12261 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
12262 if json {
12263 println!(
12264 "{}",
12265 serde_json::to_string_pretty(&payload)
12266 .expect("failed to serialize repo status")
12267 );
12268 } else {
12269 let summary = payload.get("summary").unwrap_or(&Value::Null);
12270 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
12271 println!("vela repo status");
12272 println!(" frontier: {}", frontier.display());
12273 println!(
12274 " events: {}",
12275 summary
12276 .get("accepted_events")
12277 .and_then(Value::as_u64)
12278 .unwrap_or_default()
12279 );
12280 println!(
12281 " open proposals: {}",
12282 summary
12283 .get("open_proposals")
12284 .and_then(Value::as_u64)
12285 .unwrap_or_default()
12286 );
12287 println!(
12288 " state: {}",
12289 freshness
12290 .get("materialized_state")
12291 .and_then(Value::as_str)
12292 .unwrap_or("unknown")
12293 );
12294 println!(
12295 " proof: {}",
12296 freshness
12297 .get("proof")
12298 .and_then(Value::as_str)
12299 .unwrap_or("unknown")
12300 );
12301 }
12302 }
12303 RepoAction::Doctor { frontier, json } => {
12304 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
12305 if json {
12306 println!(
12307 "{}",
12308 serde_json::to_string_pretty(&payload)
12309 .expect("failed to serialize repo doctor")
12310 );
12311 } else {
12312 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12313 let issues = payload
12314 .get("issues")
12315 .and_then(Value::as_array)
12316 .map_or(0, Vec::len);
12317 println!("vela repo doctor");
12318 println!(" frontier: {}", frontier.display());
12319 println!(" status: {}", if ok { "ok" } else { "needs attention" });
12320 println!(" issues: {issues}");
12321 }
12322 }
12323 }
12324}
12325
12326fn cmd_proof_verify(frontier: &Path, json_output: bool) {
12327 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
12328 if json_output {
12329 println!(
12330 "{}",
12331 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
12332 );
12333 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12334 std::process::exit(1);
12335 }
12336 } else {
12337 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12338 println!("vela proof verify");
12339 println!(" frontier: {}", frontier.display());
12340 println!(" status: {}", if ok { "ok" } else { "failed" });
12341 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
12342 for issue in issues {
12343 if let Some(message) = issue.get("message").and_then(Value::as_str) {
12344 println!(" issue: {message}");
12345 }
12346 }
12347 }
12348 if !ok {
12349 std::process::exit(1);
12350 }
12351 }
12352}
12353
12354fn cmd_proof_explain(frontier: &Path) {
12355 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
12356 print!("{text}");
12357}
12358
/// Report what changed in a frontier during a time window.
///
/// The window is `[--since, now)` when `since` is given; otherwise it is
/// the ISO week named by `week` (defaulting to the ISO week containing
/// "now"). Findings are bucketed into added / updated / new
/// contradictions, and output is JSON or a human-readable summary.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the reporting window; an explicit --since takes precedence
    // over --week. Both parse failures are fatal.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Buckets of borrowed findings. `cumulative` counts every finding
    // created before the window end — not only those inside the window.
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are treated as absent rather than fatal.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        // Created inside the window → "added" (and possibly a
        // contradiction). The `continue` ensures a finding counted as
        // added is never also counted as updated.
        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Shared JSON shape for each bucket in the payload.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable rendering below.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12522
/// Truncate `s` to at most `n` characters, appending '…' when text is cut.
///
/// Works on `char` boundaries (not bytes), so multi-byte UTF-8 input is
/// safe. Returns the string unchanged when it already fits. Fix: the old
/// code returned a lone "…" (one char) for `n == 0`, exceeding the
/// requested limit; it now returns an empty string in that case.
fn truncate(s: &str, n: usize) -> String {
    if s.chars().count() <= n {
        return s.to_string();
    }
    if n == 0 {
        return String::new();
    }
    // Reserve one slot for the ellipsis marker.
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
12532
12533fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12535 use chrono::Datelike;
12536 let iso = d.iso_week();
12537 format!("{:04}-W{:02}", iso.year(), iso.week())
12538}
12539
12540fn iso_week_bounds(
12543 key: &str,
12544) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12545 let (year_str, week_str) = key
12546 .split_once("-W")
12547 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12548 let year: i32 = year_str
12549 .parse()
12550 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12551 let week: u32 = week_str
12552 .parse()
12553 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12554 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12555 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12556 let next_monday = monday + chrono::Duration::days(7);
12557 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12558 let end = next_monday
12559 .and_hms_opt(0, 0, 0)
12560 .expect("00:00 valid")
12561 .and_utc();
12562 Ok((start, end))
12563}
12564
12565fn cmd_registry(action: RegistryAction) {
12570 use crate::registry;
12571 let default_registry = || -> PathBuf {
12572 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
12573 PathBuf::from(home)
12574 .join(".vela")
12575 .join("registry")
12576 .join("entries.json")
12577 };
12578 match action {
12579 RegistryAction::DependsOn { vfr_id, from, json } => {
12580 let base = from.trim_end_matches('/');
12581 let url = format!("{base}/entries/{vfr_id}/depends-on");
12582 let client = reqwest::blocking::Client::builder()
12583 .timeout(std::time::Duration::from_secs(30))
12584 .build()
12585 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12586 let resp = client
12587 .get(&url)
12588 .send()
12589 .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
12590 if !resp.status().is_success() {
12591 fail(&format!("GET {url}: HTTP {}", resp.status()));
12592 }
12593 let body: serde_json::Value = resp
12594 .json()
12595 .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
12596 if json {
12597 println!(
12598 "{}",
12599 serde_json::to_string_pretty(&body).expect("serialize")
12600 );
12601 } else {
12602 let dependents = body
12603 .get("dependents")
12604 .and_then(|v| v.as_array())
12605 .cloned()
12606 .unwrap_or_default();
12607 let count = dependents.len();
12608 println!(
12609 "{} {count} {} on {vfr_id}",
12610 style::ok("registry"),
12611 if count == 1 {
12612 "frontier depends"
12613 } else {
12614 "frontiers depend"
12615 },
12616 );
12617 for e in &dependents {
12618 let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
12619 let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
12620 let o = e
12621 .get("owner_actor_id")
12622 .and_then(|v| v.as_str())
12623 .unwrap_or("?");
12624 println!(" {v} {n} ({o})");
12625 }
12626 }
12627 }
12628 RegistryAction::Mirror {
12629 vfr_id,
12630 from,
12631 to,
12632 json,
12633 } => {
12634 let src_base = from.trim_end_matches('/');
12635 let dst_base = to.trim_end_matches('/');
12636 let src_url = format!("{src_base}/entries/{vfr_id}");
12637 let dst_url = format!("{dst_base}/entries");
12638 let client = reqwest::blocking::Client::builder()
12639 .timeout(std::time::Duration::from_secs(30))
12640 .build()
12641 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12642
12643 let entry: serde_json::Value = client
12644 .get(&src_url)
12645 .send()
12646 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12647 .error_for_status()
12648 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12649 .json()
12650 .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
12651
12652 let resp = client
12653 .post(&dst_url)
12654 .header("content-type", "application/json")
12655 .body(
12656 serde_json::to_vec(&entry)
12657 .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
12658 )
12659 .send()
12660 .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
12661 let status = resp.status();
12662 if !status.is_success() {
12663 let body = resp.text().unwrap_or_default();
12664 fail(&format!(
12665 "POST {dst_url}: HTTP {status}: {}",
12666 body.chars().take(300).collect::<String>()
12667 ));
12668 }
12669 let body: serde_json::Value = resp
12670 .json()
12671 .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
12672 let duplicate = body
12673 .get("duplicate")
12674 .and_then(serde_json::Value::as_bool)
12675 .unwrap_or(false);
12676 let payload = json!({
12677 "ok": true,
12678 "command": "registry.mirror",
12679 "vfr_id": vfr_id,
12680 "from": src_base,
12681 "to": dst_base,
12682 "duplicate_on_destination": duplicate,
12683 "destination_response": body,
12684 });
12685 if json {
12686 println!(
12687 "{}",
12688 serde_json::to_string_pretty(&payload).expect("serialize")
12689 );
12690 } else {
12691 println!(
12692 "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
12693 style::ok("registry"),
12694 if duplicate {
12695 " (duplicate; signature already known)"
12696 } else {
12697 " (fresh insert)"
12698 }
12699 );
12700 }
12701 }
12702 RegistryAction::List { from, json } => {
12703 let (label, registry_data) = match &from {
12706 Some(loc) if loc.starts_with("http") => (
12707 loc.clone(),
12708 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12709 ),
12710 Some(loc) => {
12711 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12712 (
12713 p.display().to_string(),
12714 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12715 )
12716 }
12717 None => {
12718 let p = default_registry();
12719 (
12720 p.display().to_string(),
12721 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12722 )
12723 }
12724 };
12725 let r = registry_data;
12726 let path_label = label;
12727 if json {
12728 let payload = json!({
12729 "ok": true,
12730 "command": "registry.list",
12731 "registry": path_label,
12732 "entry_count": r.entries.len(),
12733 "entries": r.entries,
12734 });
12735 println!(
12736 "{}",
12737 serde_json::to_string_pretty(&payload)
12738 .expect("failed to serialize registry.list")
12739 );
12740 } else {
12741 println!();
12742 println!(
12743 " {}",
12744 format!("VELA · REGISTRY · LIST · {}", path_label)
12745 .to_uppercase()
12746 .dimmed()
12747 );
12748 println!(" {}", style::tick_row(60));
12749 if r.entries.is_empty() {
12750 println!(" (registry is empty)");
12751 } else {
12752 for entry in &r.entries {
12753 println!(
12754 " {} {} ({}) by {} published {}",
12755 entry.vfr_id,
12756 entry.name,
12757 entry.network_locator,
12758 entry.owner_actor_id,
12759 entry.signed_publish_at
12760 );
12761 }
12762 }
12763 }
12764 }
12765 RegistryAction::Publish {
12766 frontier,
12767 owner,
12768 key,
12769 locator,
12770 to,
12771 json,
12772 } => {
12773 let key_hex = std::fs::read_to_string(&key)
12776 .map(|s| s.trim().to_string())
12777 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
12778 let signing_key = parse_signing_key(&key_hex);
12779 let derived = hex::encode(signing_key.verifying_key().to_bytes());
12780
12781 let mut frontier_data =
12783 repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12784
12785 let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
12786 Some(actor) => actor.public_key.clone(),
12787 None => {
12788 eprintln!(
12796 " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
12797 &derived[..16]
12798 );
12799 frontier_data.actors.push(sign::ActorRecord {
12800 id: owner.clone(),
12801 public_key: derived.clone(),
12802 algorithm: "ed25519".to_string(),
12803 created_at: chrono::Utc::now().to_rfc3339(),
12804 tier: None,
12805 orcid: None,
12806 access_clearance: None,
12807 });
12808 repo::save_to_path(&frontier, &frontier_data)
12809 .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
12810 derived.clone()
12811 }
12812 };
12813
12814 let snapshot_hash = events::snapshot_hash(&frontier_data);
12818 let event_log_hash = events::event_log_hash(&frontier_data.events);
12819 let vfr_id = frontier_data.frontier_id();
12820 let name = frontier_data.project.name.clone();
12821
12822 if derived != pubkey {
12824 fail(&format!(
12825 "private key does not match registered pubkey for owner '{owner}'"
12826 ));
12827 }
12828
12829 let to_is_remote = matches!(
12837 to.as_deref(),
12838 Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
12839 );
12840 let resolved_locator = match locator {
12841 Some(l) => l,
12842 None => {
12843 if to_is_remote {
12844 let hub = to.as_deref().unwrap().trim_end_matches('/');
12845 let hub_root = hub.trim_end_matches("/entries");
12846 format!("{hub_root}/entries/{vfr_id}/snapshot")
12847 } else {
12848 fail_return(
12849 "--locator is required for local publishes; pass e.g. \
12850 --locator file:///path/to/frontier.json or an HTTPS URL.",
12851 )
12852 }
12853 }
12854 };
12855
12856 let mut entry = registry::RegistryEntry {
12857 schema: registry::ENTRY_SCHEMA.to_string(),
12858 vfr_id: vfr_id.clone(),
12859 name: name.clone(),
12860 owner_actor_id: owner.clone(),
12861 owner_pubkey: pubkey,
12862 latest_snapshot_hash: snapshot_hash,
12863 latest_event_log_hash: event_log_hash,
12864 network_locator: resolved_locator,
12865 signed_publish_at: chrono::Utc::now().to_rfc3339(),
12866 signature: String::new(),
12867 };
12868 entry.signature =
12869 registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
12870
12871 let (registry_label, duplicate) = if to_is_remote {
12872 let hub_url = to.clone().unwrap();
12873 let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
12877 .unwrap_or_else(|e| fail_return(&e));
12878 (hub_url, resp.duplicate)
12879 } else {
12880 let registry_path = match &to {
12881 Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
12882 None => default_registry(),
12883 };
12884 registry::publish_entry(®istry_path, entry.clone())
12885 .unwrap_or_else(|e| fail_return(&e));
12886 (registry_path.display().to_string(), false)
12887 };
12888
12889 let payload = json!({
12890 "ok": true,
12891 "command": "registry.publish",
12892 "registry": registry_label,
12893 "vfr_id": vfr_id,
12894 "name": name,
12895 "owner": owner,
12896 "snapshot_hash": entry.latest_snapshot_hash,
12897 "event_log_hash": entry.latest_event_log_hash,
12898 "signed_publish_at": entry.signed_publish_at,
12899 "signature": entry.signature,
12900 "duplicate": duplicate,
12901 });
12902 if json {
12903 println!(
12904 "{}",
12905 serde_json::to_string_pretty(&payload)
12906 .expect("failed to serialize registry.publish")
12907 );
12908 } else {
12909 let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
12910 println!(
12911 "{} published {vfr_id} → {}{}",
12912 style::ok("registry"),
12913 registry_label,
12914 dup_suffix
12915 );
12916 println!(" snapshot: {}", entry.latest_snapshot_hash);
12917 println!(" event_log: {}", entry.latest_event_log_hash);
12918 println!(" signature: {}…", &entry.signature[..16]);
12919 }
12920 }
12921 RegistryAction::Pull {
12922 vfr_id,
12923 from,
12924 out,
12925 transitive,
12926 depth,
12927 json,
12928 } => {
12929 let (registry_label, registry_data) = match &from {
12933 Some(loc) if loc.starts_with("http") => (
12934 loc.clone(),
12935 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12936 ),
12937 Some(loc) => {
12938 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12939 (
12940 p.display().to_string(),
12941 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12942 )
12943 }
12944 None => {
12945 let p = default_registry();
12946 (
12947 p.display().to_string(),
12948 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12949 )
12950 }
12951 };
12952 let entry = registry::find_latest(®istry_data, &vfr_id)
12953 .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
12954
12955 if transitive {
12956 let result = registry::pull_transitive(®istry_data, &vfr_id, &out, depth)
12960 .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
12961
12962 let dep_paths_json: serde_json::Value = serde_json::Value::Object(
12963 result
12964 .deps
12965 .iter()
12966 .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
12967 .collect(),
12968 );
12969 let payload = json!({
12970 "ok": true,
12971 "command": "registry.pull",
12972 "registry": registry_label,
12973 "vfr_id": vfr_id,
12974 "transitive": true,
12975 "depth": depth,
12976 "out_dir": out.display().to_string(),
12977 "primary": result.primary_path.display().to_string(),
12978 "verified": result.verified,
12979 "deps": dep_paths_json,
12980 });
12981 if json {
12982 println!(
12983 "{}",
12984 serde_json::to_string_pretty(&payload)
12985 .expect("failed to serialize registry.pull")
12986 );
12987 } else {
12988 println!(
12989 "{} pulled {vfr_id} (transitive) → {}",
12990 style::ok("registry"),
12991 out.display()
12992 );
12993 println!(" verified {} frontier(s):", result.verified.len());
12994 for v in &result.verified {
12995 println!(" · {v}");
12996 }
12997 println!(" every cross-frontier dependency's pinned snapshot hash matched");
12998 }
12999 return;
13000 }
13001
13002 registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
13005 .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
13006 registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
13007 let _ = std::fs::remove_file(&out);
13008 fail_return(&format!("pull verification failed: {e}"))
13009 });
13010
13011 let payload = json!({
13012 "ok": true,
13013 "command": "registry.pull",
13014 "registry": registry_label,
13015 "vfr_id": vfr_id,
13016 "out": out.display().to_string(),
13017 "snapshot_hash": entry.latest_snapshot_hash,
13018 "event_log_hash": entry.latest_event_log_hash,
13019 "verified": true,
13020 });
13021 if json {
13022 println!(
13023 "{}",
13024 serde_json::to_string_pretty(&payload)
13025 .expect("failed to serialize registry.pull")
13026 );
13027 } else {
13028 println!(
13029 "{} pulled {vfr_id} → {}",
13030 style::ok("registry"),
13031 out.display()
13032 );
13033 println!(" verified snapshot+event_log hashes match registry; signature ok");
13034 }
13035 }
13036 }
13037}
13038
13039fn print_stats_json(path: &Path) {
13040 let frontier = load_frontier_or_fail(path);
13041 let source_hash = hash_path_or_fail(path);
13042 let payload = json!({
13043 "ok": true,
13044 "command": "stats",
13045 "schema_version": project::VELA_SCHEMA_VERSION,
13046 "frontier": {
13047 "name": &frontier.project.name,
13048 "description": &frontier.project.description,
13049 "source": path.display().to_string(),
13050 "hash": format!("sha256:{source_hash}"),
13051 "compiled_at": &frontier.project.compiled_at,
13052 "compiler": &frontier.project.compiler,
13053 "papers_processed": frontier.project.papers_processed,
13054 "errors": frontier.project.errors,
13055 },
13056 "stats": frontier.stats,
13057 "proposals": proposals::summary(&frontier),
13058 "proof_state": frontier.proof_state,
13059 });
13060 println!(
13061 "{}",
13062 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
13063 );
13064}
13065
/// Search findings in one frontier, or across a directory of frontiers.
///
/// With `--all <dir>`, delegates to `search::run_all` (text output only).
/// Otherwise `--source` is required; output is pretty text or a JSON
/// payload that embeds the frontier's sha256 content hash.
fn cmd_search(
    source: Option<&Path>,
    query: &str,
    entity: Option<&str>,
    assertion_type: Option<&str>,
    all: Option<&Path>,
    limit: usize,
    json_output: bool,
) {
    // Directory-wide search is a separate code path; note it ignores
    // `json_output` and always prints text.
    if let Some(dir) = all {
        search::run_all(dir, query, entity, assertion_type, limit);
        return;
    }
    let Some(src) = source else {
        fail("Provide --source <frontier> or --all <directory>.");
    };
    if json_output {
        let results = search::search(src, query, entity, assertion_type, limit);
        let loaded = load_frontier_or_fail(src);
        let source_hash = hash_path_or_fail(src);
        let payload = json!({
            "ok": true,
            "command": "search",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "query": query,
            "frontier": {
                "name": &loaded.project.name,
                "source": src.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "entity": entity,
                "assertion_type": assertion_type,
                "limit": limit,
            },
            "count": results.len(),
            "results": results.iter().map(|result| json!({
                "id": &result.id,
                "score": result.score,
                "assertion": &result.assertion,
                "assertion_type": &result.assertion_type,
                "confidence": result.confidence,
                "entities": &result.entities,
                "doi": &result.doi,
            })).collect::<Vec<_>>()
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
        );
    } else {
        search::run(src, query, entity, assertion_type, limit);
    }
}
13120
/// Analyze and report tensions (conflicting finding pairs) in a frontier.
///
/// `both_high` and `cross_domain` are filters forwarded to the analyzer;
/// `top` caps the number of reported tensions. JSON output embeds the
/// frontier's sha256 content hash and both findings of each tension.
fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
    let frontier = load_frontier_or_fail(source);
    let result = tensions::analyze(&frontier, both_high, cross_domain, top);
    if json_output {
        let source_hash = hash_path_or_fail(source);
        let payload = json!({
            "ok": true,
            "command": "tensions",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": source.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "both_high": both_high,
                "cross_domain": cross_domain,
                "top": top,
            },
            "count": result.len(),
            // Each tension carries its score plus a symmetric summary of
            // the two findings in conflict.
            "tensions": result.iter().map(|t| json!({
                "score": t.score,
                "resolved": t.resolved,
                "superseding_id": &t.superseding_id,
                "finding_a": {
                    "id": &t.finding_a.id,
                    "assertion": &t.finding_a.assertion,
                    "confidence": t.finding_a.confidence,
                    "assertion_type": &t.finding_a.assertion_type,
                    "citation_count": t.finding_a.citation_count,
                    "contradicts_count": t.finding_a.contradicts_count,
                },
                "finding_b": {
                    "id": &t.finding_b.id,
                    "assertion": &t.finding_b.assertion,
                    "confidence": t.finding_b.confidence,
                    "assertion_type": &t.finding_b.assertion_type,
                    "citation_count": t.finding_b.citation_count,
                    "contradicts_count": t.finding_b.contradicts_count,
                }
            })).collect::<Vec<_>>()
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
        );
    } else {
        tensions::print_tensions(&result);
    }
}
13171
13172fn cmd_gaps(action: GapsAction) {
13173 match action {
13174 GapsAction::Rank {
13175 frontier,
13176 top,
13177 domain,
13178 json,
13179 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
13180 }
13181}
13182
/// Rank findings flagged as gaps / negative space into "review leads".
///
/// Score = (number of links targeting the finding) + its confidence
/// score; the optional `domain` filter matches case-insensitively
/// against assertion text and entity names. Output deliberately frames
/// results as review leads, not experiment targets.
fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
    let frontier = load_frontier_or_fail(frontier_path);
    let mut ranked = frontier
        .findings
        .iter()
        // Only findings explicitly flagged as gap or negative-space.
        .filter(|finding| finding.flags.gap || finding.flags.negative_space)
        .filter(|finding| {
            // No --domain means everything passes this filter.
            domain.is_none_or(|domain| {
                finding
                    .assertion
                    .text
                    .to_lowercase()
                    .contains(&domain.to_lowercase())
                    || finding
                        .assertion
                        .entities
                        .iter()
                        .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
            })
        })
        .map(|finding| {
            // Count inbound links from every other finding; more
            // dependents → higher priority for review.
            let dependency_count = frontier
                .findings
                .iter()
                .flat_map(|candidate| candidate.links.iter())
                .filter(|link| link.target == finding.id)
                .count();
            let score = dependency_count as f64 + finding.confidence.score;
            json!({
                "id": &finding.id,
                "kind": "candidate_gap_review_lead",
                "assertion": &finding.assertion.text,
                "score": score,
                "dependency_count": dependency_count,
                "confidence": finding.confidence.score,
                "evidence_type": &finding.evidence.evidence_type,
                "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
                "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
                "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
            })
        })
        .collect::<Vec<_>>();
    // Descending by score; NaN/missing scores compare Equal (stable order).
    ranked.sort_by(|a, b| {
        b.get("score")
            .and_then(Value::as_f64)
            .partial_cmp(&a.get("score").and_then(Value::as_f64))
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    ranked.truncate(top);
    if json_output {
        let source_hash = hash_path_or_fail(frontier_path);
        let payload = json!({
            "ok": true,
            "command": "gaps rank",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": frontier_path.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "top": top,
                "domain": domain,
            },
            "count": ranked.len(),
            "ranking_label": "candidate gap review leads",
            "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
            // Both keys carry the same list — "gaps" appears to be kept
            // as a legacy alias of "review_leads"; TODO confirm consumers.
            "review_leads": ranked.clone(),
            "gaps": ranked,
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
        );
    } else {
        println!();
        println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
        println!(" {}", style::tick_row(60));
        println!(" review source scope; these are not guaranteed experiment targets.");
        println!();
        for (idx, gap) in ranked.iter().enumerate() {
            println!(
                " {}. [{}] score={} {}",
                idx + 1,
                gap["id"].as_str().unwrap_or("?"),
                gap["score"].as_f64().unwrap_or(0.0),
                gap["assertion"].as_str().unwrap_or("")
            );
        }
    }
}
13274
/// Detect cross-frontier "bridge" entities across two or more frontiers.
///
/// Loads every input frontier, runs `bridge::detect_bridges` on the
/// named set, and optionally annotates the top `top_n` bridges with a
/// rough PubMed prior-art count before printing the report. Requires at
/// least two inputs (fails otherwise).
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        // Each frontier is labeled by its file stem in the report.
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            // A failed lookup is logged but does not abort the run; the
            // bridge simply keeps pubmed_count = None.
            match bridge::check_novelty(&client, &query).await {
                Ok(count) => bridge_item.pubmed_count = Some(count),
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // 350 ms pause between requests — presumably to stay under
            // the PubMed API rate limit; TODO confirm required interval.
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
13320
/// Parsed arguments for the `bench` command; consumed by `cmd_bench`.
struct BenchArgs {
    // Frontier file under test; defaults to frontiers/bbb-alzheimer.json.
    frontier: Option<PathBuf>,
    // Finding-level gold file.
    gold: Option<PathBuf>,
    // Entity-level gold file.
    entity_gold: Option<PathBuf>,
    // Link-level gold file.
    link_gold: Option<PathBuf>,
    // Benchmark suite definition; takes precedence over single-gold modes.
    suite: Option<PathBuf>,
    // Only check that the suite's inputs exist/are well-formed; no tasks run.
    suite_ready: bool,
    // Minimum F1 to pass (defaults to 0.05 unless no_thresholds is set).
    min_f1: Option<f64>,
    // Minimum precision to pass.
    min_precision: Option<f64>,
    // Minimum recall to pass.
    min_recall: Option<f64>,
    // Disable all pass/fail thresholds.
    no_thresholds: bool,
    // Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
13334
/// Run the agent state-update benchmark: compare a candidate state file
/// against a gold file and print (or write) the resulting report.
///
/// Exits with status 1 when the benchmark cannot run or does not pass.
fn cmd_agent_bench(
    gold: &Path,
    candidate: &Path,
    sources: Option<&Path>,
    threshold: Option<f64>,
    report_path: Option<&Path>,
    json_out: bool,
) {
    let input = crate::agent_bench::BenchInput {
        gold_path: gold.to_path_buf(),
        candidate_path: candidate.to_path_buf(),
        sources: sources.map(Path::to_path_buf),
        // No threshold flag means "no minimum score".
        threshold: threshold.unwrap_or(0.0),
    };
    let report = match crate::agent_bench::run(input) {
        Ok(r) => r,
        Err(e) => {
            eprintln!("{} bench failed: {e}", style::err_prefix());
            std::process::exit(1);
        }
    };

    let json = serde_json::to_string_pretty(&report).unwrap_or_default();
    // A failed report write is a warning, not a fatal error: the report is
    // still printed below and the pass/fail exit code is preserved.
    if let Some(path) = report_path
        && let Err(e) = std::fs::write(path, &json)
    {
        eprintln!(
            "{} failed to write report to {}: {e}",
            style::err_prefix(),
            path.display()
        );
    }

    if json_out {
        println!("{json}");
    } else {
        println!();
        println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
        println!(" {}", style::tick_row(60));
        print!("{}", crate::agent_bench::render_pretty(&report));
        println!();
    }

    if !report.pass {
        std::process::exit(1);
    }
}
13386
/// Dispatch the `bench` command to one of three modes, in precedence order:
/// suite readiness check (`--suite-ready`), full suite run (`--suite`), or a
/// single gold-file comparison against a frontier.
///
/// Exits with status 1 whenever the selected mode does not report success.
fn cmd_bench(args: BenchArgs) {
    // Mode 1: only verify the suite's referenced inputs, run nothing.
    if args.suite_ready {
        let suite_path = args
            .suite
            .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
        let payload =
            benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }
    // Mode 2: run every task in the suite and summarize pass/fail counts.
    if let Some(suite_path) = args.suite {
        let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
        if args.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&payload)
                    .expect("failed to serialize benchmark suite")
            );
        } else {
            let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
            let metrics = payload.get("metrics").unwrap_or(&Value::Null);
            println!();
            println!(" {}", "VELA · BENCH · SUITE".dimmed());
            println!(" {}", style::tick_row(60));
            println!(" suite: {}", suite_path.display());
            println!(
                " status: {}",
                if ok {
                    style::ok("pass")
                } else {
                    style::lost("fail")
                }
            );
            println!(
                " tasks: {}/{} passed",
                metrics
                    .get("tasks_passed")
                    .and_then(Value::as_u64)
                    .unwrap_or(0),
                metrics
                    .get("tasks_total")
                    .and_then(Value::as_u64)
                    .unwrap_or(0)
            );
        }
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Mode 3: single gold-file comparison against one frontier.
    let frontier = args
        .frontier
        .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
    // Unless thresholds are disabled, min_f1 falls back to a 0.05 floor;
    // precision/recall have no default floor.
    let thresholds = benchmark::BenchmarkThresholds {
        min_f1: if args.no_thresholds {
            None
        } else {
            args.min_f1.or(Some(0.05))
        },
        min_precision: if args.no_thresholds {
            None
        } else {
            args.min_precision
        },
        min_recall: if args.no_thresholds {
            None
        } else {
            args.min_recall
        },
        ..Default::default()
    };
    // Gold-file precedence: link > entity > finding.
    if let Some(path) = args.link_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Link,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.entity_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Entity,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.gold {
        if args.json {
            print_benchmark_or_exit(benchmark::task_envelope(
                &frontier,
                None,
                benchmark::BenchmarkMode::Finding,
                Some(&path),
                &thresholds,
                None,
            ));
        } else {
            // Pretty finding-level run has its own renderer.
            benchmark::run(&frontier, &path, false);
        }
    } else {
        fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
    }
}
13500
13501fn print_benchmark_or_exit(result: Result<Value, String>) {
13502 let payload = result.unwrap_or_else(|e| fail_return(&e));
13503 println!(
13504 "{}",
13505 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13506 );
13507 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13508 std::process::exit(1);
13509 }
13510}
13511
13512fn cmd_packet(action: PacketAction) {
13513 let (result, json_output) = match action {
13514 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13515 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13516 };
13517 match result {
13518 Ok(output) if json_output => {
13519 println!(
13520 "{}",
13521 serde_json::to_string_pretty(&json!({
13522 "ok": true,
13523 "command": "packet",
13524 "result": output,
13525 }))
13526 .expect("failed to serialize packet response")
13527 );
13528 }
13529 Ok(output) => println!("{output}"),
13530 Err(e) => fail(&e),
13531 }
13532}
13533
13534fn cmd_verify(path: &Path, json_output: bool) {
13539 let result = packet::validate(path);
13540 match result {
13541 Ok(output) if json_output => {
13542 println!(
13543 "{}",
13544 serde_json::to_string_pretty(&json!({
13545 "ok": true,
13546 "command": "verify",
13547 "result": output,
13548 }))
13549 .expect("failed to serialize verify response")
13550 );
13551 }
13552 Ok(output) => {
13553 println!("{output}");
13554 println!(
13555 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13556 );
13557 }
13558 Err(e) => fail(&e),
13559 }
13560}
13561
13562fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13563 if path.join(".vela").exists() {
13564 fail(&format!(
13565 "already initialized: {} exists",
13566 path.join(".vela").display()
13567 ));
13568 }
13569 let payload = frontier_repo::initialize(
13570 path,
13571 frontier_repo::InitOptions {
13572 name,
13573 template,
13574 initialize_git,
13575 },
13576 )
13577 .unwrap_or_else(|e| fail_return(&e));
13578 if json_output {
13579 println!(
13580 "{}",
13581 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13582 );
13583 } else {
13584 println!(
13585 "{} initialized frontier repository in {}",
13586 style::ok("ok"),
13587 path.display()
13588 );
13589 }
13590}
13591
/// One-shot bootstrap: init a repo, generate a keypair, register the reviewer
/// as an actor, and add a first finding — by re-invoking this same binary as a
/// subprocess for each step, so each step goes through the normal CLI path.
///
/// Any step failure aborts the process with that step's stderr.
fn cmd_quickstart(
    path: &Path,
    name: &str,
    reviewer: &str,
    assertion: Option<&str>,
    keys_out: Option<&Path>,
    json_output: bool,
) {
    use std::process::Command;

    // Refuse to run on an already-initialized repository.
    if path.join(".vela").exists() {
        fail(&format!(
            "already initialized: {} exists",
            path.join(".vela").display()
        ));
    }

    let exe = std::env::current_exe()
        .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
    let keys_dir = keys_out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("keys"));
    let assertion_text =
        assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");

    // Run one CLI step as a subprocess of this binary; abort on failure,
    // returning captured output (stdout is parsed by some steps).
    let run_step = |label: &str, args: &[&str]| -> std::process::Output {
        let out = Command::new(&exe)
            .args(args)
            .output()
            .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
        if !out.status.success() {
            let stderr = String::from_utf8_lossy(&out.stderr);
            fail(&format!("{label} failed:\n{stderr}"));
        }
        out
    };

    // Step 1: initialize the repository (git setup skipped here).
    run_step(
        "init",
        &[
            "init",
            path.to_string_lossy().as_ref(),
            "--name",
            name,
            "--no-git",
            "--json",
        ],
    );

    // Step 2: generate a signing keypair; parse the public key from stdout.
    let keys_out_str = keys_dir.to_string_lossy().into_owned();
    let keypair_out = run_step(
        "sign.generate-keypair",
        &[
            "sign",
            "generate-keypair",
            "--out",
            keys_out_str.as_ref(),
            "--json",
        ],
    );
    let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
    let public_key = keypair_json
        .get("public_key")
        .and_then(|v| v.as_str())
        .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
        .to_string();

    // Step 3: register the reviewer as an actor with that public key.
    run_step(
        "actor.add",
        &[
            "actor",
            "add",
            path.to_string_lossy().as_ref(),
            reviewer,
            "--pubkey",
            public_key.as_str(),
            "--json",
        ],
    );

    // Step 4: add (and apply) a first finding authored by the reviewer.
    let finding_out = run_step(
        "finding.add",
        &[
            "finding",
            "add",
            path.to_string_lossy().as_ref(),
            "--assertion",
            assertion_text,
            "--author",
            reviewer,
            "--apply",
            "--json",
        ],
    );
    let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
    // finding_id is optional in the output; absence is tolerated below.
    let finding_id = finding_json
        .get("finding_id")
        .and_then(|v| v.as_str())
        .map(str::to_string);

    if json_output {
        let payload = json!({
            "ok": true,
            "command": "quickstart",
            "frontier": path.display().to_string(),
            "name": name,
            "reviewer": reviewer,
            "public_key": public_key,
            "keys_dir": keys_dir.display().to_string(),
            "finding_id": finding_id,
            "next_steps": [
                format!("vela serve {}", path.display()),
                format!(
                    "vela ingest <paper.pdf|doi:...> --frontier {}",
                    path.display()
                ),
                format!("vela log {}", path.display()),
            ],
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · QUICKSTART · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" frontier: {}", path.display());
    println!(" name: {name}");
    println!(" reviewer: {reviewer}");
    println!(" keys: {}", keys_dir.display());
    // NOTE(review): assumes the public key is ASCII and at least 16 chars
    // (e.g. hex/base64); a shorter or non-ASCII key would panic here — confirm.
    println!(" pubkey: {}…", &public_key[..16]);
    if let Some(id) = finding_id.as_deref() {
        println!(" finding: {id}");
    }
    println!();
    println!(" {}", style::ok("done"));
    println!(" next:");
    println!(" vela serve {}", path.display());
    println!(
        " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
        path.display()
    );
    println!(" vela log {}", path.display());
    println!();
}
13757
/// Materialize (or, with `--check`, verify) the `vela.lock` state hashes for
/// the repository at `path` and print them.
fn cmd_lock(path: &Path, check: bool, json_output: bool) {
    // --check delegates entirely to the drift checker.
    if check {
        cmd_lock_check(path, json_output);
        return;
    }
    let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "lock",
                "path": path.display().to_string(),
                "snapshot_hash": payload.get("snapshot_hash"),
                "event_log_hash": payload.get("event_log_hash"),
                "proposal_state_hash": payload.get("proposal_state_hash"),
            }))
            .expect("failed to serialize lock report")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · LOCK · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    // Missing hashes render as "?" rather than aborting.
    println!(
        " snapshot_hash: {}",
        payload
            .get("snapshot_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        " event_log_hash: {}",
        payload
            .get("event_log_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!(
        " proposal_state_hash: {}",
        payload
            .get("proposal_state_hash")
            .and_then(|v| v.as_str())
            .unwrap_or("?")
    );
    println!();
    println!(" {}", style::ok("locked"));
}
13816
/// Compare the on-disk frontier state against `vela.lock` and report drift in
/// snapshot and event-log hashes. Exits with status 1 when drift is found.
fn cmd_lock_check(path: &Path, json_output: bool) {
    use crate::frontier_repo::read_lock;
    let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
    let Some(lock) = lock else {
        fail("lock --check: no vela.lock found at path");
    };
    // Recompute the current hashes from the live repository state.
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
    let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
    let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
    let mut drift: Vec<String> = Vec::new();
    if lock.snapshot_hash != current_snapshot {
        drift.push(format!(
            "snapshot_hash: lock={} current={}",
            lock.snapshot_hash, current_snapshot
        ));
    }
    if lock.event_log_hash != current_event_log {
        drift.push(format!(
            "event_log_hash: lock={} current={}",
            lock.event_log_hash, current_event_log
        ));
    }
    let ok = drift.is_empty();
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": ok,
                "command": "lock.check",
                "path": path.display().to_string(),
                "drift": drift,
                "lock_snapshot_hash": lock.snapshot_hash,
                "current_snapshot_hash": current_snapshot,
                "lock_event_log_hash": lock.event_log_hash,
                "current_event_log_hash": current_event_log,
                "dependency_count": lock.dependencies.len(),
            }))
            .expect("failed to serialize lock check report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · LOCK · CHECK · {}", path.display())
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        if ok {
            println!(" snapshot_hash: {}", lock.snapshot_hash);
            println!(" event_log_hash: {}", lock.event_log_hash);
            println!(" dependencies pinned: {}", lock.dependencies.len());
            println!();
            println!(" {} on-disk state matches vela.lock", style::ok("ok"));
        } else {
            println!(" {} drift detected:", style::err_prefix());
            for d in &drift {
                println!(" - {d}");
            }
        }
    }
    // The exit code reflects drift in both output modes.
    if !ok {
        std::process::exit(1);
    }
}
13882
13883fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
13888 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
13889 let out_dir = out
13890 .map(Path::to_path_buf)
13891 .unwrap_or_else(|| path.join("doc"));
13892 let report =
13893 crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
13894 if json_output {
13895 println!(
13896 "{}",
13897 serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
13898 );
13899 return;
13900 }
13901 println!();
13902 println!(
13903 " {}",
13904 format!("VELA · DOC · {}", path.display())
13905 .to_uppercase()
13906 .dimmed()
13907 );
13908 println!(" {}", style::tick_row(60));
13909 println!(" frontier_id: {}", report.frontier_id);
13910 println!(" out: {}", report.out);
13911 println!(" files written: {}", report.files_written);
13912 println!(" findings: {}", report.findings_documented);
13913 println!(" events: {}", report.events_documented);
13914 println!();
13915 println!(
13916 " {} open {}/index.html in a browser",
13917 style::ok("ok"),
13918 report.out
13919 );
13920}
13921
13922fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
13923 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
13924 let target = into
13925 .map(Path::to_path_buf)
13926 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
13927 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
13928 println!(
13929 "{} {} findings · {}",
13930 style::ok("imported"),
13931 frontier.findings.len(),
13932 target.display()
13933 );
13934}
13935
13936fn cmd_locator_repair(
13937 path: &Path,
13938 atom_id: &str,
13939 locator_override: Option<&str>,
13940 reviewer: &str,
13941 reason: &str,
13942 apply: bool,
13943 json_output: bool,
13944) {
13945 let report = state::repair_evidence_atom_locator(
13946 path,
13947 atom_id,
13948 locator_override,
13949 reviewer,
13950 reason,
13951 apply,
13952 )
13953 .unwrap_or_else(|e| fail_return(&e));
13954 print_state_report(&report, json_output);
13955}
13956
/// Fetch bibliographic metadata for a source identifier (doi:/pmid:/nct:),
/// serving from an on-disk cache when available and writing fresh fetches
/// back into it. Output goes to `out_path` or stdout.
async fn cmd_source_fetch(
    identifier: &str,
    cache_root: Option<&Path>,
    out_path: Option<&Path>,
    refresh: bool,
    _json_output: bool,
) {
    use sha2::{Digest, Sha256};

    let normalized = normalize_source_identifier(identifier);
    // Cache key: sha256 of the normalized identifier, under
    // <cache_root>/sources/cache/<hash>.json.
    let cache_path = cache_root.map(|root| {
        let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
        root.join("sources")
            .join("cache")
            .join(format!("{hash}.json"))
    });

    // Cache hit short-circuits the network fetch unless --refresh is set.
    if !refresh
        && let Some(p) = cache_path.as_ref()
        && p.is_file()
    {
        let body = std::fs::read_to_string(p)
            .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
        emit_source_fetch_result(&body, out_path);
        return;
    }

    let result = fetch_source_metadata(&normalized).await;
    let json = match result {
        Ok(value) => serde_json::to_string_pretty(&value)
            .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
        Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
    };

    // Best-effort cache write happens before emitting the result, so a
    // successful fetch is always persisted when a cache root was given.
    if let Some(p) = cache_path.as_ref() {
        if let Some(parent) = p.parent() {
            std::fs::create_dir_all(parent)
                .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
        }
        std::fs::write(p, &json)
            .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
    }
    emit_source_fetch_result(&json, out_path);
}
14005
14006fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
14007 if let Some(p) = out_path {
14008 if let Some(parent) = p.parent() {
14009 let _ = std::fs::create_dir_all(parent);
14010 }
14011 std::fs::write(p, body)
14012 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
14013 } else {
14014 println!("{body}");
14015 }
14016}
14017
/// Normalize a raw source identifier into its canonical prefixed form.
///
/// Rules, in order:
/// - strings already prefixed `doi:` / `pmid:` / `nct:` / `pmc:` pass through,
/// - bare DOIs (starting with `10.`) gain a `doi:` prefix,
/// - NCT trial ids (`NCT…` / `nct…`) become `nct:<digits>`,
/// - all-ASCII-digit strings are treated as PubMed ids (`pmid:<digits>`),
/// - anything else is returned trimmed and otherwise unchanged.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // BUG FIX: the previous code ended the chain with `.split_at(0).0`,
        // which always yields the empty string, so "NCT01234567" normalized
        // to "nct:" with the trial number dropped. Keep everything after the
        // (case-insensitive) NCT prefix instead.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    if trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
14046
/// Dispatch a normalized identifier to the matching metadata fetcher.
///
/// For DOIs: fetch via Crossref first; when Crossref has no abstract, try to
/// resolve the DOI to a PMID and backfill the abstract from PubMed, recording
/// the backfill provenance in `abstract_source`.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // PubMed backfill is best-effort: any failure keeps the Crossref
        // record as-is (let-chain short-circuits on None/Err).
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    // Note: pmc: identifiers pass normalization but have no fetcher yet.
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
14096
/// Resolve a DOI to a PubMed id via the NCBI esearch endpoint.
///
/// Returns `None` on any network/parse failure, or when the lookup is not
/// unambiguous (zero hits, or more than one).
async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
        urlencoding::encode(doi)
    );
    let resp = client.get(&url).send().await.ok()?;
    if !resp.status().is_success() {
        return None;
    }
    let body: Value = resp.json().await.ok()?;
    let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
    // Require exactly one hit; anything else is treated as unresolved.
    if id_list.len() != 1 {
        return None;
    }
    id_list.first()?.as_str().map(|s| s.to_string())
}
14118
/// Fetch work metadata for a DOI from the Crossref REST API and map it into
/// the shared `vela.source_fetch.v0.1` record shape.
///
/// Missing fields degrade to empty strings / null rather than erroring.
async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
    // NOTE(review): the DOI is interpolated unescaped; DOIs with characters
    // outside the usual set may need percent-encoding — confirm against the
    // Crossref API's path handling.
    let url = format!("https://api.crossref.org/works/{doi}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("crossref get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("crossref returned {}", resp.status()));
    }
    let body: Value = resp
        .json()
        .await
        .map_err(|e| format!("crossref json: {e}"))?;
    let work = body.get("message").cloned().unwrap_or(Value::Null);
    // Crossref returns title/container-title as arrays; take the first entry.
    let title = work
        .get("title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Abstracts arrive as JATS XML; strip tags to plain text.
    let abstract_html = work
        .get("abstract")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let abstract_text = strip_jats_tags(&abstract_html);
    // issued.date-parts is [[year, month, day]]; the first scalar is the year.
    let year = work
        .get("issued")
        .and_then(|v| v.get("date-parts"))
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_i64());
    let journal = work
        .get("container-title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Authors: "Given Family", dropping entries with neither name part.
    let authors = work
        .get("author")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|a| {
                    let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
                    let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
                    let combined = format!("{given} {family}").trim().to_string();
                    if combined.is_empty() {
                        None
                    } else {
                        Some(combined)
                    }
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("doi:{doi}"),
        "source": "crossref",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": authors,
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14192
/// Fetch article metadata for a PMID from the NCBI efetch endpoint (XML) and
/// map it into the shared `vela.source_fetch.v0.1` record shape.
///
/// Parsing is deliberately rough: first occurrence of each tag, no XML parser.
/// NOTE(review): `<AbstractText Label="...">` (tag with attributes) will not
/// match the plain `<AbstractText>` pattern, leaving the abstract empty for
/// structured abstracts — confirm whether that is acceptable.
async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
    );
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("pubmed get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("pubmed returned {}", resp.status()));
    }
    let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
    let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
    let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
    let year = extract_xml_text(&xml, "<Year>", "</Year>")
        .parse::<i64>()
        .ok();
    // In PubMed XML, <Title> is the journal title (article title is above).
    let journal = extract_xml_text(&xml, "<Title>", "</Title>");
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("pmid:{pmid}"),
        "source": "pubmed",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14224
/// Fetch trial metadata for an NCT id from the ClinicalTrials.gov v2 API and
/// map it into the shared `vela.source_fetch.v0.1` record shape.
///
/// Accepts ids with or without the "NCT" prefix, in any case.
async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
    let nct_clean = nct.trim();
    let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
        nct_clean.to_uppercase()
    } else {
        format!("NCT{nct_clean}")
    };
    let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ctgov get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("ctgov returned {}", resp.status()));
    }
    let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
    let title = body
        .pointer("/protocolSection/identificationModule/briefTitle")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // The brief summary stands in for the record's "abstract" field.
    let abstract_text = body
        .pointer("/protocolSection/descriptionModule/briefSummary")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let phase = body
        .pointer("/protocolSection/designModule/phases")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("nct:{nct_id}"),
        "source": "clinicaltrials.gov",
        "title": title,
        "abstract": abstract_text,
        "year": Value::Null,
        // Trials have no journal; the first listed phase fills that slot so
        // the record shape stays uniform across sources.
        "journal": phase,
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14271
/// Return the trimmed text between the first occurrence of `open` and the
/// next occurrence of `close`, or an empty string when either is missing.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    let Some(start) = xml.find(open) else {
        return String::new();
    };
    let tail = &xml[start + open.len()..];
    match tail.find(close) {
        Some(end) => tail[..end].trim().to_string(),
        None => String::new(),
    }
}
14281
/// Strip XML/JATS tags from a string and collapse runs of whitespace into
/// single spaces, yielding plain text.
fn strip_jats_tags(html: &str) -> String {
    let mut text = String::with_capacity(html.len());
    let mut inside_tag = false;
    for ch in html.chars() {
        if ch == '<' {
            inside_tag = true;
        } else if ch == '>' {
            inside_tag = false;
        } else if !inside_tag {
            text.push(ch);
        }
    }
    // Normalize whitespace left behind by removed tags and line breaks.
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
14295
14296fn cmd_span_repair(
14297 path: &Path,
14298 finding_id: &str,
14299 section: &str,
14300 text: &str,
14301 reviewer: &str,
14302 reason: &str,
14303 apply: bool,
14304 json_output: bool,
14305) {
14306 let report =
14307 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
14308 .unwrap_or_else(|e| fail_return(&e));
14309 print_state_report(&report, json_output);
14310}
14311
14312#[allow(clippy::too_many_arguments)]
14313fn cmd_entity_resolve(
14314 path: &Path,
14315 finding_id: &str,
14316 entity_name: &str,
14317 source: &str,
14318 id: &str,
14319 confidence: f64,
14320 matched_name: Option<&str>,
14321 resolution_method: &str,
14322 reviewer: &str,
14323 reason: &str,
14324 apply: bool,
14325 json_output: bool,
14326) {
14327 let report = state::resolve_finding_entity(
14328 path,
14329 finding_id,
14330 entity_name,
14331 source,
14332 id,
14333 confidence,
14334 matched_name,
14335 resolution_method,
14336 reviewer,
14337 reason,
14338 apply,
14339 )
14340 .unwrap_or_else(|e| fail_return(&e));
14341 print_state_report(&report, json_output);
14342}
14343
/// Propagate a correction (retraction or confidence reduction) through a
/// frontier's dependency graph, record the resulting review events, and save
/// the updated frontier to `output` (or back to `path`).
fn cmd_propagate(
    path: &Path,
    retract: Option<String>,
    reduce_confidence: Option<String>,
    to: Option<f64>,
    output: Option<&Path>,
) {
    let mut frontier = load_frontier_or_fail(path);
    // Exactly one of --retract / --reduce-confidence selects the action;
    // --retract takes precedence when both are given.
    let (finding_id, action, label) = if let Some(id) = retract {
        (id, propagate::PropagationAction::Retracted, "retraction")
    } else if let Some(id) = reduce_confidence {
        let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
        if !(0.0..=1.0).contains(&score) {
            fail("--to must be between 0.0 and 1.0");
        }
        (
            id,
            propagate::PropagationAction::ConfidenceReduced { new_score: score },
            "confidence reduction",
        )
    } else {
        fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
    };
    if !frontier.findings.iter().any(|f| f.id == finding_id) {
        fail(&format!("finding not found: {finding_id}"));
    }
    let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
    // Events are cloned into the log because `result` is still needed below.
    frontier.review_events.extend(result.events.clone());
    project::recompute_stats(&mut frontier);
    propagate::print_result(&result, label, &finding_id);
    let out = output.unwrap_or(path);
    repo::save_to_path(out, &frontier).expect("Failed to save frontier");
    println!(" output: {}", out.display());
}
14382
/// Print a ready-to-paste MCP server configuration snippet for this binary.
/// `source` (a single frontier) wins over `frontiers` (a directory of them);
/// with neither, "frontier.json" is used as a placeholder.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    let source_desc = source
        .map(|p| p.display().to_string())
        .or_else(|| frontiers.map(|p| p.display().to_string()))
        .unwrap_or_else(|| "frontier.json".to_string());
    // The args fragment is spliced into the JSON "args" array below.
    let args = if let Some(path) = source {
        format!(r#""serve", "{}""#, path.display())
    } else if let Some(path) = frontiers {
        format!(r#""serve", "--frontiers", "{}""#, path.display())
    } else {
        r#""serve", "frontier.json""#.to_string()
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
 "mcpServers": {{
 "vela": {{
 "command": "vela",
 "args": [{args}]
 }}
 }}
}}

Source: {source_desc}"#
    );
}
14410
14411fn parse_entities(input: &str) -> Vec<(String, String)> {
14412 if input.trim().is_empty() {
14413 return Vec::new();
14414 }
14415 input
14416 .split(',')
14417 .filter_map(|pair| {
14418 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
14419 if parts.len() == 2 {
14420 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
14421 } else {
14422 eprintln!(
14423 "{} skipping malformed entity '{}'",
14424 style::warn("warn"),
14425 pair.trim()
14426 );
14427 None
14428 }
14429 })
14430 .collect()
14431}
14432
14433fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
14434 inputs
14435 .iter()
14436 .filter_map(|input| {
14437 let trimmed = input.trim();
14438 if trimmed.is_empty() {
14439 return None;
14440 }
14441 if trimmed.starts_with('{') {
14442 match serde_json::from_str::<Value>(trimmed) {
14443 Ok(value @ Value::Object(_)) => return Some(value),
14444 Ok(_) | Err(_) => {
14445 eprintln!(
14446 "{} evidence span JSON should be an object; storing as text",
14447 style::warn("warn")
14448 );
14449 }
14450 }
14451 }
14452 Some(json!({
14453 "section": "curator_source",
14454 "text": trimmed,
14455 }))
14456 })
14457 .collect()
14458}
14459
14460fn hash_path(path: &Path) -> Result<String, String> {
14461 let mut hasher = Sha256::new();
14462 if path.is_file() {
14463 let bytes = std::fs::read(path)
14464 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
14465 hasher.update(&bytes);
14466 } else if path.is_dir() {
14467 let mut files = Vec::new();
14468 collect_hash_files(path, path, &mut files)?;
14469 files.sort();
14470 for rel in files {
14471 hasher.update(rel.to_string_lossy().as_bytes());
14472 let bytes = std::fs::read(path.join(&rel))
14473 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
14474 hasher.update(bytes);
14475 }
14476 } else {
14477 return Err(format!("Cannot hash missing path {}", path.display()));
14478 }
14479 Ok(format!("{:x}", hasher.finalize()))
14480}
14481
14482fn load_frontier_or_fail(path: &Path) -> project::Project {
14483 repo::load_from_path(path).unwrap_or_else(|e| {
14484 fail_return(&format!(
14485 "Failed to load frontier '{}': {e}",
14486 path.display()
14487 ))
14488 })
14489}
14490
14491fn hash_path_or_fail(path: &Path) -> String {
14492 hash_path(path).unwrap_or_else(|e| {
14493 fail_return(&format!(
14494 "Failed to hash frontier '{}': {e}",
14495 path.display()
14496 ))
14497 })
14498}
14499
/// Recursively gather every regular file under `dir`, pushing its path
/// relative to `root` onto `files`.
///
/// Directories recurse; entries that are neither file nor directory are
/// skipped. No ordering is guaranteed — callers sort if they need
/// determinism.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let relative = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(relative.to_path_buf());
        }
    }
    Ok(())
}
14518
14519fn schema_error_suggestion(error: &str) -> &'static str {
14520 if schema_error_action(error).is_some() {
14521 "Run `vela normalize` to repair deterministic frontier state."
14522 } else {
14523 "Inspect and correct the referenced frontier field."
14524 }
14525}
14526
14527fn schema_error_fix(error: &str) -> bool {
14528 schema_error_action(error).is_some()
14529}
14530
/// Map a schema validation error message to a normalize action id.
///
/// Returns `Some("normalize_metadata_and_stats")` for stats/metadata
/// errors, `Some("rewrite_ids")` for content-address mismatches, and
/// `None` when no deterministic repair is known.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substrings that indicate a metadata/stats normalization can fix it.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
14545
14546fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14547 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14548 for diagnostic in diagnostics {
14549 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14550 *actions.entry(action.to_string()).or_default() += 1;
14551 }
14552 }
14553 actions
14554 .into_iter()
14555 .map(|(action, count)| {
14556 let command = if action == "rewrite_ids" {
14557 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14558 } else {
14559 "vela normalize <frontier> --write"
14560 };
14561 json!({
14562 "action": action,
14563 "count": count,
14564 "command": command,
14565 })
14566 })
14567 .collect()
14568}
14569
14570fn cmd_integrity(frontier: &Path, json: bool) {
14571 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14572 if json {
14573 println!(
14574 "{}",
14575 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14576 );
14577 } else {
14578 println!("vela integrity");
14579 println!(" frontier: {}", frontier.display());
14580 println!(" status: {}", report.status);
14581 println!(" proof freshness: {}", report.proof_freshness);
14582 println!(" structural errors: {}", report.structural_errors.len());
14583 for error in report.structural_errors.iter().take(8) {
14584 println!(" - {}: {}", error.rule_id, error.message);
14585 }
14586 }
14587}
14588
14589fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
14590 let report =
14591 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
14592 if json {
14593 println!(
14594 "{}",
14595 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
14596 );
14597 } else {
14598 println!("vela impact");
14599 println!(" finding: {}", report.target.id);
14600 println!(" frontier: {}", report.frontier.vfr_id);
14601 println!(" direct dependents: {}", report.summary.direct_dependents);
14602 println!(" downstream: {}", report.summary.total_downstream);
14603 println!(" open proposals: {}", report.summary.open_proposals);
14604 println!(" accepted events: {}", report.summary.accepted_events);
14605 println!(" proof: {}", report.summary.proof_status);
14606 }
14607}
14608
/// Report discord assignments across the frontier's findings, either as
/// a JSON report or a human-readable summary.
///
/// `kind_filter`, when set, restricts the per-finding rows to findings
/// whose discord set contains that kind; the histogram below is computed
/// over ALL findings and is not filtered.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // Findings that carry at least one discord kind ("support" set).
    let support = assignment.frontier_support();

    // Rows of (finding id, discord kind names), optionally filtered.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Per-kind counts over the whole assignment; kinds with zero hits are
    // omitted so the printed histogram stays compact.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    // Cap the per-finding listing at 50 rows to keep terminal output sane.
    if !rows.is_empty() {
        println!();
        println!(" findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
14706
14707fn empty_signal_report() -> signals::SignalReport {
14708 signals::SignalReport {
14709 schema: "vela.signals.v0".to_string(),
14710 frontier: "unavailable".to_string(),
14711 signals: Vec::new(),
14712 review_queue: Vec::new(),
14713 proof_readiness: signals::ProofReadiness {
14714 status: "unavailable".to_string(),
14715 blockers: 0,
14716 warnings: 0,
14717 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
14718 },
14719 }
14720}
14721
14722fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
14723 println!();
14724 println!(" {}", "SIGNALS".dimmed());
14725 println!(" {}", style::tick_row(60));
14726 println!(" total signals: {}", report.signals.len());
14727 println!(" proof readiness: {}", report.proof_readiness.status);
14728 if !report.review_queue.is_empty() {
14729 println!(" review queue: {} items", report.review_queue.len());
14730 }
14731 if strict && report.proof_readiness.status != "ready" {
14732 println!(
14733 " {} proof readiness has blocking signals.",
14734 style::lost("strict check failed")
14735 );
14736 }
14737}
14738
14739fn append_packet_json_file(
14740 packet_dir: &Path,
14741 relative_path: &str,
14742 value: &Value,
14743) -> Result<(), String> {
14744 let content = serde_json::to_vec_pretty(value)
14745 .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
14746 let path = packet_dir.join(relative_path);
14747 if let Some(parent) = path.parent() {
14748 std::fs::create_dir_all(parent)
14749 .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
14750 }
14751 std::fs::write(&path, &content)
14752 .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
14753 let entry = json!({
14754 "path": relative_path,
14755 "sha256": hex::encode(Sha256::digest(&content)),
14756 "bytes": content.len(),
14757 });
14758
14759 for manifest_name in ["manifest.json", "packet.lock.json"] {
14760 let manifest_path = packet_dir.join(manifest_name);
14761 let data = std::fs::read_to_string(&manifest_path)
14762 .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
14763 let mut manifest: Value = serde_json::from_str(&data)
14764 .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
14765 let array_key = if manifest_name == "manifest.json" {
14766 "included_files"
14767 } else {
14768 "files"
14769 };
14770 let files = manifest
14771 .get_mut(array_key)
14772 .and_then(Value::as_array_mut)
14773 .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
14774 files.retain(|file| {
14775 file.get("path")
14776 .and_then(Value::as_str)
14777 .is_none_or(|path| path != relative_path)
14778 });
14779 files.push(entry.clone());
14780 std::fs::write(
14781 &manifest_path,
14782 serde_json::to_vec_pretty(&manifest)
14783 .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
14784 )
14785 .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
14786 }
14787
14788 let lock_path = packet_dir.join("packet.lock.json");
14789 let lock_content = std::fs::read(&lock_path)
14790 .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
14791 let lock_entry = json!({
14792 "path": "packet.lock.json",
14793 "sha256": hex::encode(Sha256::digest(&lock_content)),
14794 "bytes": lock_content.len(),
14795 });
14796 let manifest_path = packet_dir.join("manifest.json");
14797 let data = std::fs::read_to_string(&manifest_path)
14798 .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
14799 let mut manifest: Value = serde_json::from_str(&data)
14800 .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
14801 let files = manifest
14802 .get_mut("included_files")
14803 .and_then(Value::as_array_mut)
14804 .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
14805 files.retain(|file| {
14806 file.get("path")
14807 .and_then(Value::as_str)
14808 .is_none_or(|path| path != "packet.lock.json")
14809 });
14810 files.push(lock_entry);
14811 std::fs::write(
14812 &manifest_path,
14813 serde_json::to_vec_pretty(&manifest)
14814 .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
14815 )
14816 .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
14817 Ok(())
14818}
14819
14820fn print_tool_check_report(report: &Value) {
14821 let summary = report.get("summary").unwrap_or(&Value::Null);
14822 let frontier = report.get("frontier").unwrap_or(&Value::Null);
14823 println!();
14824 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
14825 println!(" {}", style::tick_row(60));
14826 println!(
14827 "frontier: {}",
14828 frontier
14829 .get("name")
14830 .and_then(Value::as_str)
14831 .unwrap_or("unknown")
14832 );
14833 println!(
14834 "findings: {}",
14835 frontier
14836 .get("findings")
14837 .and_then(Value::as_u64)
14838 .unwrap_or_default()
14839 );
14840 println!(
14841 "checks: {} passed, {} failed",
14842 summary
14843 .get("passed")
14844 .and_then(Value::as_u64)
14845 .unwrap_or_default(),
14846 summary
14847 .get("failed")
14848 .and_then(Value::as_u64)
14849 .unwrap_or_default()
14850 );
14851 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
14852 let names = tools
14853 .iter()
14854 .filter_map(Value::as_str)
14855 .collect::<Vec<_>>()
14856 .join(", ");
14857 println!("tools: {names}");
14858 }
14859 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
14860 for check in checks {
14861 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
14862 style::ok("ok")
14863 } else {
14864 style::lost("lost")
14865 };
14866 println!(
14867 " {} {}",
14868 status,
14869 check
14870 .get("tool")
14871 .and_then(Value::as_str)
14872 .unwrap_or("unknown")
14873 );
14874 }
14875 }
14876}
14877
14878fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
14879 if json_output {
14880 println!(
14881 "{}",
14882 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
14883 );
14884 } else {
14885 println!("{}", report.message);
14886 println!(" frontier: {}", report.frontier);
14887 println!(" finding: {}", report.finding_id);
14888 println!(" proposal: {}", report.proposal_id);
14889 println!(" status: {}", report.proposal_status);
14890 if let Some(event_id) = &report.applied_event_id {
14891 println!(" event: {}", event_id);
14892 }
14893 println!(" wrote: {}", report.wrote_to);
14894 }
14895}
14896
14897fn print_history(payload: &Value) {
14898 let finding = payload.get("finding").unwrap_or(&Value::Null);
14899 println!("vela history");
14900 println!(
14901 " finding: {}",
14902 finding
14903 .get("id")
14904 .and_then(Value::as_str)
14905 .unwrap_or("unknown")
14906 );
14907 println!(
14908 " assertion: {}",
14909 finding
14910 .get("assertion")
14911 .and_then(Value::as_str)
14912 .unwrap_or("")
14913 );
14914 println!(
14915 " confidence: {:.3}",
14916 finding
14917 .get("confidence")
14918 .and_then(Value::as_f64)
14919 .unwrap_or_default()
14920 );
14921 let reviews = payload
14922 .get("review_events")
14923 .and_then(Value::as_array)
14924 .map_or(0, Vec::len);
14925 let updates = payload
14926 .get("confidence_updates")
14927 .and_then(Value::as_array)
14928 .map_or(0, Vec::len);
14929 let annotations = finding
14930 .get("annotations")
14931 .and_then(Value::as_array)
14932 .map_or(0, Vec::len);
14933 let sources = payload
14934 .get("sources")
14935 .and_then(Value::as_array)
14936 .map_or(0, Vec::len);
14937 let atoms = payload
14938 .get("evidence_atoms")
14939 .and_then(Value::as_array)
14940 .map_or(0, Vec::len);
14941 let conditions = payload
14942 .get("condition_records")
14943 .and_then(Value::as_array)
14944 .map_or(0, Vec::len);
14945 let proposals = payload
14946 .get("proposals")
14947 .and_then(Value::as_array)
14948 .map_or(0, Vec::len);
14949 let events = payload
14950 .get("events")
14951 .and_then(Value::as_array)
14952 .map_or(0, Vec::len);
14953 println!(" review events: {reviews}");
14954 println!(" confidence updates: {updates}");
14955 println!(" annotations: {annotations}");
14956 println!(" sources: {sources}");
14957 println!(" evidence atoms: {atoms}");
14958 println!(" condition records: {conditions}");
14959 println!(" proposals: {proposals}");
14960 println!(" canonical events: {events}");
14961 if let Some(status) = payload
14962 .get("proof_state")
14963 .and_then(|value| value.get("latest_packet"))
14964 .and_then(|value| value.get("status"))
14965 .and_then(Value::as_str)
14966 {
14967 println!(" proof state: {status}");
14968 }
14969 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
14970 for event in events.iter().take(8) {
14971 println!(
14972 " - {} {} {}",
14973 event
14974 .get("reviewed_at")
14975 .and_then(Value::as_str)
14976 .unwrap_or(""),
14977 event.get("id").and_then(Value::as_str).unwrap_or(""),
14978 event.get("reason").and_then(Value::as_str).unwrap_or("")
14979 );
14980 }
14981 }
14982}
14983
/// Serializable record of one proof run.
///
/// NOTE(review): field meanings below are inferred from names — the
/// producer is not visible in this file; confirm against the proof
/// command implementation.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    // Version tag for the trace format itself.
    pub trace_version: String,
    // Presumably the argv-style invocation that produced the trace.
    pub command: Vec<String>,
    // Source path and its content hash (see `hash_path`) — TODO confirm.
    pub source: String,
    pub source_hash: String,
    pub schema_version: String,
    // Artifacts examined during the run.
    pub checked_artifacts: Vec<String>,
    // Optional benchmark payload; absent when no benchmark ran.
    pub benchmark: Option<Value>,
    pub packet_manifest: String,
    pub packet_validation: String,
    pub caveats: Vec<String>,
    pub status: String,
    // Where this trace was written.
    pub trace_path: String,
}
14999
/// Every subcommand name treated as part of the science CLI surface;
/// membership is tested via [`is_science_subcommand`].
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "doc",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
15135
15136pub fn is_science_subcommand(name: &str) -> bool {
15137 SCIENCE_SUBCOMMANDS.contains(&name)
15138}
15139
/// Print the full static help text: core flow, read-only inspection,
/// advanced commands, and quick-start recipes. The version in the header
/// is baked in at compile time via `CARGO_PKG_VERSION`.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
 vela <COMMAND>

Core flow (v0.74):
 init Initialize a split frontier repo
 ingest Ingest a paper, dataset, or Carina packet (dispatches by file type)
 propose Create a finding.review proposal
 diff Preview a `vpr_*` proposal, or compare two frontier files
 accept Apply a proposal under reviewer authority
 attest Sign findings under your private key
 log Recent canonical state events
 lineage State-transition replay for one finding
 serve Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
 check Validate a frontier, repo, or proof packet
 integrity Check accepted frontier state integrity
 impact Report downstream finding impact
 normalize Apply deterministic frontier-state repairs
 proof Export and validate a proof packet
 repo Inspect split frontier repository status and shape
 stats Show frontier statistics
 search Search findings
 tensions List candidate contradictions and tensions
 gaps Inspect and rank candidate gap review leads
 bridge Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
 scout Run Literature Scout against a folder of PDFs (writes proposals)
 compile-notes Run Notes Compiler against a Markdown vault (writes proposals)
 compile-code Run Code & Notebook Analyst against a research repo (writes proposals)
 compile-data Run Datasets agent against a folder of CSV/TSV data (writes proposals)
 review-pending Run Reviewer Agent: score every pending proposal (writes notes)
 find-tensions Run Contradiction Finder: surface real contradictions among findings
 plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
 export Export frontier artifacts
 packet Inspect or validate proof packets
 bench Run deterministic benchmark gates
 conformance Run protocol conformance vectors
 sign Optional signing and signature verification
 runtime-adapter
 Normalize external runtime exports into reviewable proposals
 version Show version information
 import Import frontier.json into a .vela repo
 proposals Inspect, validate, export, import, accept, or reject write proposals
 artifact-to-state
 Import a Carina artifact packet as reviewable proposals
 bridge-kit
 Validate Carina artifact packets before importing runtime output
 source-adapter
 Run reviewed source adapters into artifact-to-state proposals
 finding Add or manage finding bundles as frontier state
 link Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
 entity Resolve unresolved entities against a bundled common-entity table (v0.19)
 frontier Scaffold (`new`), materialize, and manage frontier metadata + deps
 actor Register Ed25519 publisher identities in a frontier
 registry Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
 review Create a review proposal or review interactively
 note Add a lightweight note to a finding
 caveat Create an explicit caveat proposal
 revise Create a confidence revision proposal
 reject Create a rejection proposal
 history Show state-transition history for one finding (v0.74 alias: `lineage`)
 import-events Import review/state events from a packet or JSON file
 retract Create a retraction proposal
 propagate Simulate impact over declared dependency links
 artifact-add Register a content-addressed artifact
 artifacts List content-addressed artifacts
 artifact-audit Audit artifact locators, hashes, references, and profiles
 decision-brief Show the validated decision brief projection
 trial-summary Show the validated trial outcome projection
 source-verification Show the validated source verification projection
 source-ingest-plan Show the validated source ingest plan
 clinical-trial-import Import a ClinicalTrials.gov record as an artifact
 locator-repair Mechanically repair an evidence atom's missing source locator
 span-repair Mechanically repair a finding's missing evidence span
 entity-resolve Resolve a finding entity to a canonical id
 source-fetch Fetch metadata + abstract for a doi:/pmid:/nct: source
 atlas Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
 constellation Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
 vela init demo --name "Your bounded question"
 vela ingest paper.pdf --frontier demo
 vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
 vela diff <vpr_id> --frontier demo
 vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
 vela serve --path demo

Substrate health:
 vela frontier materialize my-frontier --json
 vela repo status my-frontier --json
 vela proof verify my-frontier --json
 vela check my-frontier --strict --json

Monolithic frontier file:
 vela frontier new frontier.json --name "Your bounded question"
 vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
 vela check frontier.json --json
 FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
 vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
 vela frontier new ./frontier.json --name "Your bounded question"
 vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
 vela sign generate-keypair --out keys
 vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
 vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
 --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
15258
/// Entry-point signature for the Literature Scout agent; the parameters
/// mirror the `Commands::Scout` CLI arguments. Returns a boxed `Send`
/// future so the registered implementation can be async.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Set once at startup via `register_scout_handler`.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Register the Scout agent handler. Only the first registration takes
/// effect: `OnceLock::set` errors on later calls and the error is
/// deliberately discarded.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
15283
/// Handler signature for Atlas initialization; returns a boxed `Send`
/// future so the registered implementation can be async.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Register the Atlas-init handler; first registration wins, later calls
/// are silently ignored (`OnceLock::set`).
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}
15301
/// Handler signature for Atlas materialization; returns a boxed `Send`
/// future so the registered implementation can be async.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Register the Atlas-materialize handler; first registration wins,
/// later calls are silently ignored (`OnceLock::set`).
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}
15311
/// Handler signature for serving an Atlas locally on `port`; returns a
/// boxed `Send` future so the registered implementation can be async.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Register the Atlas-serve handler; first registration wins, later
/// calls are silently ignored (`OnceLock::set`).
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}
15328
/// Handler signature for Atlas updates (adding frontier files, removing
/// members by `vfr` id); returns a boxed `Send` future.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Register the Atlas-update handler; first registration wins, later
/// calls are silently ignored (`OnceLock::set`).
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
15346
/// Handler signature for Constellation initialization from a set of
/// Atlas paths; returns a boxed `Send` future.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Register the Constellation-init handler; first registration wins,
/// later calls are silently ignored (`OnceLock::set`).
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}
15363
/// Handler signature for Constellation materialization; returns a boxed
/// `Send` future so the registered implementation can be async.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Register the Constellation-materialize handler; first registration
/// wins, later calls are silently ignored (`OnceLock::set`).
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}
15376
/// Handler signature for serving a Constellation locally on `port`;
/// returns a boxed `Send` future.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Register the Constellation-serve handler; first registration wins,
/// later calls are silently ignored (`OnceLock::set`).
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
15389
/// Entry-point signature for the Notes Compiler agent; the parameters
/// mirror the `Commands::CompileNotes` CLI arguments. Returns a boxed
/// `Send` future so the registered implementation can be async.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Register the Notes Compiler handler; first registration wins, later
/// calls are silently ignored (`OnceLock::set`).
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}
15409
/// Entry-point signature for the Code & Notebook Analyst agent; the
/// parameters mirror the `Commands::CompileCode` CLI arguments. Returns
/// a boxed `Send` future.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Register the code-analyst handler; first registration wins, later
/// calls are silently ignored (`OnceLock::set`).
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}
15426
/// Entry-point signature for the Datasets agent (`compile-data`);
/// returns a boxed `Send` future so the registered implementation can be
/// async.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Register the datasets handler; first registration wins, later calls
/// are silently ignored (`OnceLock::set`).
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}
15443
/// Entry-point signature for the Reviewer agent; the parameters mirror
/// the `Commands::ReviewPending` CLI arguments. Returns a boxed `Send`
/// future.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Register the reviewer handler; first registration wins, later calls
/// are silently ignored (`OnceLock::set`).
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}
15460
/// Entry-point signature for the Contradiction Finder agent
/// (`find-tensions`); returns a boxed `Send` future.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Register the tensions handler; first registration wins, later calls
/// are silently ignored (`OnceLock::set`).
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}
15476
/// Entry-point signature for the Experiment Planner agent
/// (`plan-experiments`); returns a boxed `Send` future.
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Register the experiments handler; first registration wins, later
/// calls are silently ignored (`OnceLock::set`).
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
15492
/// Walks upward from the current working directory looking for a `.vela/`
/// directory. Returns the first ancestor (including the cwd itself) that
/// contains one, or `None` when the search exhausts the filesystem root or
/// the cwd cannot be determined.
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
15519
/// Prints the short, session-oriented help screen shown for bare `vela --help`
/// (the full subcommand surface is behind `vela help advanced`, printed by
/// `print_strict_help`). Output is plain stdout text.
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
15566
/// Renders the one-screen session dashboard: frontier identity, counts of
/// findings/events/pending proposals, a causal-audit summary, bridge and
/// replication tallies, and the quick-verb legend for the REPL.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Only the first 16 chars of the frontier id are shown, with a trailing …
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Tally pending proposals, broken down by proposal kind (BTreeMap keeps
    // the kinds in a stable, sorted display order).
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Bridges live as individual JSON files under .vela/bridges; count every
    // .json file, and classify the ones that still parse. Unreadable or
    // unparsable files are counted in the total but not in either bucket.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replication summary: distinct findings with at least one successful
    // replication, plus a raw count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Header shows the compiler version without its "vela/" prefix.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        " {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " vfr_id {}… repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        " findings {:>4} events {} proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // The sections below are conditional: only problems and non-empty
    // tallies are shown, keeping the dashboard to one screen.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!(" {} · {}", style::warn("inbox"), parts.join(" "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            " {} · {} underidentified · {} conditional",
            // Underidentified findings escalate the label from warn to lost.
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            " {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            " {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    // Quick-verb legend; keys map to the arms of run_session_verb.
    println!();
    println!(" type a verb or ask anything:");
    println!(" a audit problems i inbox (pending) b bridges");
    println!(" g causal graph l log (recent) c counterfactuals");
    println!(" s refresh status h help (more verbs) q quit");
    println!();
}
15686
/// Dispatches a single session verb typed at the REPL prompt.
///
/// Returns `true` when `verb` was recognized and handled (even if the handler
/// printed an error), and `false` so the caller can fall through to the
/// free-text question path.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        // Causal audit, restricted to problem findings only.
        "a" | "audit" => {
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Proposals awaiting review.
        "i" | "inbox" => {
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        // All bridges, regardless of status.
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        // Full causal graph (no node filter).
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Last 10 canonical events.
        "l" | "log" => {
            cmd_log(repo_path, 10, None, false);
            true
        }
        // Enumerate the mechanism-annotated edges that could support a
        // counterfactual query, without actually running one.
        "c" | "counterfactual" | "counterfactuals" => {
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    // Load failure is reported, but the verb still counts as
                    // handled so the REPL does not fall through to Q&A.
                    eprintln!("{} {e}", style::err_prefix());
                    return true;
                }
            };
            println!();
            println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!(" {}", style::tick_row(60));
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    // Only depends/supports edges carrying an explicit
                    // mechanism qualify as live counterfactual pairs.
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Link targets may look like "<prefix>:<id>"; strip the
                    // prefix when present.
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    // Cap the listing at 10 rows while still counting all.
                    if pairs <= 10 {
                        println!(" · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!(" no mechanism-annotated edges found.");
                println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!(" {pairs} live pair(s). Run with:");
                println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        // Reload the project and redraw the dashboard.
        "s" | "status" | "refresh" => {
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        // Unrecognized: let the caller treat the input as a question.
        _ => false,
    }
}
15791
/// Interactive session entry point: locate the nearest `.vela/` repo, print
/// the dashboard, then loop reading quick verbs or free-text questions from
/// stdin until EOF/read error or an explicit quit.
fn run_session() {
    let repo_path = match find_vela_repo() {
        Some(p) => p,
        None => {
            // No repo anywhere up the directory tree: print guidance and
            // return normally (exit code 0).
            println!();
            println!(
                " {}",
                "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" Run `vela init` here to create a frontier, or cd into one.");
            println!(" Or run `vela help` for the command list.");
            println!();
            return;
        }
    };

    // An unloadable repo is fatal for the session (exit code 1).
    let project = match repo::load_from_path(&repo_path) {
        Ok(p) => p,
        Err(e) => {
            eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
            std::process::exit(1);
        }
    };

    print_session_dashboard(&project, &repo_path);

    use std::io::{BufRead, Write};
    let stdin = std::io::stdin();
    let mut stdout = std::io::stdout();
    loop {
        print!(" > ");
        // Flush so the prompt appears before we block on read_line.
        stdout.flush().ok();
        let mut line = String::new();
        if stdin.lock().read_line(&mut line).is_err() {
            break;
        }
        let input = line.trim();
        if input.is_empty() {
            continue;
        }
        if matches!(input, "q" | "quit" | "exit") {
            break;
        }
        // Quick verbs first; anything unrecognized is treated as a question.
        if run_session_verb(input, &repo_path) {
            continue;
        }
        // Reload per question so answers reflect on-disk changes made by the
        // verbs (or other processes) since the session started.
        let project = match repo::load_from_path(&repo_path) {
            Ok(p) => p,
            Err(e) => {
                eprintln!("{} {e}", style::err_prefix());
                continue;
            }
        };
        answer(&project, input, false);
    }
}
15850
15851pub fn run_from_args() {
15852 style::init();
15853 let args = std::env::args().collect::<Vec<_>>();
15854 match args.get(1).map(String::as_str) {
15855 None => {
15859 run_session();
15860 return;
15861 }
15862 Some("-h" | "--help" | "help") => {
15863 if args.get(2).map(String::as_str) == Some("advanced") {
15866 print_strict_help();
15867 } else {
15868 print_session_help();
15869 }
15870 return;
15871 }
15872 Some("-V" | "--version" | "version") => {
15873 println!("vela {}", env!("CARGO_PKG_VERSION"));
15874 return;
15875 }
15876 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
15877 let json = args.iter().any(|arg| arg == "--json");
15878 let frontier = args
15879 .iter()
15880 .skip(3)
15881 .find(|arg| !arg.starts_with('-'))
15882 .map(PathBuf::from)
15883 .unwrap_or_else(|| {
15884 eprintln!(
15885 "{} proof verify requires a frontier repo",
15886 style::err_prefix()
15887 );
15888 std::process::exit(2);
15889 });
15890 cmd_proof_verify(&frontier, json);
15891 return;
15892 }
15893 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
15894 let frontier = args
15895 .iter()
15896 .skip(3)
15897 .find(|arg| !arg.starts_with('-'))
15898 .map(PathBuf::from)
15899 .unwrap_or_else(|| {
15900 eprintln!(
15901 "{} proof explain requires a frontier repo",
15902 style::err_prefix()
15903 );
15904 std::process::exit(2);
15905 });
15906 cmd_proof_explain(&frontier);
15907 return;
15908 }
15909 Some(cmd) if !is_science_subcommand(cmd) => {
15910 eprintln!(
15911 "{} unknown or non-release command: {cmd}",
15912 style::err_prefix()
15913 );
15914 eprintln!("run `vela --help` for the strict v0 command surface.");
15915 std::process::exit(2);
15916 }
15917 Some(_) => {}
15918 }
15919 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
15920 runtime.block_on(run_command());
15921}
15922
/// Prints `message` behind the styled error prefix and terminates the
/// process with exit code 1. Never returns.
fn fail(message: &str) -> ! {
    eprintln!("{} {message}", style::err_prefix());
    std::process::exit(1);
}
15927
15928fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
15933 if !valid.contains(&value) {
15934 fail(&format!(
15935 "invalid {flag} '{value}'. Valid: {}",
15936 valid.join(", ")
15937 ));
15938 }
15939}
15940
/// Typed wrapper around [`fail`] for expression positions that expect a value
/// of type `T` (e.g. inside `unwrap_or_else` closures). Diverges via `fail`,
/// so no `T` is ever actually produced.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}