1#![allow(clippy::too_many_lines)]
4
5use std::collections::{HashMap, HashSet};
6use std::io::{self, BufRead, Write};
7use std::path::{Path, PathBuf};
8use std::sync::Arc;
9
10use axum::{
11 Json, Router,
12 extract::State,
13 http::{HeaderMap, StatusCode},
14 routing::{get, post},
15};
16use reqwest::Client;
17use serde::Serialize;
18use serde_json::{Value, json};
19use tokio::sync::Mutex;
20use tower_http::cors::CorsLayer;
21
22use crate::bundle::FindingBundle;
23use crate::project::{self, ConfidenceDistribution, Project, ProjectStats};
24use crate::{bridge, decision, events, observer, repo, signals, sources, state, tool_registry};
25
/// Where the frontier data comes from on disk.
pub enum ProjectSource {
    /// A single frontier file; the only mode in which write tools are allowed.
    Single(PathBuf),
    /// A directory of frontier files, merged into one read-only view.
    Directory(PathBuf),
}
30
31impl ProjectSource {
32 pub fn from_args(single: Option<&Path>, dir: Option<&Path>) -> Self {
33 if let Some(d) = dir {
34 Self::Directory(d.to_path_buf())
35 } else if let Some(s) = single {
36 Self::Single(s.to_path_buf())
37 } else {
38 eprintln!(
39 "{} provide either a frontier file or --frontiers <dir>",
40 crate::cli_style::err_prefix()
41 );
42 std::process::exit(1);
43 }
44 }
45}
46
/// Summary of one loaded frontier file, reported alongside the merged project.
#[derive(Clone)]
pub struct ProjectInfo {
    /// Project name taken from the frontier's own metadata.
    pub name: String,
    /// Full path (single-file mode) or file stem (directory mode).
    pub file: String,
    /// Number of findings in this frontier.
    pub findings_count: usize,
    /// Link count as reported by the frontier's stats.
    pub links_count: usize,
    /// Papers processed when the frontier was compiled.
    pub papers: usize,
}
55
56pub fn load_projects(source: &ProjectSource) -> (Project, Vec<ProjectInfo>) {
57 match source {
58 ProjectSource::Single(path) => {
59 let mut frontier = repo::load_from_path(path).unwrap_or_else(|e| {
60 eprintln!(
61 "{} failed to load frontier: {e}",
62 crate::cli_style::err_prefix()
63 );
64 std::process::exit(1);
65 });
66 sources::materialize_project(&mut frontier);
67 let info = ProjectInfo {
68 name: frontier.project.name.clone(),
69 file: path.display().to_string(),
70 findings_count: frontier.findings.len(),
71 links_count: frontier.stats.links,
72 papers: frontier.project.papers_processed,
73 };
74 (frontier, vec![info])
75 }
76 ProjectSource::Directory(dir) => {
77 let mut entries: Vec<PathBuf> = std::fs::read_dir(dir)
78 .unwrap_or_else(|e| {
79 eprintln!(
80 "{} failed to read directory: {e}",
81 crate::cli_style::err_prefix()
82 );
83 std::process::exit(1);
84 })
85 .filter_map(Result::ok)
86 .map(|entry| entry.path())
87 .filter(|path| {
88 (path.is_dir() && path.join(".vela").exists())
89 || path.extension().is_some_and(|ext| ext == "json")
90 })
91 .collect();
92 entries.sort();
93 if entries.is_empty() {
94 eprintln!("no frontier files found in {}", dir.display());
95 std::process::exit(1);
96 }
97
98 let mut named = Vec::new();
99 for path in &entries {
100 let mut frontier = repo::load_from_path(path).unwrap_or_else(|e| {
101 eprintln!(
102 "{} failed to load {}: {e}",
103 crate::cli_style::err_prefix(),
104 path.display()
105 );
106 std::process::exit(1);
107 });
108 sources::materialize_project(&mut frontier);
109 let name = path
110 .file_stem()
111 .unwrap_or_default()
112 .to_string_lossy()
113 .to_string();
114 named.push((name, frontier));
115 }
116 let infos = named
117 .iter()
118 .map(|(name, frontier)| ProjectInfo {
119 name: frontier.project.name.clone(),
120 file: name.clone(),
121 findings_count: frontier.findings.len(),
122 links_count: frontier.stats.links,
123 papers: frontier.project.papers_processed,
124 })
125 .collect::<Vec<_>>();
126 (merge_projects(named), infos)
127 }
128 }
129}
130
/// Merge multiple named frontiers into a single in-memory [`Project`].
///
/// - Counters (papers processed, errors) and histograms (categories,
///   link types) are summed across inputs.
/// - Record collections (replications, datasets, artifacts, predictions,
///   resolutions) are concatenated as-is.
/// - Findings are deduplicated by id; when the same id appears in several
///   frontiers, the copy with the higher confidence score wins.
/// - Stats are recomputed from the deduplicated set; event-log-derived
///   counters are zeroed because a merged view carries no combined logs.
fn merge_projects(frontiers: Vec<(String, Project)>) -> Project {
    let mut findings = Vec::<FindingBundle>::new();
    let mut categories = HashMap::<String, usize>::new();
    let mut link_types = HashMap::<String, usize>::new();
    let mut names = Vec::new();
    let mut papers_processed = 0usize;
    let mut errors = 0usize;
    let mut replications = Vec::new();
    let mut datasets = Vec::new();
    let mut code_artifacts = Vec::new();
    let mut artifacts = Vec::new();
    let mut predictions = Vec::new();
    let mut resolutions = Vec::new();

    // Pass 1: accumulate everything, consuming each input frontier.
    for (name, frontier) in frontiers {
        names.push(name);
        papers_processed += frontier.project.papers_processed;
        errors += frontier.project.errors;
        for (category, count) in frontier.stats.categories {
            *categories.entry(category).or_default() += count;
        }
        for (link_type, count) in frontier.stats.link_types {
            *link_types.entry(link_type).or_default() += count;
        }
        findings.extend(frontier.findings);
        replications.extend(frontier.replications);
        datasets.extend(frontier.datasets);
        code_artifacts.extend(frontier.code_artifacts);
        artifacts.extend(frontier.artifacts);
        predictions.extend(frontier.predictions);
        resolutions.extend(frontier.resolutions);
    }

    // Pass 2: dedupe findings by id, keeping the higher-confidence copy.
    // `seen` maps finding id -> index into `deduped`.
    let mut deduped = Vec::<FindingBundle>::new();
    let mut seen = HashMap::<String, usize>::new();
    for finding in findings {
        if let Some(existing) = seen.get(&finding.id).copied() {
            if finding.confidence.score > deduped[existing].confidence.score {
                deduped[existing] = finding;
            }
        } else {
            seen.insert(finding.id.clone(), deduped.len());
            deduped.push(finding);
        }
    }

    let links = deduped.iter().map(|finding| finding.links.len()).sum();
    // Replication records override the per-finding `evidence.replicated`
    // flag: when any record targets a finding, only a "replicated" outcome
    // counts; findings with no record fall back to their own flag.
    let mut targets_with_success: HashSet<&str> = HashSet::new();
    let mut targets_with_any_record: HashSet<&str> = HashSet::new();
    for r in &replications {
        targets_with_any_record.insert(r.target_finding.as_str());
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.as_str());
        }
    }
    let replicated = deduped
        .iter()
        .filter(|finding| {
            if targets_with_any_record.contains(finding.id.as_str()) {
                targets_with_success.contains(finding.id.as_str())
            } else {
                finding.evidence.replicated
            }
        })
        .count();
    // Mean confidence rounded to 3 decimal places; 0.0 for an empty merge.
    let avg_confidence = if deduped.is_empty() {
        0.0
    } else {
        (deduped
            .iter()
            .map(|finding| finding.confidence.score)
            .sum::<f64>()
            / deduped.len() as f64
            * 1000.0)
            .round()
            / 1000.0
    };
    let stats = ProjectStats {
        findings: deduped.len(),
        links,
        replicated,
        unreplicated: deduped.len().saturating_sub(replicated),
        avg_confidence,
        gaps: deduped.iter().filter(|finding| finding.flags.gap).count(),
        negative_space: deduped
            .iter()
            .filter(|finding| finding.flags.negative_space)
            .count(),
        contested: deduped
            .iter()
            .filter(|finding| finding.flags.contested)
            .count(),
        categories,
        link_types,
        human_reviewed: deduped
            .iter()
            .filter(|finding| {
                finding
                    .provenance
                    .review
                    .as_ref()
                    .is_some_and(|review| review.reviewed)
            })
            .count(),
        // Log-derived counters are zeroed: the merged project carries no
        // combined event/source/proposal logs (see the empty Vecs below).
        review_event_count: 0,
        confidence_update_count: 0,
        event_count: 0,
        source_count: 0,
        evidence_atom_count: 0,
        condition_record_count: 0,
        proposal_count: 0,
        // Buckets: > 0.8 is "high"; 0.6..=0.8 inclusive is "medium" (a score
        // of exactly 0.8 lands here, not in "high"); < 0.6 is "low".
        confidence_distribution: ConfidenceDistribution {
            high_gt_80: deduped
                .iter()
                .filter(|finding| finding.confidence.score > 0.8)
                .count(),
            medium_60_80: deduped
                .iter()
                .filter(|finding| (0.6..=0.8).contains(&finding.confidence.score))
                .count(),
            low_lt_60: deduped
                .iter()
                .filter(|finding| finding.confidence.score < 0.6)
                .count(),
        },
    };

    let mut project = Project {
        vela_version: project::VELA_SCHEMA_VERSION.to_string(),
        schema: project::VELA_SCHEMA_URL.to_string(),
        frontier_id: None,
        project: project::ProjectMeta {
            name: format!("merged: {}", names.join(", ")),
            description: format!("Merged from {} frontiers", names.len()),
            // Merged views are stamped at merge time, not compile time.
            compiled_at: chrono::Utc::now().to_rfc3339(),
            compiler: project::VELA_COMPILER_VERSION.to_string(),
            papers_processed,
            errors,
            dependencies: Vec::new(),
        },
        stats,
        findings: deduped,
        sources: Vec::new(),
        evidence_atoms: Vec::new(),
        condition_records: Vec::new(),
        review_events: Vec::new(),
        confidence_updates: Vec::new(),
        events: Vec::new(),
        proposals: Vec::new(),
        proof_state: Default::default(),
        signatures: Vec::new(),
        actors: Vec::new(),
        replications,
        datasets,
        code_artifacts,
        artifacts,
        predictions,
        resolutions,
        peers: Vec::new(),
        negative_results: Vec::new(),
        trajectories: Vec::new(),
    };
    // Rebuild derived source material on the merged result, mirroring what
    // load_projects does for each individual frontier.
    sources::materialize_project(&mut project);
    project
}
304
/// Serve the frontier as an MCP (Model Context Protocol) server over stdio.
///
/// Reads newline-delimited JSON-RPC requests from stdin and writes exactly
/// one JSON response line per request to stdout. Handled methods:
/// `initialize`, `notifications/initialized` (acknowledged silently),
/// `tools/list`, `tools/call`, and `ping`; anything else gets a -32601
/// "Method not found" error. Blank and unparseable input lines are skipped.
///
/// `_backend` is currently unused.
///
/// NOTE(review): stdin iteration here is blocking inside an async fn —
/// presumably acceptable for a dedicated stdio server process; confirm no
/// other tasks share this runtime.
pub async fn run(source: ProjectSource, _backend: Option<&str>) {
    dotenvy::dotenv().ok();
    let (frontier, project_infos) = load_projects(&source);
    // Write tools need a concrete file to persist to; directory mode is
    // read-only, so no source path is recorded for it.
    let source_path: Option<PathBuf> = match &source {
        ProjectSource::Single(path) => Some(path.clone()),
        ProjectSource::Directory(_) => None,
    };
    let frontier = Arc::new(Mutex::new(frontier));
    let client = Client::new();
    let stdin = io::stdin();
    let stdout = io::stdout();

    for line in stdin.lock().lines() {
        let Ok(line) = line else {
            break;
        };
        if line.trim().is_empty() {
            continue;
        }
        let Ok(request) = serde_json::from_str::<Value>(&line) else {
            continue;
        };
        // The request id is echoed back so the client can correlate responses.
        let id = request.get("id").cloned();
        let method = request["method"].as_str().unwrap_or_default();
        let response = match method {
            "initialize" => json_rpc_result(
                &id,
                json!({
                    "protocolVersion": "2024-11-05",
                    "capabilities": {"tools": {}},
                    "serverInfo": {"name": "vela", "version": project::VELA_SCHEMA_VERSION}
                }),
            ),
            // Notification: no response is expected, so skip the write below.
            "notifications/initialized" => continue,
            "tools/list" => json_rpc_result(&id, json!({"tools": tool_registry::mcp_tools_json()})),
            "tools/call" => {
                let name = request["params"]["name"].as_str().unwrap_or_default();
                let args = request["params"]["arguments"].clone();
                handle_tool_call(
                    &id,
                    name,
                    &args,
                    &frontier,
                    &client,
                    &project_infos,
                    source_path.as_deref(),
                )
                .await
            }
            "ping" => json_rpc_result(&id, json!({})),
            _ => json_rpc_error(&id, -32601, "Method not found"),
        };
        // One response per line; flush so the client sees it immediately.
        let mut out = stdout.lock();
        let _ = serde_json::to_writer(&mut out, &response);
        let _ = out.write_all(b"\n");
        let _ = out.flush();
    }
}
363
/// Serve the frontier over HTTP (JSON API), optionally with the workbench UI.
///
/// Builds an axum router exposing read-only `/api/*` endpoints plus the tool
/// endpoints (`/api/tools`, `/api/tool`, `/mcp/tools`) and the queue-append
/// endpoint, applies a permissive CORS layer, and listens on `0.0.0.0:{port}`
/// until the process terminates. With `workbench`, static files are served
/// from the bundled `web/` directory when it exists (API-only otherwise).
/// Exits with status 1 if the port cannot be bound. `backend` is unused.
pub async fn run_http(source: ProjectSource, backend: Option<&str>, port: u16, workbench: bool) {
    let _ = backend;
    dotenvy::dotenv().ok();
    let (frontier, project_infos) = load_projects(&source);
    // Write endpoints need a single concrete file; directory mode is read-only.
    let source_path = match &source {
        ProjectSource::Single(path) => Some(path.clone()),
        ProjectSource::Directory(_) => None,
    };
    let state = AppState {
        project: Arc::new(Mutex::new(frontier)),
        project_infos,
        client: Client::new(),
        source_path,
    };

    let mut app = Router::new()
        .route("/health", get(http_health))
        .route("/healthz", get(http_health))
        .route("/api/frontier", get(http_frontier))
        .route("/api/findings", get(http_findings))
        .route("/api/findings/{id}", get(http_finding_by_id))
        .route("/api/contradictions", get(http_contradictions))
        .route("/api/tensions", get(http_tensions))
        .route("/api/gaps", get(http_gaps))
        .route("/api/artifacts", get(http_artifacts))
        .route("/api/artifact-audit", get(http_artifact_audit))
        .route("/api/decision-brief", get(http_decision_brief))
        .route("/api/trials", get(http_trials))
        .route("/api/source-verification", get(http_source_verification))
        .route("/api/source-ingest-plan", get(http_source_ingest_plan))
        .route("/api/observer/{policy}", get(http_observer))
        .route("/api/propagate/{id}", get(http_propagate))
        .route("/api/hypotheses", get(http_bridges))
        .route("/api/stats", get(http_stats))
        .route("/api/frontiers", get(http_frontiers))
        .route("/api/pubmed", get(http_pubmed))
        .route("/api/events", get(http_events))
        .route("/api/queue", post(http_queue_append))
        .route("/api/tools", get(http_tools_list))
        .route("/mcp/tools", get(http_tools_list))
        .route("/api/tool", post(http_tool_call));

    if workbench {
        // Static UI assets are optional; fall back to API-only with a warning.
        let web_dir = workbench_web_dir();
        if web_dir.exists() {
            app = app.fallback_service(tower_http::services::ServeDir::new(web_dir));
        } else {
            eprintln!(
                "{} --workbench: web/ directory not found at expected location; serving API only",
                crate::cli_style::err_prefix()
            );
        }
    }

    let app = app.layer(CorsLayer::permissive()).with_state(state);

    let addr = format!("0.0.0.0:{port}");
    // Startup banner goes to stderr so stdout stays clean for piping.
    eprintln!(
        " {}",
        if workbench {
            format!("VELA · WORKBENCH :{port}").to_uppercase()
        } else {
            format!("VELA · SERVE · HTTP :{port}").to_uppercase()
        }
        .as_str()
    );
    eprintln!(" {}", crate::cli_style::tick_row(60));
    eprintln!(" listening on http://{addr}");
    if workbench {
        eprintln!(" workbench UI: https://vela-site.fly.dev/frontiers/view?api=http://{addr}");
        eprintln!(
            " (or http://localhost:4321/frontiers/view?api=http://{addr} for a local site)"
        );
    }
    eprintln!(
        " endpoints: /api/frontier, /api/findings, /api/events, /api/decision-brief, /api/trials, /api/source-verification, /api/source-ingest-plan, /api/artifacts, /api/artifact-audit, /api/gaps, /api/tensions, /api/tool"
    );
    let listener = tokio::net::TcpListener::bind(&addr)
        .await
        .unwrap_or_else(|e| {
            eprintln!(
                "{} failed to bind to {addr}: {e}",
                crate::cli_style::err_prefix()
            );
            std::process::exit(1);
        });
    axum::serve(listener, app).await.unwrap();
}
471
/// Smoke-check the read-only tools against a loaded frontier.
///
/// Runs each tool once with canned arguments — plus the id-based tools
/// against the first finding, when one exists — and returns a JSON report
/// (schema `vela.tool-check.v0`) with per-tool pass/fail, the list of
/// checked tools, and the full registered-tool list for comparison.
///
/// NOTE(review): a single `started` Instant is shared by every check, so each
/// reported `duration_ms` is cumulative time since the start of the run, not
/// that tool's own duration — confirm this is intended.
pub fn check_tools(source: ProjectSource) -> Result<Value, String> {
    let started = std::time::Instant::now();
    let (frontier, _project_infos) = load_projects(&source);
    let first_id = frontier.findings.first().map(|finding| finding.id.clone());
    let mut checks = vec![
        check_tool_result("frontier_stats", tool_frontier_stats(&frontier), started),
        check_tool_result(
            "search_findings",
            tool_search_findings(&json!({"query": "amyloid", "limit": 3}), &frontier),
            started,
        ),
        check_tool_result("list_gaps", tool_list_gaps(&frontier), started),
        check_tool_result(
            "list_contradictions",
            tool_list_contradictions(&frontier),
            started,
        ),
        check_tool_result(
            "find_bridges",
            tool_find_bridges(&json!({"limit": 5, "min_categories": 2}), &frontier),
            started,
        ),
        check_tool_result(
            "apply_observer",
            tool_apply_observer(&json!({"policy": "academic", "limit": 5}), &frontier),
            started,
        ),
        check_tool_result(
            "propagate_retraction",
            // Deliberately targets a nonexistent finding id.
            tool_propagate_retraction(&json!({"finding_id": "vf_missing"}), &frontier),
            started,
        ),
    ];
    // Id-based tools can only be exercised when the frontier has findings.
    if let Some(id) = first_id {
        checks.push(check_tool_result(
            "get_finding",
            tool_get_finding(&json!({"id": id}), &frontier),
            started,
        ));
        checks.push(check_tool_result(
            "get_finding_history",
            tool_get_finding_history(&json!({"id": id}), &frontier),
            started,
        ));
        checks.push(check_tool_result(
            "trace_evidence_chain",
            tool_trace_evidence_chain(&json!({"finding_id": id}), &frontier),
            started,
        ));
    }
    let failures = checks
        .iter()
        .filter(|check| check.get("ok").and_then(Value::as_bool) != Some(true))
        .filter_map(|check| {
            check
                .get("tool")
                .and_then(Value::as_str)
                .map(str::to_string)
        })
        .collect::<Vec<_>>();
    let checked_tools = checks
        .iter()
        .filter_map(|check| check.get("tool").and_then(Value::as_str))
        .map(str::to_string)
        .collect::<Vec<_>>();
    let registered_tools = tool_registry::all_tools()
        .into_iter()
        .map(|tool| tool.name)
        .collect::<Vec<_>>();

    Ok(json!({
        "ok": failures.is_empty(),
        "command": "serve --check-tools",
        "schema": "vela.tool-check.v0",
        "frontier": {
            "name": frontier.project.name,
            "findings": frontier.stats.findings,
            "links": frontier.stats.links,
        },
        "summary": {
            "checks": checks.len(),
            "passed": checks.len().saturating_sub(failures.len()),
            "failed": failures.len(),
        },
        "tool_count": checked_tools.len(),
        "tools": checked_tools,
        "registered_tool_count": registered_tools.len(),
        "registered_tools": registered_tools,
        "checks": checks,
        "failures": failures,
    }))
}
564
/// Shared state handed to every axum handler.
#[derive(Clone)]
struct AppState {
    /// The loaded (possibly merged) frontier, guarded for concurrent handlers.
    project: Arc<Mutex<Project>>,
    /// One summary per loaded frontier file.
    project_infos: Vec<ProjectInfo>,
    /// Reused HTTP client for outbound requests (e.g. PubMed lookups).
    client: Client,
    /// Present only in single-file mode; required by the write endpoints.
    source_path: Option<PathBuf>,
}
576
/// Structured envelope for a single tool invocation's output.
#[derive(Debug, Clone, Serialize)]
struct ToolResult {
    /// Tool name, as registered in the tool registry.
    tool: String,
    /// `true` unless the invocation was flagged as an error.
    ok: bool,
    /// Parsed JSON output, or `{"text": ...}` when the output was not JSON.
    data: Value,
    /// Raw output text, verbatim.
    markdown: String,
    /// Frontier-wide signals recomputed from a post-call snapshot
    /// (empty when no snapshot was provided).
    signals: Vec<signals::SignalItem>,
    /// Static caveats registered for this tool.
    caveats: Vec<String>,
    /// Reported duration of the call in milliseconds.
    duration_ms: u128,
}
587
588impl ToolResult {
589 fn from_text(
590 tool: &str,
591 text: String,
592 duration_ms: u128,
593 is_error: bool,
594 frontier: Option<&Project>,
595 ) -> Self {
596 let data = serde_json::from_str(&text).unwrap_or_else(|_| json!({"text": text}));
597 let signal_items = frontier
598 .map(|project| signals::analyze(project, &[]).signals)
599 .unwrap_or_default();
600 Self {
601 tool: tool.to_string(),
602 ok: !is_error,
603 data,
604 markdown: text,
605 signals: signal_items,
606 caveats: tool_registry::tool_caveats(tool),
607 duration_ms,
608 }
609 }
610
611 fn metadata(&self) -> Value {
612 json!({
613 "tool": self.tool,
614 "ok": self.ok,
615 "duration_ms": self.duration_ms,
616 "signals": self.signals,
617 "caveats": self.caveats,
618 "definition": tool_registry::get_tool(&self.tool),
619 })
620 }
621
622 fn to_json_text(&self) -> String {
623 serde_json::to_string_pretty(self).unwrap_or_else(|_| "{}".to_string())
624 }
625}
626
627async fn handle_tool_call(
628 id: &Option<Value>,
629 name: &str,
630 args: &Value,
631 frontier: &Arc<Mutex<Project>>,
632 client: &Client,
633 project_infos: &[ProjectInfo],
634 source_path: Option<&Path>,
635) -> Value {
636 let started = std::time::Instant::now();
637 let (result, snapshot) =
638 execute_tool(name, args, frontier, client, project_infos, source_path).await;
639 match result {
640 Ok(text) => {
641 let output = ToolResult::from_text(
642 name,
643 text,
644 started.elapsed().as_millis(),
645 false,
646 snapshot.as_ref(),
647 );
648 json_rpc_result(
649 id,
650 json!({
651 "content": [{"type": "text", "text": output.to_json_text()}],
652 "isError": false,
653 "_meta": output.metadata()
654 }),
655 )
656 }
657 Err(error) => {
658 let output = ToolResult::from_text(
659 name,
660 error,
661 started.elapsed().as_millis(),
662 true,
663 snapshot.as_ref(),
664 );
665 json_rpc_result(
666 id,
667 json!({
668 "content": [{"type": "text", "text": output.to_json_text()}],
669 "isError": true,
670 "_meta": output.metadata()
671 }),
672 )
673 }
674 }
675}
676
/// Dispatch a tool call by name.
///
/// Returns the tool's textual result together with an optional snapshot of
/// the project taken around the call; the snapshot is used downstream to
/// compute frontier signals. Read-only tools run against the in-memory
/// project under the lock. Write tools (`propose_*`, `accept_proposal`,
/// `reject_proposal`) persist via the single-file `source_path` and reload
/// the in-memory project afterwards, then snapshot the refreshed state.
/// `check_pubmed` only performs network I/O and yields no snapshot.
/// Unknown names produce an error without a snapshot.
async fn execute_tool(
    name: &str,
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    client: &Client,
    _project_infos: &[ProjectInfo],
    source_path: Option<&Path>,
) -> (Result<String, String>, Option<Project>) {
    match name {
        "search_findings" => {
            let project = frontier.lock().await;
            (
                tool_search_findings(args, &project),
                Some(clone_project(&project)),
            )
        }
        "get_finding" => {
            let project = frontier.lock().await;
            (
                tool_get_finding(args, &project),
                Some(clone_project(&project)),
            )
        }
        "get_finding_history" => {
            let project = frontier.lock().await;
            (
                tool_get_finding_history(args, &project),
                Some(clone_project(&project)),
            )
        }
        "list_gaps" => {
            let project = frontier.lock().await;
            (tool_list_gaps(&project), Some(clone_project(&project)))
        }
        "list_contradictions" => {
            let project = frontier.lock().await;
            (
                tool_list_contradictions(&project),
                Some(clone_project(&project)),
            )
        }
        "frontier_stats" => {
            let project = frontier.lock().await;
            (tool_frontier_stats(&project), Some(clone_project(&project)))
        }
        "find_bridges" => {
            let project = frontier.lock().await;
            (
                tool_find_bridges(args, &project),
                Some(clone_project(&project)),
            )
        }
        "propagate_retraction" => {
            let project = frontier.lock().await;
            (
                tool_propagate_retraction(args, &project),
                Some(clone_project(&project)),
            )
        }
        "apply_observer" => {
            let project = frontier.lock().await;
            (
                tool_apply_observer(args, &project),
                Some(clone_project(&project)),
            )
        }
        "trace_evidence_chain" => {
            let project = frontier.lock().await;
            (
                tool_trace_evidence_chain(args, &project),
                Some(clone_project(&project)),
            )
        }
        // Network-only tool: no project state is read or snapshotted.
        "check_pubmed" => (tool_check_pubmed(args, client).await, None),
        "list_events_since" => {
            let project = frontier.lock().await;
            (
                tool_list_events_since(args, &project),
                Some(clone_project(&project)),
            )
        }
        "propose_review" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.review",
                |args| {
                    // Restrict to the review statuses the schema accepts.
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("propose_review requires `status`")?;
                    if !matches!(
                        status,
                        "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
                    ) {
                        return Err(format!("invalid review status '{status}'"));
                    }
                    Ok(json!({"status": status}))
                },
                false,
            )
            .await;
            // Snapshot after the write so signals reflect the new state.
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "propose_note" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.note",
                |args| build_note_payload(args, "propose_note"),
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        // Same as propose_note, but requests immediate application when the
        // actor's tier permits auto-apply.
        "propose_and_apply_note" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.note",
                |args| build_note_payload(args, "propose_and_apply_note"),
                true,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "propose_revise_confidence" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.confidence_revise",
                |args| {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("propose_revise_confidence requires `new_score`")?;
                    if !(0.0..=1.0).contains(&new_score) {
                        return Err(format!("new_score {new_score} out of [0.0, 1.0]"));
                    }
                    Ok(json!({"new_score": new_score}))
                },
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "propose_retract" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.retract",
                |_args| Ok(json!({})),
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "accept_proposal" => {
            let result = write_tool_decision(args, frontier, source_path, "accept").await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "reject_proposal" => {
            let result = write_tool_decision(args, frontier, source_path, "reject").await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        _ => (Err(format!("Unknown tool: {name}")), None),
    }
}
865
866fn build_note_payload(args: &Value, tool_name: &str) -> Result<Value, String> {
872 let text = args
873 .get("text")
874 .and_then(Value::as_str)
875 .ok_or_else(|| format!("{tool_name} requires `text`"))?;
876 if text.trim().is_empty() {
877 return Err("text must be non-empty".to_string());
878 }
879 let mut payload = json!({"text": text});
880 if let Some(prov) = args.get("provenance") {
881 let prov_obj = prov
882 .as_object()
883 .ok_or("provenance must be a JSON object when present")?;
884 let has_id = ["doi", "pmid", "title"].iter().any(|k| {
885 prov_obj
886 .get(*k)
887 .and_then(Value::as_str)
888 .is_some_and(|s| !s.trim().is_empty())
889 });
890 if !has_id {
891 return Err("provenance must include at least one of doi/pmid/title".to_string());
892 }
893 payload["provenance"] = prov.clone();
894 }
895 Ok(payload)
896}
897
/// Shared implementation behind the `propose_*` write tools.
///
/// Validates the required arguments (`actor_id`, `target_finding_id`,
/// `reason`, `signature`), builds the kind-specific payload via
/// `payload_builder`, verifies the Ed25519 signature against the actor's
/// registered public key, and then creates the proposal — applying it
/// immediately only when `apply_if_tier_permits` is set AND the actor's tier
/// allows auto-apply for this `kind`. On success the on-disk frontier is
/// reloaded into memory and a JSON summary of the outcome is returned.
///
/// Errors when: no single-file source path is available (directory mode),
/// a required argument is missing, the actor is unregistered, auto-apply was
/// requested but the tier forbids it, the signature does not verify, or
/// persistence/reload fails.
async fn write_tool_propose<F>(
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    source_path: Option<&Path>,
    kind: &str,
    payload_builder: F,
    apply_if_tier_permits: bool,
) -> Result<String, String>
where
    F: Fn(&Value) -> Result<Value, String>,
{
    let path = source_path.ok_or_else(|| {
        "Write tools require a single-file frontier (--frontier <PATH>); rejected in --frontiers <DIR> mode".to_string()
    })?;
    let actor_id = args
        .get("actor_id")
        .and_then(Value::as_str)
        .ok_or("write tool requires `actor_id`")?;
    let target_finding_id = args
        .get("target_finding_id")
        .and_then(Value::as_str)
        .ok_or("write tool requires `target_finding_id`")?;
    let reason = args
        .get("reason")
        .and_then(Value::as_str)
        .ok_or("write tool requires `reason`")?;
    let signature_hex = args
        .get("signature")
        .and_then(Value::as_str)
        .ok_or("write tool requires `signature` (Ed25519 over canonical proposal preimage)")?;
    // Caller may pin the timestamp (it is part of the signed preimage);
    // default to "now" when absent.
    let created_at = args
        .get("created_at")
        .and_then(Value::as_str)
        .map(String::from)
        .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
    let payload = payload_builder(args)?;

    // Extract what we need from the actor inside a short-lived lock scope,
    // so the lock is released before the disk I/O below.
    let (pubkey, tier_permits_apply) = {
        let project = frontier.lock().await;
        let actor = project
            .actors
            .iter()
            .find(|actor| actor.id == actor_id)
            .ok_or_else(|| {
                format!(
                    "actor '{actor_id}' is not registered in this frontier; register via `vela actor add` before writing"
                )
            })?;
        let tier_permits = crate::sign::actor_can_auto_apply(actor, kind);
        // Explicit auto-apply requests fail loudly rather than silently
        // downgrading to a pending proposal.
        if apply_if_tier_permits && !tier_permits {
            let tier_label = actor.tier.as_deref().unwrap_or("none");
            return Err(format!(
                "actor '{actor_id}' tier '{tier_label}' does not permit auto-apply for {kind}"
            ));
        }
        (actor.public_key.clone(), tier_permits)
    };

    let mut proposal = crate::proposals::new_proposal(
        kind,
        crate::events::StateTarget {
            r#type: "finding".to_string(),
            id: target_finding_id.to_string(),
        },
        actor_id,
        "human",
        reason,
        payload,
        Vec::new(),
        Vec::new(),
    );
    // The id is derived from the proposal contents, so it must be recomputed
    // after overriding created_at.
    proposal.created_at = created_at;
    proposal.id = crate::proposals::proposal_id(&proposal);

    let valid = crate::sign::verify_proposal_signature(&proposal, signature_hex, &pubkey)?;
    if !valid {
        return Err(format!(
            "Signature does not verify for actor '{actor_id}' on this proposal"
        ));
    }

    let apply = apply_if_tier_permits && tier_permits_apply;
    let result = crate::proposals::create_or_apply(path, proposal, apply)
        .map_err(|e| format!("create_or_apply failed: {e}"))?;

    // Re-read from disk so the in-memory project matches the persisted state.
    let fresh =
        crate::repo::load_from_path(path).map_err(|e| format!("reload after write failed: {e}"))?;
    let mut project = frontier.lock().await;
    *project = fresh;

    serde_json::to_string(&json!({
        "proposal_id": result.proposal_id,
        "finding_id": result.finding_id,
        "status": result.status,
        "applied_event_id": result.applied_event_id,
    }))
    .map_err(|e| format!("serialize write result: {e}"))
}
1013
/// Shared implementation behind `accept_proposal` / `reject_proposal`.
///
/// Validates the required arguments (`proposal_id`, `reviewer_id`, `reason`,
/// `signature`), verifies the reviewer's Ed25519 signature over the canonical
/// decision preimage, then applies the accept/reject at the single-file
/// source path and reloads the in-memory project. Returns a JSON summary
/// (`accept` yields the applied event id; `reject` yields status "rejected").
///
/// Errors in directory mode, on missing arguments, unregistered reviewers,
/// signature failures, unsupported actions, or persistence/reload failures.
async fn write_tool_decision(
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    source_path: Option<&Path>,
    action: &str,
) -> Result<String, String> {
    let path = source_path.ok_or_else(|| {
        "Write tools require a single-file frontier (--frontier <PATH>); rejected in --frontiers <DIR> mode".to_string()
    })?;
    let proposal_id = args
        .get("proposal_id")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `proposal_id`")?;
    let reviewer_id = args
        .get("reviewer_id")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `reviewer_id`")?;
    let reason = args
        .get("reason")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `reason`")?;
    let signature_hex = args
        .get("signature")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `signature`")?;
    // Caller may pin the timestamp (it is part of the signed preimage);
    // default to "now" when absent.
    let timestamp = args
        .get("timestamp")
        .and_then(Value::as_str)
        .map(String::from)
        .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

    // The signed preimage covers the action itself, so a signature for
    // "accept" cannot be replayed as "reject" (and vice versa).
    let preimage = json!({
        "action": action,
        "proposal_id": proposal_id,
        "reviewer_id": reviewer_id,
        "reason": reason,
        "timestamp": timestamp,
    });
    let signing_bytes = crate::canonical::to_canonical_bytes(&preimage)?;

    // Short-lived lock scope: release before the disk I/O below.
    let pubkey = {
        let project = frontier.lock().await;
        project
            .actors
            .iter()
            .find(|actor| actor.id == reviewer_id)
            .map(|actor| actor.public_key.clone())
            .ok_or_else(|| format!("reviewer '{reviewer_id}' is not registered"))?
    };

    let valid = crate::sign::verify_action_signature(&signing_bytes, signature_hex, &pubkey)?;
    if !valid {
        return Err(format!(
            "Signature does not verify for reviewer '{reviewer_id}' on {action} of {proposal_id}"
        ));
    }

    let outcome = match action {
        "accept" => {
            let event_id = crate::proposals::accept_at_path(path, proposal_id, reviewer_id, reason)
                .map_err(|e| format!("accept failed: {e}"))?;
            json!({
                "proposal_id": proposal_id,
                "applied_event_id": event_id,
                "status": "applied",
            })
        }
        "reject" => {
            crate::proposals::reject_at_path(path, proposal_id, reviewer_id, reason)
                .map_err(|e| format!("reject failed: {e}"))?;
            json!({
                "proposal_id": proposal_id,
                "applied_event_id": Value::Null,
                "status": "rejected",
            })
        }
        other => return Err(format!("unsupported decision action '{other}'")),
    };

    // Re-read from disk so the in-memory project matches the persisted state.
    let fresh =
        crate::repo::load_from_path(path).map_err(|e| format!("reload after write failed: {e}"))?;
    let mut project = frontier.lock().await;
    *project = fresh;

    serde_json::to_string(&outcome).map_err(|e| format!("serialize decision: {e}"))
}
1106
1107fn tool_list_events_since(args: &Value, project: &Project) -> Result<String, String> {
1111 let cursor = args.get("cursor").and_then(Value::as_str);
1112 let limit = args
1113 .get("limit")
1114 .and_then(Value::as_u64)
1115 .map_or(100usize, |n| (n as usize).min(500));
1116 let start_idx: usize = match cursor {
1117 None => 0,
1118 Some(c) => match project.events.iter().position(|event| event.id == c) {
1119 Some(idx) => idx + 1,
1120 None => {
1121 return Err(format!(
1122 "cursor '{c}' not found in event log; client is out of sync"
1123 ));
1124 }
1125 },
1126 };
1127 let end_idx = (start_idx + limit).min(project.events.len());
1128 let slice = &project.events[start_idx..end_idx];
1129 let next_cursor = if end_idx < project.events.len() {
1130 slice.last().map(|event| event.id.clone())
1131 } else {
1132 None
1133 };
1134 let payload = json!({
1135 "events": slice,
1136 "count": slice.len(),
1137 "next_cursor": next_cursor,
1138 "log_total": project.events.len(),
1139 });
1140 serde_json::to_string(&payload).map_err(|e| format!("serialize list_events_since: {e}"))
1141}
1142
/// Wrap one tool invocation's outcome as a check record for `check_tools`.
///
/// NOTE(review): `result.unwrap_or_else(|e| e)` folds an error message into
/// the output text and `is_error` is passed as `false`, so a tool returning
/// `Err` still yields `ok: true` as long as its error text is non-empty —
/// presumably a lenient "tool responded at all" check; confirm intended.
/// `has_signals`/`has_caveats` are likewise hard-coded `true` even though
/// both vectors may legitimately be empty.
fn check_tool_result(
    name: &str,
    result: Result<String, String>,
    started: std::time::Instant,
) -> Value {
    let output = ToolResult::from_text(
        name,
        result.unwrap_or_else(|e| e),
        started.elapsed().as_millis(),
        false,
        None,
    );
    // A non-JSON payload still counts: from_text wraps it as {"text": ...}.
    let has_data = !output.data.is_null();
    let has_markdown = !output.markdown.trim().is_empty();
    let has_signals = true;
    let has_caveats = true;
    json!({
        "tool": name,
        "ok": has_data && has_markdown && has_signals && has_caveats,
        "data": output.data,
        "markdown": output.markdown,
        "has_data": has_data,
        "has_markdown": has_markdown,
        "has_signals": has_signals,
        "has_caveats": has_caveats,
        "signals": output.signals,
        "caveats": output.caveats,
        "duration_ms": output.duration_ms,
    })
}
1173
/// `GET /api/events` — cursor-paginated, filterable view of the event log.
///
/// Query parameters: `since` (exclusive cursor: the id of the last event the
/// client has seen), `limit` (default 100, capped at 500), and optional
/// `kind` / `target` exact-match filters applied after the cursor position.
/// Responds 400 when the cursor id is not present in the log. `next_cursor`
/// is set only when more filtered events remain past this page; it is the id
/// of the last returned event, which is a valid `since` value because the
/// filtered events are a subsequence of the full log.
async fn http_events(
    State(state): State<AppState>,
    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
) -> (StatusCode, Json<Value>) {
    let project = state.project.lock().await;
    let limit = params
        .get("limit")
        .and_then(|v| v.parse::<usize>().ok())
        .unwrap_or(100)
        .min(500);
    // Locate the cursor in the unfiltered log, then resume just after it.
    let start_idx: usize = match params.get("since") {
        None => 0,
        Some(cursor) => match project.events.iter().position(|event| &event.id == cursor) {
            Some(idx) => idx + 1,
            None => {
                return (
                    StatusCode::BAD_REQUEST,
                    Json(json!({
                        "error": format!(
                            "cursor '{cursor}' not found in event log; client is out of sync"
                        ),
                    })),
                );
            }
        },
    };
    let kind_filter = params.get("kind").map(String::as_str);
    let target_filter = params.get("target").map(String::as_str);
    let filtered: Vec<&crate::events::StateEvent> = project
        .events
        .iter()
        .skip(start_idx)
        .filter(|e| kind_filter.is_none_or(|k| e.kind == k))
        .filter(|e| target_filter.is_none_or(|t| e.target.id == t))
        .collect();
    let total_filtered = filtered.len();
    let take_n = limit.min(total_filtered);
    let slice: Vec<&crate::events::StateEvent> = filtered.into_iter().take(take_n).collect();
    let next_cursor = if take_n < total_filtered {
        slice.last().map(|event| event.id.clone())
    } else {
        None
    };
    (
        StatusCode::OK,
        Json(json!({
            "events": slice,
            "count": slice.len(),
            "next_cursor": next_cursor,
            "log_total": project.events.len(),
            "filtered_total": total_filtered,
        })),
    )
}
1246
1247async fn http_queue_append(
1258 State(state): State<AppState>,
1259 Json(body): Json<Value>,
1260) -> (StatusCode, Json<Value>) {
1261 let path = match &state.source_path {
1262 Some(p) => p.clone(),
1263 None => {
1264 return (
1265 StatusCode::BAD_REQUEST,
1266 Json(
1267 json!({"error": "Workbench queue requires a single-file frontier (--frontier <PATH>)"}),
1268 ),
1269 );
1270 }
1271 };
1272 let kind = match body.get("kind").and_then(Value::as_str) {
1273 Some(k) => k.to_string(),
1274 None => {
1275 return (
1276 StatusCode::BAD_REQUEST,
1277 Json(json!({"error": "POST /api/queue requires `kind`"})),
1278 );
1279 }
1280 };
1281 let valid_kinds = [
1282 "propose_review",
1283 "propose_note",
1284 "propose_revise_confidence",
1285 "propose_retract",
1286 "accept_proposal",
1287 "reject_proposal",
1288 ];
1289 if !valid_kinds.contains(&kind.as_str()) {
1290 return (
1291 StatusCode::BAD_REQUEST,
1292 Json(json!({"error": format!("unsupported queue kind '{kind}'")})),
1293 );
1294 }
1295 let args = body.get("args").cloned().unwrap_or(Value::Null);
1296 let queued_at = chrono::Utc::now().to_rfc3339();
1297 let action = crate::queue::QueuedAction {
1298 kind,
1299 frontier: path,
1300 args,
1301 queued_at: queued_at.clone(),
1302 };
1303 let queue_path = crate::queue::default_queue_path();
1304 if let Err(error) = crate::queue::append(&queue_path, action) {
1305 return (
1306 StatusCode::INTERNAL_SERVER_ERROR,
1307 Json(json!({"error": format!("append to queue: {error}")})),
1308 );
1309 }
1310 (
1311 StatusCode::OK,
1312 Json(json!({
1313 "ok": true,
1314 "queue_file": queue_path.display().to_string(),
1315 "queued_at": queued_at,
1316 "next_step": "run `vela queue sign` to apply queued drafts",
1317 })),
1318 )
1319}
1320
/// Locates the static web assets directory.
/// `VELA_WEB_DIR` overrides everything; otherwise the first existing candidate
/// among `<cwd>/web`, `./web`, `web` wins, defaulting to `<cwd>/web`.
fn workbench_web_dir() -> PathBuf {
    if let Ok(dir) = std::env::var("VELA_WEB_DIR") {
        return PathBuf::from(dir);
    }
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let default = cwd.join("web");
    vec![default.clone(), PathBuf::from("./web"), PathBuf::from("web")]
        .into_iter()
        .find(|candidate| candidate.exists())
        .unwrap_or(default)
}
1342
1343fn requesting_clearance(
1356 headers: &HeaderMap,
1357 project: &Project,
1358) -> Option<crate::access_tier::AccessTier> {
1359 let actor_id = headers
1360 .get("x-vela-actor")
1361 .and_then(|v| v.to_str().ok())?
1362 .trim();
1363 if actor_id.is_empty() {
1364 return None;
1365 }
1366 let actor = project.actors.iter().find(|a| a.id == actor_id)?;
1367 actor.access_clearance
1368}
1369
1370async fn http_frontier(State(state): State<AppState>, headers: HeaderMap) -> Json<Value> {
1371 let project = state.project.lock().await;
1372 let clearance = requesting_clearance(&headers, &project);
1373 let view = crate::access_tier::redact_for_actor(&project, clearance);
1374 Json(serde_json::to_value(&view).unwrap_or_else(|_| json!({"error": "serialization failed"})))
1375}
1376
1377async fn http_findings(
1378 State(state): State<AppState>,
1379 headers: HeaderMap,
1380 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1381) -> Json<Value> {
1382 let project = state.project.lock().await;
1383 let clearance = requesting_clearance(&headers, &project);
1384 let view = crate::access_tier::redact_for_actor(&project, clearance);
1385 let args = json!({
1386 "query": params.get("query"),
1387 "entity": params.get("entity"),
1388 "entity_type": params.get("entity_type"),
1389 "assertion_type": params.get("type"),
1390 "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(50),
1391 });
1392 match tool_search_findings(&args, &view) {
1393 Ok(text) => Json(json!({"result": text})),
1394 Err(error) => Json(json!({"error": error})),
1395 }
1396}
1397
1398async fn http_finding_by_id(
1399 State(state): State<AppState>,
1400 headers: HeaderMap,
1401 axum::extract::Path(id): axum::extract::Path<String>,
1402) -> (StatusCode, Json<Value>) {
1403 let project = state.project.lock().await;
1404 let clearance = requesting_clearance(&headers, &project);
1405 match project
1406 .findings
1407 .iter()
1408 .find(|finding| finding.id == id || finding.id.starts_with(&id))
1409 {
1410 Some(finding) => {
1411 if !crate::access_tier::actor_may_read(finding.access_tier, clearance) {
1412 return (
1415 StatusCode::NOT_FOUND,
1416 Json(json!({"error": format!("Finding not found: {id}")})),
1417 );
1418 }
1419 (
1420 StatusCode::OK,
1421 Json(serde_json::to_value(finding).unwrap_or_default()),
1422 )
1423 }
1424 None => (
1425 StatusCode::NOT_FOUND,
1426 Json(json!({"error": format!("Finding not found: {id}")})),
1427 ),
1428 }
1429}
1430
1431async fn http_contradictions(State(state): State<AppState>) -> Json<Value> {
1432 let project = state.project.lock().await;
1433 Json(
1434 serde_json::from_str(&tool_list_contradictions(&project).unwrap_or_default())
1435 .unwrap_or_else(
1436 |_| json!({"result": tool_list_contradictions(&project).unwrap_or_default()}),
1437 ),
1438 )
1439}
1440
1441async fn http_health(State(state): State<AppState>) -> Json<Value> {
1442 let project = state.project.lock().await;
1443 Json(json!({
1444 "ok": true,
1445 "frontier": {
1446 "name": project.project.name,
1447 "findings": project.stats.findings,
1448 "events": project.events.len(),
1449 }
1450 }))
1451}
1452
/// GET /api/artifacts — every registered artifact plus a count.
async fn http_artifacts(State(state): State<AppState>) -> Json<Value> {
    let project = state.project.lock().await;
    Json(json!({
        "ok": true,
        "count": project.artifacts.len(),
        "artifacts": project.artifacts,
    }))
}
1461
1462async fn http_artifact_audit(State(state): State<AppState>) -> Json<Value> {
1463 let source_path = state.source_path.clone();
1464 let project = state.project.lock().await;
1465 let Some(path) = source_path else {
1466 return Json(json!({
1467 "ok": false,
1468 "available": false,
1469 "issues": [],
1470 "error": "artifact audit requires a single frontier source",
1471 }));
1472 };
1473 Json(
1474 serde_json::to_value(crate::artifact_audit::audit_artifacts(&path, &project))
1475 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1476 )
1477}
1478
1479async fn http_decision_brief(State(state): State<AppState>) -> Json<Value> {
1480 let source_path = state.source_path.clone();
1481 let project = state.project.lock().await;
1482 let Some(path) = source_path else {
1483 return Json(json!({
1484 "ok": false,
1485 "available": false,
1486 "projection": null,
1487 "issues": [],
1488 "error": "decision projections require a single frontier source",
1489 }));
1490 };
1491 Json(
1492 serde_json::to_value(decision::load_decision_brief(&path, &project))
1493 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1494 )
1495}
1496
1497async fn http_trials(State(state): State<AppState>) -> Json<Value> {
1498 let source_path = state.source_path.clone();
1499 let project = state.project.lock().await;
1500 let Some(path) = source_path else {
1501 return Json(json!({
1502 "ok": false,
1503 "available": false,
1504 "projection": null,
1505 "issues": [],
1506 "error": "trial projections require a single frontier source",
1507 }));
1508 };
1509 Json(
1510 serde_json::to_value(decision::load_trial_outcomes(&path, &project))
1511 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1512 )
1513}
1514
1515async fn http_source_verification(State(state): State<AppState>) -> Json<Value> {
1516 let source_path = state.source_path.clone();
1517 let project = state.project.lock().await;
1518 let Some(path) = source_path else {
1519 return Json(json!({
1520 "ok": false,
1521 "available": false,
1522 "projection": null,
1523 "issues": [],
1524 "error": "source verification requires a single frontier source",
1525 }));
1526 };
1527 Json(
1528 serde_json::to_value(decision::load_source_verification(&path, &project))
1529 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1530 )
1531}
1532
1533async fn http_source_ingest_plan(State(state): State<AppState>) -> Json<Value> {
1534 let source_path = state.source_path.clone();
1535 let project = state.project.lock().await;
1536 let Some(path) = source_path else {
1537 return Json(json!({
1538 "ok": false,
1539 "available": false,
1540 "projection": null,
1541 "issues": [],
1542 "error": "source ingest plan requires a single frontier source",
1543 }));
1544 };
1545 Json(
1546 serde_json::to_value(decision::load_source_ingest_plan(&path, &project))
1547 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1548 )
1549}
1550
1551async fn http_gaps(State(state): State<AppState>) -> Json<Value> {
1552 let project = state.project.lock().await;
1553 let gaps = project
1554 .findings
1555 .iter()
1556 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
1557 .map(|finding| {
1558 json!({
1559 "id": finding.id,
1560 "assertion": finding.assertion.text,
1561 "confidence": finding.confidence.score,
1562 "conditions": finding.conditions.text,
1563 "source": finding.provenance.title,
1564 })
1565 })
1566 .collect::<Vec<_>>();
1567 Json(json!({
1568 "ok": true,
1569 "count": gaps.len(),
1570 "gaps": gaps,
1571 "caveats": ["Candidate gap rankings are review leads, not confirmed experiment targets."],
1572 }))
1573}
1574
1575async fn http_tensions(State(state): State<AppState>) -> Json<Value> {
1576 let project = state.project.lock().await;
1577 let lookup = project
1578 .findings
1579 .iter()
1580 .map(|finding| (finding.id.as_str(), finding))
1581 .collect::<HashMap<_, _>>();
1582 let mut tensions = Vec::new();
1583 for finding in &project.findings {
1584 for link in &finding.links {
1585 if link.link_type != "contradicts" {
1586 continue;
1587 }
1588 let target = lookup.get(link.target.as_str());
1589 tensions.push(json!({
1590 "source": {
1591 "id": finding.id,
1592 "assertion": finding.assertion.text,
1593 "confidence": finding.confidence.score,
1594 },
1595 "target": target.map(|target| json!({
1596 "id": target.id,
1597 "assertion": target.assertion.text,
1598 "confidence": target.confidence.score,
1599 })),
1600 "type": link.link_type,
1601 "note": link.note,
1602 "resolved": finding.flags.retracted || target.is_some_and(|target| target.flags.retracted),
1603 }));
1604 }
1605 }
1606 Json(json!({
1607 "ok": true,
1608 "count": tensions.len(),
1609 "tensions": tensions,
1610 "caveats": ["Candidate tensions are review surfaces, not definitive contradictions."],
1611 }))
1612}
1613
1614async fn http_observer(
1615 State(state): State<AppState>,
1616 axum::extract::Path(policy): axum::extract::Path<String>,
1617 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1618) -> Json<Value> {
1619 let project = state.project.lock().await;
1620 let args = json!({
1621 "policy": policy,
1622 "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(20),
1623 });
1624 match tool_apply_observer(&args, &project) {
1625 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1626 Err(error) => Json(json!({"error": error})),
1627 }
1628}
1629
1630async fn http_propagate(
1631 State(state): State<AppState>,
1632 axum::extract::Path(id): axum::extract::Path<String>,
1633) -> Json<Value> {
1634 let project = state.project.lock().await;
1635 let args = json!({"finding_id": id});
1636 match tool_propagate_retraction(&args, &project) {
1637 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1638 Err(error) => Json(json!({"error": error})),
1639 }
1640}
1641
1642async fn http_bridges(
1643 State(state): State<AppState>,
1644 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1645) -> Json<Value> {
1646 let project = state.project.lock().await;
1647 let args = json!({
1648 "min_categories": params.get("min_categories").and_then(|v| v.parse::<u64>().ok()).unwrap_or(2),
1649 "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(15),
1650 });
1651 match tool_find_bridges(&args, &project) {
1652 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1653 Err(error) => Json(json!({"error": error})),
1654 }
1655}
1656
1657async fn http_stats(State(state): State<AppState>) -> Json<Value> {
1658 let project = state.project.lock().await;
1659 Json(json!({
1660 "frontier": {
1661 "name": project.project.name,
1662 "compiled_at": project.project.compiled_at,
1663 "compiler": project.project.compiler,
1664 },
1665 "stats": project.stats,
1666 "signals": signals::analyze(&project, &[]).signals,
1667 }))
1668}
1669
1670async fn http_frontiers(State(state): State<AppState>) -> Json<Value> {
1671 Json(
1672 serde_json::from_str(&frontier_index_json(&state.project_infos).unwrap_or_default())
1673 .unwrap_or_else(|_| json!({"frontier_count": 0, "frontiers": []})),
1674 )
1675}
1676
1677async fn http_pubmed(
1678 State(state): State<AppState>,
1679 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1680) -> Json<Value> {
1681 let args = json!({"query": params.get("query").cloned().unwrap_or_default()});
1682 match tool_check_pubmed(&args, &state.client).await {
1683 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1684 Err(error) => Json(json!({"error": error})),
1685 }
1686}
1687
/// GET /api/tools — the static MCP tool manifest from the registry.
async fn http_tools_list() -> Json<Value> {
    Json(tool_registry::mcp_tools_json())
}
1691
1692async fn http_tool_call(
1693 State(state): State<AppState>,
1694 Json(body): Json<Value>,
1695) -> (StatusCode, Json<Value>) {
1696 let name = body["name"].as_str().unwrap_or_default();
1697 let args = &body["arguments"];
1698 let started = std::time::Instant::now();
1699 let (result, snapshot) = execute_tool(
1700 name,
1701 args,
1702 &state.project,
1703 &state.client,
1704 &state.project_infos,
1705 state.source_path.as_deref(),
1706 )
1707 .await;
1708 match result {
1709 Ok(text) => {
1710 let output = ToolResult::from_text(
1711 name,
1712 text,
1713 started.elapsed().as_millis(),
1714 false,
1715 snapshot.as_ref(),
1716 );
1717 (
1718 StatusCode::OK,
1719 Json(json!({
1720 "result": output.markdown,
1721 "tool": output.tool,
1722 "ok": output.ok,
1723 "data": output.data,
1724 "markdown": output.markdown,
1725 "signals": output.signals,
1726 "caveats": output.caveats,
1727 "duration_ms": output.duration_ms,
1728 "metadata": output.metadata(),
1729 })),
1730 )
1731 }
1732 Err(error) => {
1733 let output = ToolResult::from_text(
1734 name,
1735 error,
1736 started.elapsed().as_millis(),
1737 true,
1738 snapshot.as_ref(),
1739 );
1740 (
1741 StatusCode::INTERNAL_SERVER_ERROR,
1742 Json(json!({
1743 "error": output.markdown,
1744 "tool": output.tool,
1745 "ok": output.ok,
1746 "data": output.data,
1747 "markdown": output.markdown,
1748 "signals": output.signals,
1749 "caveats": output.caveats,
1750 "duration_ms": output.duration_ms,
1751 "metadata": output.metadata(),
1752 })),
1753 )
1754 }
1755 }
1756}
1757
1758fn tool_search_findings(args: &Value, frontier: &Project) -> Result<String, String> {
1759 let query = args["query"].as_str().map(str::to_lowercase);
1760 let entity = args["entity"].as_str().map(str::to_lowercase);
1761 let entity_type = args["entity_type"].as_str().map(str::to_lowercase);
1762 let assertion_type = args["assertion_type"].as_str().map(str::to_lowercase);
1763 let limit = args["limit"].as_u64().unwrap_or(20) as usize;
1764 let results = frontier
1765 .findings
1766 .iter()
1767 .filter(|finding| {
1768 query.as_ref().is_none_or(|q| {
1769 finding.assertion.text.to_lowercase().contains(q)
1770 || finding.conditions.text.to_lowercase().contains(q)
1771 || finding
1772 .assertion
1773 .entities
1774 .iter()
1775 .any(|e| e.name.to_lowercase().contains(q))
1776 }) && entity.as_ref().is_none_or(|needle| {
1777 finding
1778 .assertion
1779 .entities
1780 .iter()
1781 .any(|e| e.name.to_lowercase().contains(needle))
1782 }) && entity_type.as_ref().is_none_or(|needle| {
1783 finding
1784 .assertion
1785 .entities
1786 .iter()
1787 .any(|e| e.entity_type.to_lowercase() == *needle)
1788 }) && assertion_type
1789 .as_ref()
1790 .is_none_or(|needle| finding.assertion.assertion_type.to_lowercase() == *needle)
1791 })
1792 .take(limit)
1793 .collect::<Vec<_>>();
1794
1795 if results.is_empty() {
1796 return Ok("No findings matched the search criteria.".to_string());
1797 }
1798 let mut out = format!("{} findings matched:\n\n", results.len());
1799 for finding in results {
1800 let entities = finding
1801 .assertion
1802 .entities
1803 .iter()
1804 .map(|e| format!("{} ({})", e.name, e.entity_type))
1805 .collect::<Vec<_>>();
1806 out.push_str(&format!(
1807 "**{}** [conf: {}, type: {}]\n{}\nEntities: {}\nReplicated: {} | Gap: {} | Contested: {}\nSource: {} ({})\n\n",
1808 finding.id,
1809 finding.confidence.score,
1810 finding.assertion.assertion_type,
1811 finding.assertion.text,
1812 entities.join(", "),
1813 finding.evidence.replicated,
1814 finding.flags.gap,
1815 finding.flags.contested,
1816 finding.provenance.title,
1817 finding.provenance.year.map(|y| y.to_string()).unwrap_or_else(|| "?".to_string()),
1818 ));
1819 }
1820 Ok(out)
1821}
1822
1823fn tool_get_finding(args: &Value, frontier: &Project) -> Result<String, String> {
1824 let id = args["id"].as_str().ok_or("Missing 'id' argument")?;
1825 let finding = frontier
1826 .findings
1827 .iter()
1828 .find(|finding| finding.id == id || finding.id.starts_with(id))
1829 .ok_or_else(|| format!("Finding '{id}' not found"))?;
1830 let mut context = state::finding_context(frontier, &finding.id)?;
1831 if let Value::Object(map) = &mut context {
1832 map.insert(
1833 "caveats".to_string(),
1834 json!([
1835 "Finding-local events are canonical state transitions; review_events are projection artifacts.",
1836 "Sources identify artifacts; evidence atoms identify source-grounded units that bear on the finding."
1837 ]),
1838 );
1839 }
1840 serde_json::to_string_pretty(&context).map_err(|e| format!("Serialization error: {e}"))
1841}
1842
1843fn tool_get_finding_history(args: &Value, frontier: &Project) -> Result<String, String> {
1848 let id = args["id"].as_str().ok_or("Missing 'id' argument")?;
1849 let mut events: Vec<&crate::events::StateEvent> = frontier
1850 .events
1851 .iter()
1852 .filter(|e| {
1853 e.target.r#type == "finding" && (e.target.id == id || e.target.id.starts_with(id))
1854 })
1855 .collect();
1856 events.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
1857 let payload = json!({
1858 "finding_id": id,
1859 "event_count": events.len(),
1860 "events": events,
1861 "caveats": [
1862 "Events are the canonical state-transition log; events without a 'finding' target are excluded.",
1863 "Use payload.new_finding_id on finding.superseded events to walk forward in the supersedes chain."
1864 ],
1865 });
1866 serde_json::to_string_pretty(&payload).map_err(|e| format!("Serialization error: {e}"))
1867}
1868
1869fn tool_list_gaps(frontier: &Project) -> Result<String, String> {
1870 let gaps = frontier
1871 .findings
1872 .iter()
1873 .filter(|finding| finding.flags.gap)
1874 .collect::<Vec<_>>();
1875 if gaps.is_empty() {
1876 return Ok("No gap-flagged findings in this frontier.".to_string());
1877 }
1878 let mut out = format!(
1879 "{} candidate gap review leads:\nTreat these as navigation signals, not confirmed experiment targets.\n\n",
1880 gaps.len()
1881 );
1882 for finding in gaps {
1883 out.push_str(&format!(
1884 "**{}** [conf: {}]\n{}\nConditions: {}\n\n",
1885 finding.id, finding.confidence.score, finding.assertion.text, finding.conditions.text
1886 ));
1887 }
1888 Ok(out)
1889}
1890
1891fn tool_list_contradictions(frontier: &Project) -> Result<String, String> {
1892 let lookup = frontier
1893 .findings
1894 .iter()
1895 .map(|finding| (finding.id.as_str(), finding))
1896 .collect::<HashMap<_, _>>();
1897 let mut contradictions = Vec::new();
1898 for finding in &frontier.findings {
1899 for link in &finding.links {
1900 if matches!(link.link_type.as_str(), "contradicts" | "disputes") {
1901 let target = lookup
1902 .get(link.target.as_str())
1903 .map(|f| f.assertion.text.as_str())
1904 .unwrap_or("(unknown target)");
1905 contradictions.push(format!(
1906 "**{}** {} **{}**\n {} --[{}]--> {}\n Note: {}\n",
1907 finding.id,
1908 link.link_type,
1909 link.target,
1910 trunc(&finding.assertion.text, 80),
1911 link.link_type,
1912 trunc(target, 80),
1913 link.note,
1914 ));
1915 }
1916 }
1917 }
1918 if contradictions.is_empty() {
1919 return Ok("No candidate contradiction links in this frontier.".to_string());
1920 }
1921 Ok(format!(
1922 "{} candidate contradiction links:\n\n{}",
1923 contradictions.len(),
1924 contradictions.join("\n")
1925 ))
1926}
1927
/// Tool: frontier_stats — project metadata, aggregate stats, on-demand
/// summaries of the source/evidence/condition/proposal registries, event-log
/// summary and replay report, and derived signals, as pretty-printed JSON.
fn tool_frontier_stats(frontier: &Project) -> Result<String, String> {
    serde_json::to_string_pretty(&json!({
        "frontier": {
            "name": frontier.project.name,
            "description": frontier.project.description,
            "compiled_at": frontier.project.compiled_at,
            "compiler": frontier.project.compiler,
            "papers_processed": frontier.project.papers_processed,
            "errors": frontier.project.errors,
        },
        "stats": frontier.stats,
        // The registry/summary fields below are recomputed per call.
        "source_registry": sources::source_summary(frontier),
        "evidence_atoms": sources::evidence_summary(frontier),
        "conditions": sources::condition_summary(frontier),
        "proposals": crate::proposals::summary(frontier),
        "proof_state": frontier.proof_state,
        "events": {
            "count": frontier.events.len(),
            "summary": events::summarize(frontier),
            "replay": events::replay_report(frontier),
        },
        "signals": signals::analyze(frontier, &[]).signals,
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
1953
1954fn tool_find_bridges(args: &Value, frontier: &Project) -> Result<String, String> {
1955 let min_categories = args["min_categories"].as_u64().unwrap_or(2) as usize;
1956 let limit = args["limit"].as_u64().unwrap_or(15) as usize;
1957 let mut entity_categories = HashMap::<String, HashSet<String>>::new();
1958 let mut entity_counts = HashMap::<String, usize>::new();
1959 for finding in &frontier.findings {
1960 for entity in &finding.assertion.entities {
1961 let key = entity.name.to_lowercase();
1962 entity_categories
1963 .entry(key.clone())
1964 .or_default()
1965 .insert(finding.assertion.assertion_type.clone());
1966 *entity_counts.entry(key).or_default() += 1;
1967 }
1968 }
1969 let mut bridges = entity_categories
1970 .iter()
1971 .filter(|(name, categories)| {
1972 categories.len() >= min_categories && !bridge::is_obvious(name)
1973 })
1974 .map(|(name, categories)| {
1975 json!({
1976 "entity": name,
1977 "categories": categories.iter().cloned().collect::<Vec<_>>(),
1978 "category_count": categories.len(),
1979 "finding_count": entity_counts.get(name).copied().unwrap_or(0),
1980 })
1981 })
1982 .collect::<Vec<_>>();
1983 bridges.sort_by(|a, b| {
1984 b["category_count"]
1985 .as_u64()
1986 .unwrap_or(0)
1987 .cmp(&a["category_count"].as_u64().unwrap_or(0))
1988 });
1989 bridges.truncate(limit);
1990 serde_json::to_string_pretty(&json!({"count": bridges.len(), "bridges": bridges}))
1991 .map_err(|e| format!("Serialization error: {e}"))
1992}
1993
/// Tool: propagate_retraction — simulate retracting one finding and report the
/// findings directly affected through their declared dependency links.
///
/// `finding_id` may be an exact id or a prefix. Read-only: no state changes.
fn tool_propagate_retraction(args: &Value, frontier: &Project) -> Result<String, String> {
    let id = args["finding_id"]
        .as_str()
        .ok_or("Missing 'finding_id' argument")?;
    let target = frontier
        .findings
        .iter()
        .find(|finding| finding.id == id || finding.id.starts_with(id))
        .ok_or_else(|| format!("Finding '{id}' not found"))?;

    // Reverse index answers "who links to this finding" without scanning
    // every finding's links for each query.
    let reverse_idx = frontier.build_reverse_dep_index();
    let dependent_ids = reverse_idx.dependents_of(&target.id);
    let id_to_finding: std::collections::HashMap<&str, &crate::bundle::FindingBundle> = frontier
        .findings
        .iter()
        .map(|f| (f.id.as_str(), f))
        .collect();

    let mut affected = Vec::new();
    for dep_id in dependent_ids {
        let Some(dependent) = id_to_finding.get(dep_id.as_str()) else {
            continue;
        };
        // Only "supports"/"depends" links aimed at the retracted finding count
        // as direct impact; other link kinds (e.g. contradicts) do not.
        for link in &dependent.links {
            if matches!(link.link_type.as_str(), "supports" | "depends") && link.target == target.id
            {
                affected.push(json!({
                    "id": dependent.id,
                    "assertion": trunc(&dependent.assertion.text, 100),
                    "link_type": link.link_type,
                }));
            }
        }
    }
    serde_json::to_string_pretty(&json!({
        "retracted": {"id": target.id, "assertion": trunc(&target.assertion.text, 120)},
        "directly_affected": affected.len(),
        "affected_findings": affected,
        "caveat": "Retraction impact is simulated over declared dependency links.",
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
2042
2043fn tool_apply_observer(args: &Value, frontier: &Project) -> Result<String, String> {
2044 let policy_name = args["policy"].as_str().ok_or("Missing 'policy' argument")?;
2045 let limit = args["limit"].as_u64().unwrap_or(15) as usize;
2046 let policy = observer::policy_by_name(policy_name).unwrap_or_else(observer::academic);
2047 let view = observer::observe(&frontier.findings, &frontier.replications, &policy);
2048 let top = view
2049 .findings
2050 .iter()
2051 .take(limit)
2052 .map(|scored| {
2053 let finding = frontier
2054 .findings
2055 .iter()
2056 .find(|finding| finding.id == scored.finding_id);
2057 json!({
2058 "id": scored.finding_id,
2059 "original_confidence": scored.original_confidence,
2060 "observer_score": scored.observer_score,
2061 "rank": scored.rank,
2062 "assertion": finding.map(|f| trunc(&f.assertion.text, 100)).unwrap_or_default(),
2063 })
2064 })
2065 .collect::<Vec<_>>();
2066 serde_json::to_string_pretty(&json!({
2067 "policy": policy_name,
2068 "shown": top.len(),
2069 "hidden": view.hidden,
2070 "top_findings": top,
2071 "caveat": "Observer output is policy-weighted reranking, not definitive disagreement.",
2072 }))
2073 .map_err(|e| format!("Serialization error: {e}"))
2074}
2075
2076async fn tool_check_pubmed(args: &Value, client: &Client) -> Result<String, String> {
2077 let query = args["query"].as_str().ok_or("Missing 'query' argument")?;
2078 let count = bridge::check_novelty(client, query).await?;
2079 serde_json::to_string_pretty(&json!({
2080 "query": query,
2081 "pubmed_results": count,
2082 "rough_prior_art_clear": count == 0,
2083 "caveat": "PubMed counts are rough prior-art signals, not proof of novelty.",
2084 }))
2085 .map_err(|e| format!("Serialization error: {e}"))
2086}
2087
2088fn frontier_index_json(project_infos: &[ProjectInfo]) -> Result<String, String> {
2089 let frontiers = project_infos
2090 .iter()
2091 .map(|info| {
2092 json!({
2093 "name": info.name,
2094 "file": info.file,
2095 "findings": info.findings_count,
2096 "links": info.links_count,
2097 "papers": info.papers,
2098 })
2099 })
2100 .collect::<Vec<_>>();
2101 serde_json::to_string_pretty(&json!({
2102 "frontier_count": frontiers.len(),
2103 "frontiers": frontiers,
2104 }))
2105 .map_err(|e| format!("Serialization error: {e}"))
2106}
2107
/// Tool: trace_evidence_chain — assemble the source→state lineage for one
/// finding (exact id or prefix): linked sources, evidence atoms, condition
/// records, proposals, event history, declared links, and review state.
///
/// `depth` (default 2) only scales how many links are listed (depth * 10,
/// minimum 10); it is not a recursive traversal depth.
fn tool_trace_evidence_chain(args: &Value, frontier: &Project) -> Result<String, String> {
    let id = args["finding_id"]
        .as_str()
        .ok_or("Missing 'finding_id' argument")?;
    let depth = args["depth"].as_u64().unwrap_or(2) as usize;
    let lookup = frontier
        .findings
        .iter()
        .map(|finding| (finding.id.as_str(), finding))
        .collect::<HashMap<_, _>>();
    // Exact-id lookup first; fall back to a prefix scan.
    let finding = lookup
        .get(id)
        .copied()
        .or_else(|| {
            frontier
                .findings
                .iter()
                .find(|finding| finding.id.starts_with(id))
        })
        .ok_or_else(|| format!("Finding '{id}' not found"))?;
    let links = finding
        .links
        .iter()
        .take(depth.saturating_mul(10).max(10))
        .map(|link| {
            let target = lookup.get(link.target.as_str());
            json!({
                "target": link.target,
                "type": link.link_type,
                "note": link.note,
                // None when the link target is not in this frontier.
                "target_assertion": target.map(|f| trunc(&f.assertion.text, 120)),
            })
        })
        .collect::<Vec<_>>();
    let evidence_span_count = finding.evidence.evidence_spans.len();
    // Best available source identifier: DOI, then PMID, then title.
    let source_ref = finding
        .provenance
        .doi
        .as_deref()
        .or(finding.provenance.pmid.as_deref())
        .unwrap_or(&finding.provenance.title);
    // Absent review metadata is reported as pending, same as an explicit
    // not-yet-reviewed record.
    let review_state = finding
        .provenance
        .review
        .as_ref()
        .map(|review| {
            if review.reviewed {
                "reviewed"
            } else {
                "pending_review"
            }
        })
        .unwrap_or("pending_review");
    let finding_events = events::events_for_finding(frontier, &finding.id);
    let linked_sources = sources::sources_for_finding(frontier, &finding.id);
    let linked_atoms = sources::evidence_atoms_for_finding(frontier, &finding.id);
    let linked_conditions = sources::condition_records_for_finding(frontier, &finding.id);
    let linked_proposals = crate::proposals::proposals_for_finding(frontier, &finding.id);
    serde_json::to_string_pretty(&json!({
        "finding": {"id": finding.id, "assertion": finding.assertion.text},
        "sources": linked_sources,
        "evidence_atoms": linked_atoms,
        "condition_records": linked_conditions,
        "proposals": linked_proposals,
        // Ordered pipeline view of the same data, for chain-style rendering.
        "source_to_state": [
            {"step": "source", "value": linked_sources, "fallback": source_ref},
            {"step": "evidence_atom", "value": linked_atoms},
            {"step": "condition_boundary", "value": linked_conditions},
            {"step": "proposal_lineage", "value": linked_proposals},
            {"step": "legacy_evidence", "value": {"type": finding.evidence.evidence_type, "spans": evidence_span_count, "method": finding.evidence.method}},
            {"step": "finding", "value": {"id": finding.id, "assertion_type": finding.assertion.assertion_type, "confidence": finding.confidence.score}},
            {"step": "event_history", "value": finding_events},
            {"step": "links", "value": {"declared": finding.links.len()}},
            {"step": "review_state", "value": review_state}
        ],
        "state_events": finding_events,
        "path_explanation": format!(
            "source -> evidence spans ({}) -> finding {} -> {} declared links -> {}",
            evidence_span_count,
            finding.id,
            finding.links.len(),
            review_state
        ),
        "depth": depth,
        "links": links,
        "caveat": "Evidence-chain strength is heuristic and depends on declared links.",
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
2197
/// Deep-clones a `Project` via a serde round-trip (presumably because the type
/// does not implement `Clone` — TODO confirm). If either direction fails, a
/// stub project labelled "unavailable" is returned instead of panicking.
fn clone_project(project: &Project) -> Project {
    serde_json::from_value(serde_json::to_value(project).unwrap_or_default()).unwrap_or_else(|_| {
        project::assemble("unavailable", Vec::new(), 0, 1, "failed to clone frontier")
    })
}
2203
2204fn json_rpc_result(id: &Option<Value>, result: Value) -> Value {
2205 json!({"jsonrpc": "2.0", "id": id, "result": result})
2206}
2207
2208fn json_rpc_error(id: &Option<Value>, code: i32, message: &str) -> Value {
2209 json!({"jsonrpc": "2.0", "id": id, "error": {"code": code, "message": message}})
2210}
2211
/// Truncates `s` to at most `max` bytes (backing off to the nearest char
/// boundary) and appends "..." — strings already within the limit pass
/// through unchanged.
fn trunc(s: &str, max: usize) -> String {
    if s.len() <= max {
        s.to_string()
    } else {
        // Byte 0 is always a char boundary, so `find` cannot come up empty.
        let cut = (0..=max).rev().find(|&i| s.is_char_boundary(i)).unwrap_or(0);
        format!("{}...", &s[..cut])
    }
}