1#![allow(clippy::too_many_lines)]
4
5use std::collections::{HashMap, HashSet};
6use std::io::{self, BufRead, Write};
7use std::path::{Path, PathBuf};
8use std::sync::Arc;
9
10use axum::{
11 Json, Router,
12 extract::State,
13 http::{HeaderMap, StatusCode},
14 routing::{get, post},
15};
16use reqwest::Client;
17use serde::Serialize;
18use serde_json::{Value, json};
19use tokio::sync::Mutex;
20use tower_http::cors::CorsLayer;
21
22use crate::bundle::FindingBundle;
23use crate::project::{self, ConfidenceDistribution, Project, ProjectStats};
24use crate::{bridge, decision, events, observer, repo, signals, sources, state, tool_registry};
25
/// Where frontier data comes from: a single file, or a directory whose
/// frontiers are merged into one synthetic project.
pub enum ProjectSource {
    /// One frontier file; signed write tools are only permitted in this mode.
    Single(PathBuf),
    /// A directory scanned for frontier files/projects; served read-only.
    Directory(PathBuf),
}
30
31impl ProjectSource {
32 pub fn from_args(single: Option<&Path>, dir: Option<&Path>) -> Self {
33 if let Some(d) = dir {
34 Self::Directory(d.to_path_buf())
35 } else if let Some(s) = single {
36 Self::Single(s.to_path_buf())
37 } else {
38 eprintln!(
39 "{} provide either a frontier file or --frontiers <dir>",
40 crate::cli_style::err_prefix()
41 );
42 std::process::exit(1);
43 }
44 }
45}
46
/// Summary of one loaded frontier, captured at load time for listing endpoints.
#[derive(Clone)]
pub struct ProjectInfo {
    /// Project display name from the frontier metadata.
    pub name: String,
    /// Full path in single-file mode; the file stem in directory mode.
    pub file: String,
    /// Number of findings in this frontier.
    pub findings_count: usize,
    /// Number of links reported by the frontier's stats block.
    pub links_count: usize,
    /// Papers processed when the frontier was compiled.
    pub papers: usize,
}
55
56pub fn load_projects(source: &ProjectSource) -> (Project, Vec<ProjectInfo>) {
57 match source {
58 ProjectSource::Single(path) => {
59 let mut frontier = repo::load_from_path(path).unwrap_or_else(|e| {
60 eprintln!(
61 "{} failed to load frontier: {e}",
62 crate::cli_style::err_prefix()
63 );
64 std::process::exit(1);
65 });
66 sources::materialize_project(&mut frontier);
67 let info = ProjectInfo {
68 name: frontier.project.name.clone(),
69 file: path.display().to_string(),
70 findings_count: frontier.findings.len(),
71 links_count: frontier.stats.links,
72 papers: frontier.project.papers_processed,
73 };
74 (frontier, vec![info])
75 }
76 ProjectSource::Directory(dir) => {
77 let mut entries: Vec<PathBuf> = std::fs::read_dir(dir)
78 .unwrap_or_else(|e| {
79 eprintln!(
80 "{} failed to read directory: {e}",
81 crate::cli_style::err_prefix()
82 );
83 std::process::exit(1);
84 })
85 .filter_map(Result::ok)
86 .map(|entry| entry.path())
87 .filter(|path| {
88 (path.is_dir() && path.join(".vela").exists())
89 || path.extension().is_some_and(|ext| ext == "json")
90 })
91 .collect();
92 entries.sort();
93 if entries.is_empty() {
94 eprintln!("no frontier files found in {}", dir.display());
95 std::process::exit(1);
96 }
97
98 let mut named = Vec::new();
99 for path in &entries {
100 let mut frontier = repo::load_from_path(path).unwrap_or_else(|e| {
101 eprintln!(
102 "{} failed to load {}: {e}",
103 crate::cli_style::err_prefix(),
104 path.display()
105 );
106 std::process::exit(1);
107 });
108 sources::materialize_project(&mut frontier);
109 let name = path
110 .file_stem()
111 .unwrap_or_default()
112 .to_string_lossy()
113 .to_string();
114 named.push((name, frontier));
115 }
116 let infos = named
117 .iter()
118 .map(|(name, frontier)| ProjectInfo {
119 name: frontier.project.name.clone(),
120 file: name.clone(),
121 findings_count: frontier.findings.len(),
122 links_count: frontier.stats.links,
123 papers: frontier.project.papers_processed,
124 })
125 .collect::<Vec<_>>();
126 (merge_projects(named), infos)
127 }
128 }
129}
130
/// Merge several named frontiers into one synthetic project.
///
/// Findings are deduplicated by id, keeping the copy with the highest
/// confidence score. Per-category / per-link-type counts are summed, and the
/// aggregate stats block is recomputed from the deduplicated finding set.
/// Event-log derived counters (events, sources, proposals, ...) are zeroed
/// because the merged view carries no event log of its own.
fn merge_projects(frontiers: Vec<(String, Project)>) -> Project {
    let mut findings = Vec::<FindingBundle>::new();
    let mut categories = HashMap::<String, usize>::new();
    let mut link_types = HashMap::<String, usize>::new();
    let mut names = Vec::new();
    let mut papers_processed = 0usize;
    let mut errors = 0usize;
    let mut replications = Vec::new();
    let mut datasets = Vec::new();
    let mut code_artifacts = Vec::new();
    let mut artifacts = Vec::new();
    let mut predictions = Vec::new();
    let mut resolutions = Vec::new();

    // Pass 1: concatenate all record lists and sum the additive counters.
    for (name, frontier) in frontiers {
        names.push(name);
        papers_processed += frontier.project.papers_processed;
        errors += frontier.project.errors;
        for (category, count) in frontier.stats.categories {
            *categories.entry(category).or_default() += count;
        }
        for (link_type, count) in frontier.stats.link_types {
            *link_types.entry(link_type).or_default() += count;
        }
        findings.extend(frontier.findings);
        replications.extend(frontier.replications);
        datasets.extend(frontier.datasets);
        code_artifacts.extend(frontier.code_artifacts);
        artifacts.extend(frontier.artifacts);
        predictions.extend(frontier.predictions);
        resolutions.extend(frontier.resolutions);
    }

    // Pass 2: dedupe by finding id. `seen` maps id -> index into `deduped`
    // so the higher-confidence replacement is O(1).
    let mut deduped = Vec::<FindingBundle>::new();
    let mut seen = HashMap::<String, usize>::new();
    for finding in findings {
        if let Some(existing) = seen.get(&finding.id).copied() {
            if finding.confidence.score > deduped[existing].confidence.score {
                deduped[existing] = finding;
            }
        } else {
            seen.insert(finding.id.clone(), deduped.len());
            deduped.push(finding);
        }
    }

    let links = deduped.iter().map(|finding| finding.links.len()).sum();
    // Replication status: explicit replication records take precedence over a
    // finding's own `evidence.replicated` flag; the flag is only trusted for
    // findings that have no replication record at all.
    let mut targets_with_success: HashSet<&str> = HashSet::new();
    let mut targets_with_any_record: HashSet<&str> = HashSet::new();
    for r in &replications {
        targets_with_any_record.insert(r.target_finding.as_str());
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.as_str());
        }
    }
    let replicated = deduped
        .iter()
        .filter(|finding| {
            if targets_with_any_record.contains(finding.id.as_str()) {
                targets_with_success.contains(finding.id.as_str())
            } else {
                finding.evidence.replicated
            }
        })
        .count();
    // Mean confidence rounded to three decimal places; 0.0 for an empty set.
    let avg_confidence = if deduped.is_empty() {
        0.0
    } else {
        (deduped
            .iter()
            .map(|finding| finding.confidence.score)
            .sum::<f64>()
            / deduped.len() as f64
            * 1000.0)
            .round()
            / 1000.0
    };
    let stats = ProjectStats {
        findings: deduped.len(),
        links,
        replicated,
        unreplicated: deduped.len().saturating_sub(replicated),
        avg_confidence,
        gaps: deduped.iter().filter(|finding| finding.flags.gap).count(),
        negative_space: deduped
            .iter()
            .filter(|finding| finding.flags.negative_space)
            .count(),
        contested: deduped
            .iter()
            .filter(|finding| finding.flags.contested)
            .count(),
        categories,
        link_types,
        human_reviewed: deduped
            .iter()
            .filter(|finding| {
                finding
                    .provenance
                    .review
                    .as_ref()
                    .is_some_and(|review| review.reviewed)
            })
            .count(),
        // Event-log derived counters: not reconstructable in a merged view.
        review_event_count: 0,
        confidence_update_count: 0,
        event_count: 0,
        source_count: 0,
        evidence_atom_count: 0,
        condition_record_count: 0,
        proposal_count: 0,
        confidence_distribution: ConfidenceDistribution {
            high_gt_80: deduped
                .iter()
                .filter(|finding| finding.confidence.score > 0.8)
                .count(),
            medium_60_80: deduped
                .iter()
                .filter(|finding| (0.6..=0.8).contains(&finding.confidence.score))
                .count(),
            low_lt_60: deduped
                .iter()
                .filter(|finding| finding.confidence.score < 0.6)
                .count(),
        },
    };

    let mut project = Project {
        vela_version: project::VELA_SCHEMA_VERSION.to_string(),
        schema: project::VELA_SCHEMA_URL.to_string(),
        frontier_id: None,
        project: project::ProjectMeta {
            name: format!("merged: {}", names.join(", ")),
            description: format!("Merged from {} frontiers", names.len()),
            compiled_at: chrono::Utc::now().to_rfc3339(),
            compiler: project::VELA_COMPILER_VERSION.to_string(),
            papers_processed,
            errors,
            dependencies: Vec::new(),
        },
        stats,
        findings: deduped,
        sources: Vec::new(),
        evidence_atoms: Vec::new(),
        condition_records: Vec::new(),
        review_events: Vec::new(),
        confidence_updates: Vec::new(),
        events: Vec::new(),
        proposals: Vec::new(),
        proof_state: Default::default(),
        signatures: Vec::new(),
        actors: Vec::new(),
        replications,
        datasets,
        code_artifacts,
        artifacts,
        predictions,
        resolutions,
        peers: Vec::new(),
        negative_results: Vec::new(),
        trajectories: Vec::new(),
    };
    // Rebuild derived source records for the merged finding set.
    sources::materialize_project(&mut project);
    project
}
304
/// Serve the MCP protocol over stdio (line-delimited JSON-RPC).
///
/// Reads one JSON-RPC request per line from stdin and writes one response per
/// line to stdout. Blank and unparseable lines are skipped silently, and
/// `notifications/initialized` gets no response, per the MCP handshake.
/// The loop ends when stdin closes or becomes unreadable.
pub async fn run(source: ProjectSource, _backend: Option<&str>) {
    // Best-effort .env loading (API keys etc.); absence is not an error.
    dotenvy::dotenv().ok();
    let (frontier, project_infos) = load_projects(&source);
    // Write tools need a backing file, so a path is only kept in single-file mode.
    let source_path: Option<PathBuf> = match &source {
        ProjectSource::Single(path) => Some(path.clone()),
        ProjectSource::Directory(_) => None,
    };
    let frontier = Arc::new(Mutex::new(frontier));
    let client = Client::new();
    let stdin = io::stdin();
    let stdout = io::stdout();

    for line in stdin.lock().lines() {
        let Ok(line) = line else {
            // Read error: treat as EOF and shut down.
            break;
        };
        if line.trim().is_empty() {
            continue;
        }
        let Ok(request) = serde_json::from_str::<Value>(&line) else {
            // Malformed JSON is dropped without a response.
            continue;
        };
        let id = request.get("id").cloned();
        // Missing "method" yields Null -> "" -> falls through to Method-not-found.
        let method = request["method"].as_str().unwrap_or_default();
        let response = match method {
            "initialize" => json_rpc_result(
                &id,
                json!({
                    "protocolVersion": "2024-11-05",
                    "capabilities": {"tools": {}},
                    "serverInfo": {"name": "vela", "version": project::VELA_SCHEMA_VERSION}
                }),
            ),
            // Notifications carry no id and must not be answered.
            "notifications/initialized" => continue,
            "tools/list" => json_rpc_result(&id, json!({"tools": tool_registry::mcp_tools_json()})),
            "tools/call" => {
                let name = request["params"]["name"].as_str().unwrap_or_default();
                let args = request["params"]["arguments"].clone();
                handle_tool_call(
                    &id,
                    name,
                    &args,
                    &frontier,
                    &client,
                    &project_infos,
                    source_path.as_deref(),
                )
                .await
            }
            "ping" => json_rpc_result(&id, json!({})),
            _ => json_rpc_error(&id, -32601, "Method not found"),
        };
        // Lock stdout once per response; flush so the client sees it promptly.
        let mut out = stdout.lock();
        let _ = serde_json::to_writer(&mut out, &response);
        let _ = out.write_all(b"\n");
        let _ = out.flush();
    }
}
363
/// Serve the frontier over HTTP on `0.0.0.0:<port>` with permissive CORS.
///
/// Registers the read API, the queue/proposal write endpoints, and the
/// MCP-style tool dispatch route. With `workbench` set, static UI assets are
/// served as a fallback when the expected `web/` directory exists. This
/// function runs the server forever; it only panics if `axum::serve` fails.
pub async fn run_http(source: ProjectSource, backend: Option<&str>, port: u16, workbench: bool) {
    // Backend selection is currently unused over HTTP.
    let _ = backend;
    dotenvy::dotenv().ok();
    let (frontier, project_infos) = load_projects(&source);
    // Write endpoints need a backing file; only available in single-file mode.
    let source_path = match &source {
        ProjectSource::Single(path) => Some(path.clone()),
        ProjectSource::Directory(_) => None,
    };
    let state = AppState {
        project: Arc::new(Mutex::new(frontier)),
        project_infos,
        client: Client::new(),
        source_path,
    };

    let mut app = Router::new()
        .route("/health", get(http_health))
        .route("/healthz", get(http_health))
        .route("/api/frontier", get(http_frontier))
        .route("/api/findings", get(http_findings))
        .route("/api/findings/{id}", get(http_finding_by_id))
        .route("/api/contradictions", get(http_contradictions))
        .route("/api/discord", get(http_discord))
        .route("/api/tensions", get(http_tensions))
        .route("/api/gaps", get(http_gaps))
        .route("/api/artifacts", get(http_artifacts))
        .route("/api/artifact-audit", get(http_artifact_audit))
        .route("/api/decision-brief", get(http_decision_brief))
        .route("/api/trials", get(http_trials))
        .route("/api/source-verification", get(http_source_verification))
        .route("/api/source-ingest-plan", get(http_source_ingest_plan))
        .route("/api/observer/{policy}", get(http_observer))
        .route("/api/propagate/{id}", get(http_propagate))
        .route("/api/hypotheses", get(http_bridges))
        .route("/api/stats", get(http_stats))
        .route("/api/frontiers", get(http_frontiers))
        .route("/api/pubmed", get(http_pubmed))
        .route("/api/events", get(http_events))
        .route("/api/queue", post(http_queue_append))
        .route("/api/proposals/from-carina", post(http_from_carina))
        .route("/api/tools", get(http_tools_list))
        .route("/mcp/tools", get(http_tools_list))
        .route("/api/tool", post(http_tool_call));

    if workbench {
        // Serve the bundled UI when present; otherwise degrade to API-only.
        let web_dir = workbench_web_dir();
        if web_dir.exists() {
            app = app.fallback_service(tower_http::services::ServeDir::new(web_dir));
        } else {
            eprintln!(
                "{} --workbench: web/ directory not found at expected location; serving API only",
                crate::cli_style::err_prefix()
            );
        }
    }

    // CORS is intentionally permissive: the workbench UI may be hosted elsewhere.
    let app = app.layer(CorsLayer::permissive()).with_state(state);

    let addr = format!("0.0.0.0:{port}");
    // Startup banner and endpoint cheat-sheet go to stderr, keeping stdout clean.
    eprintln!(
        " {}",
        if workbench {
            format!("VELA · WORKBENCH :{port}").to_uppercase()
        } else {
            format!("VELA · SERVE · HTTP :{port}").to_uppercase()
        }
        .as_str()
    );
    eprintln!(" {}", crate::cli_style::tick_row(60));
    eprintln!(" listening on http://{addr}");
    if workbench {
        eprintln!(" workbench UI: https://vela-site.fly.dev/frontiers/view?api=http://{addr}");
        eprintln!(
            " (or http://localhost:4321/frontiers/view?api=http://{addr} for a local site)"
        );
    }
    eprintln!(" endpoints:");
    eprintln!(" health: GET /health");
    eprintln!(" state: GET /api/frontier /api/frontiers /api/stats");
    eprintln!(" findings: GET /api/findings /api/findings/{{id}}");
    eprintln!(" (no params -> structured list; query=... -> search)");
    eprintln!(" events: GET /api/events");
    eprintln!(" artifacts: GET /api/artifacts /api/artifact-audit");
    eprintln!(" discord: GET /api/contradictions /api/tensions /api/gaps");
    eprintln!(" /api/hypotheses (cross-frontier bridges)");
    eprintln!(" /api/discord (frontier-wide discord report)");
    eprintln!(
        " projections:GET /api/decision-brief /api/trials /api/source-verification"
    );
    eprintln!(" /api/source-ingest-plan /api/observer/{{policy}}");
    eprintln!(" /api/propagate/{{id}} /api/pubmed");
    eprintln!(" queue: POST /api/queue");
    eprintln!(" agent: POST /api/proposals/from-carina (Carina artifact -> proposals)");
    eprintln!(" tools: POST /api/tool/{{name}} (MCP-style tool dispatch)");
    let listener = tokio::net::TcpListener::bind(&addr)
        .await
        .unwrap_or_else(|e| {
            eprintln!(
                "{} failed to bind to {addr}: {e}",
                crate::cli_style::err_prefix()
            );
            std::process::exit(1);
        });
    axum::serve(listener, app).await.unwrap();
}
499
/// Self-check for `serve --check-tools`: run every read-only tool once against
/// the loaded frontier and return a machine-readable report.
///
/// Always returns `Ok`; per-check outcomes are carried inside the payload
/// (`summary`, `checks`, `failures`, and the top-level `ok` flag). The
/// finding-specific tools only run when the frontier has at least one finding
/// to use as a probe id.
pub fn check_tools(source: ProjectSource) -> Result<Value, String> {
    let started = std::time::Instant::now();
    let (frontier, _project_infos) = load_projects(&source);
    // Probe id for the finding-scoped tools, if any finding exists.
    let first_id = frontier.findings.first().map(|finding| finding.id.clone());
    // NOTE(review): all checks share one start instant, so the reported
    // duration_ms values are cumulative from the first check, not per-tool.
    let mut checks = vec![
        check_tool_result("frontier_stats", tool_frontier_stats(&frontier), started),
        check_tool_result(
            "search_findings",
            tool_search_findings(&json!({"query": "amyloid", "limit": 3}), &frontier),
            started,
        ),
        check_tool_result("list_gaps", tool_list_gaps(&frontier), started),
        check_tool_result(
            "list_contradictions",
            tool_list_contradictions(&frontier),
            started,
        ),
        check_tool_result(
            "find_bridges",
            tool_find_bridges(&json!({"limit": 5, "min_categories": 2}), &frontier),
            started,
        ),
        check_tool_result(
            "apply_observer",
            tool_apply_observer(&json!({"policy": "academic", "limit": 5}), &frontier),
            started,
        ),
        check_tool_result(
            "propagate_retraction",
            // Deliberately unknown id: exercises the tool's missing-finding path.
            tool_propagate_retraction(&json!({"finding_id": "vf_missing"}), &frontier),
            started,
        ),
    ];
    if let Some(id) = first_id {
        checks.push(check_tool_result(
            "get_finding",
            tool_get_finding(&json!({"id": id}), &frontier),
            started,
        ));
        checks.push(check_tool_result(
            "get_finding_history",
            tool_get_finding_history(&json!({"id": id}), &frontier),
            started,
        ));
        checks.push(check_tool_result(
            "trace_evidence_chain",
            tool_trace_evidence_chain(&json!({"finding_id": id}), &frontier),
            started,
        ));
    }
    // A check fails unless its "ok" field is literally true.
    let failures = checks
        .iter()
        .filter(|check| check.get("ok").and_then(Value::as_bool) != Some(true))
        .filter_map(|check| {
            check
                .get("tool")
                .and_then(Value::as_str)
                .map(str::to_string)
        })
        .collect::<Vec<_>>();
    let checked_tools = checks
        .iter()
        .filter_map(|check| check.get("tool").and_then(Value::as_str))
        .map(str::to_string)
        .collect::<Vec<_>>();
    // Report registry contents alongside so coverage gaps are visible.
    let registered_tools = tool_registry::all_tools()
        .into_iter()
        .map(|tool| tool.name)
        .collect::<Vec<_>>();

    Ok(json!({
        "ok": failures.is_empty(),
        "command": "serve --check-tools",
        "schema": "vela.tool-check.v0",
        "frontier": {
            "name": frontier.project.name,
            "findings": frontier.stats.findings,
            "links": frontier.stats.links,
        },
        "summary": {
            "checks": checks.len(),
            "passed": checks.len().saturating_sub(failures.len()),
            "failed": failures.len(),
        },
        "tool_count": checked_tools.len(),
        "tools": checked_tools,
        "registered_tool_count": registered_tools.len(),
        "registered_tools": registered_tools,
        "checks": checks,
        "failures": failures,
    }))
}
592
/// Shared state handed to every HTTP handler.
#[derive(Clone)]
struct AppState {
    /// The (possibly merged) project, shared across concurrent handlers.
    project: Arc<Mutex<Project>>,
    /// Per-file summaries captured at load time.
    project_infos: Vec<ProjectInfo>,
    /// Reused HTTP client for outbound lookups (e.g. PubMed).
    client: Client,
    /// Backing file when serving a single frontier; `None` in directory mode,
    /// in which case write tools are rejected.
    source_path: Option<PathBuf>,
}
604
/// Envelope for a single tool invocation, serialized into MCP responses.
#[derive(Debug, Clone, Serialize)]
struct ToolResult {
    /// Tool name as dispatched.
    tool: String,
    /// Whether the invocation succeeded (inverse of the caller's error flag).
    ok: bool,
    /// Parsed JSON output, or `{"text": ...}` when the output was not JSON.
    data: Value,
    /// The raw textual output as returned by the tool.
    markdown: String,
    /// Frontier-wide signals computed from the project snapshot, if one was given.
    signals: Vec<signals::SignalItem>,
    /// Registry-declared caveats for this tool.
    caveats: Vec<String>,
    /// Wall-clock duration of the call in milliseconds.
    duration_ms: u128,
}
615
616impl ToolResult {
617 fn from_text(
618 tool: &str,
619 text: String,
620 duration_ms: u128,
621 is_error: bool,
622 frontier: Option<&Project>,
623 ) -> Self {
624 let data = serde_json::from_str(&text).unwrap_or_else(|_| json!({"text": text}));
625 let signal_items = frontier
626 .map(|project| signals::analyze(project, &[]).signals)
627 .unwrap_or_default();
628 Self {
629 tool: tool.to_string(),
630 ok: !is_error,
631 data,
632 markdown: text,
633 signals: signal_items,
634 caveats: tool_registry::tool_caveats(tool),
635 duration_ms,
636 }
637 }
638
639 fn metadata(&self) -> Value {
640 json!({
641 "tool": self.tool,
642 "ok": self.ok,
643 "duration_ms": self.duration_ms,
644 "signals": self.signals,
645 "caveats": self.caveats,
646 "definition": tool_registry::get_tool(&self.tool),
647 })
648 }
649
650 fn to_json_text(&self) -> String {
651 serde_json::to_string_pretty(self).unwrap_or_else(|_| "{}".to_string())
652 }
653}
654
655async fn handle_tool_call(
656 id: &Option<Value>,
657 name: &str,
658 args: &Value,
659 frontier: &Arc<Mutex<Project>>,
660 client: &Client,
661 project_infos: &[ProjectInfo],
662 source_path: Option<&Path>,
663) -> Value {
664 let started = std::time::Instant::now();
665 let (result, snapshot) =
666 execute_tool(name, args, frontier, client, project_infos, source_path).await;
667 match result {
668 Ok(text) => {
669 let output = ToolResult::from_text(
670 name,
671 text,
672 started.elapsed().as_millis(),
673 false,
674 snapshot.as_ref(),
675 );
676 json_rpc_result(
677 id,
678 json!({
679 "content": [{"type": "text", "text": output.to_json_text()}],
680 "isError": false,
681 "_meta": output.metadata()
682 }),
683 )
684 }
685 Err(error) => {
686 let output = ToolResult::from_text(
687 name,
688 error,
689 started.elapsed().as_millis(),
690 true,
691 snapshot.as_ref(),
692 );
693 json_rpc_result(
694 id,
695 json!({
696 "content": [{"type": "text", "text": output.to_json_text()}],
697 "isError": true,
698 "_meta": output.metadata()
699 }),
700 )
701 }
702 }
703}
704
/// Dispatch a named tool against the shared project state.
///
/// Returns the tool's textual result plus a snapshot of the project it ran
/// against, so the caller can compute frontier-wide signals without
/// re-locking. Read-only tools hold the lock only for the duration of the
/// call; write tools persist via their helpers (which lock internally) and
/// the snapshot is taken *after* the write, so it reflects the new state.
async fn execute_tool(
    name: &str,
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    client: &Client,
    _project_infos: &[ProjectInfo],
    source_path: Option<&Path>,
) -> (Result<String, String>, Option<Project>) {
    match name {
        // --- read-only tools: lock, run, snapshot ---
        "search_findings" => {
            let project = frontier.lock().await;
            (
                tool_search_findings(args, &project),
                Some(clone_project(&project)),
            )
        }
        "get_finding" => {
            let project = frontier.lock().await;
            (
                tool_get_finding(args, &project),
                Some(clone_project(&project)),
            )
        }
        "get_finding_history" => {
            let project = frontier.lock().await;
            (
                tool_get_finding_history(args, &project),
                Some(clone_project(&project)),
            )
        }
        "list_gaps" => {
            let project = frontier.lock().await;
            (tool_list_gaps(&project), Some(clone_project(&project)))
        }
        "list_contradictions" => {
            let project = frontier.lock().await;
            (
                tool_list_contradictions(&project),
                Some(clone_project(&project)),
            )
        }
        "frontier_stats" => {
            let project = frontier.lock().await;
            (tool_frontier_stats(&project), Some(clone_project(&project)))
        }
        "find_bridges" => {
            let project = frontier.lock().await;
            (
                tool_find_bridges(args, &project),
                Some(clone_project(&project)),
            )
        }
        "propagate_retraction" => {
            let project = frontier.lock().await;
            (
                tool_propagate_retraction(args, &project),
                Some(clone_project(&project)),
            )
        }
        "apply_observer" => {
            let project = frontier.lock().await;
            (
                tool_apply_observer(args, &project),
                Some(clone_project(&project)),
            )
        }
        "trace_evidence_chain" => {
            let project = frontier.lock().await;
            (
                tool_trace_evidence_chain(args, &project),
                Some(clone_project(&project)),
            )
        }
        // Network-only tool: no project state involved, hence no snapshot.
        "check_pubmed" => (tool_check_pubmed(args, client).await, None),
        "list_events_since" => {
            let project = frontier.lock().await;
            (
                tool_list_events_since(args, &project),
                Some(clone_project(&project)),
            )
        }
        // --- signed write tools: propose (and maybe apply), then snapshot the
        // reloaded state ---
        "propose_review" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.review",
                |args| {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("propose_review requires `status`")?;
                    if !matches!(
                        status,
                        "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
                    ) {
                        return Err(format!("invalid review status '{status}'"));
                    }
                    Ok(json!({"status": status}))
                },
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "propose_note" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.note",
                |args| build_note_payload(args, "propose_note"),
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        // Same payload as propose_note, but requests auto-apply (subject to
        // the actor's tier check inside write_tool_propose).
        "propose_and_apply_note" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.note",
                |args| build_note_payload(args, "propose_and_apply_note"),
                true,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "propose_revise_confidence" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.confidence_revise",
                |args| {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("propose_revise_confidence requires `new_score`")?;
                    if !(0.0..=1.0).contains(&new_score) {
                        return Err(format!("new_score {new_score} out of [0.0, 1.0]"));
                    }
                    Ok(json!({"new_score": new_score}))
                },
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "propose_retract" => {
            let result = write_tool_propose(
                args,
                frontier,
                source_path,
                "finding.retract",
                |_args| Ok(json!({})),
                false,
            )
            .await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        // --- reviewer decisions on pending proposals ---
        "accept_proposal" => {
            let result = write_tool_decision(args, frontier, source_path, "accept").await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        "reject_proposal" => {
            let result = write_tool_decision(args, frontier, source_path, "reject").await;
            let snapshot = Some(clone_project(&*frontier.lock().await));
            (result, snapshot)
        }
        _ => (Err(format!("Unknown tool: {name}")), None),
    }
}
893
894fn build_note_payload(args: &Value, tool_name: &str) -> Result<Value, String> {
900 let text = args
901 .get("text")
902 .and_then(Value::as_str)
903 .ok_or_else(|| format!("{tool_name} requires `text`"))?;
904 if text.trim().is_empty() {
905 return Err("text must be non-empty".to_string());
906 }
907 let mut payload = json!({"text": text});
908 if let Some(prov) = args.get("provenance") {
909 let prov_obj = prov
910 .as_object()
911 .ok_or("provenance must be a JSON object when present")?;
912 let has_id = ["doi", "pmid", "title"].iter().any(|k| {
913 prov_obj
914 .get(*k)
915 .and_then(Value::as_str)
916 .is_some_and(|s| !s.trim().is_empty())
917 });
918 if !has_id {
919 return Err("provenance must include at least one of doi/pmid/title".to_string());
920 }
921 payload["provenance"] = prov.clone();
922 }
923 Ok(payload)
924}
925
/// Shared implementation behind every signed proposal-writing tool.
///
/// Flow: require a single-file frontier (writes are rejected in directory
/// mode) -> extract the common arguments -> build the kind-specific payload
/// via `payload_builder` -> resolve the actor and, for auto-apply requests,
/// check its tier -> reconstruct the canonical proposal and verify the
/// caller's Ed25519 signature against it -> persist (create, or
/// create-and-apply) -> reload the file into the shared state so subsequent
/// reads observe the write.
///
/// Returns a JSON string with `proposal_id`, `finding_id`, `status`, and
/// `applied_event_id`; any validation or persistence failure is an `Err`.
async fn write_tool_propose<F>(
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    source_path: Option<&Path>,
    kind: &str,
    payload_builder: F,
    apply_if_tier_permits: bool,
) -> Result<String, String>
where
    F: Fn(&Value) -> Result<Value, String>,
{
    let path = source_path.ok_or_else(|| {
        "Write tools require a single-file frontier (--frontier <PATH>); rejected in --frontiers <DIR> mode".to_string()
    })?;
    let actor_id = args
        .get("actor_id")
        .and_then(Value::as_str)
        .ok_or("write tool requires `actor_id`")?;
    let target_finding_id = args
        .get("target_finding_id")
        .and_then(Value::as_str)
        .ok_or("write tool requires `target_finding_id`")?;
    let reason = args
        .get("reason")
        .and_then(Value::as_str)
        .ok_or("write tool requires `reason`")?;
    let signature_hex = args
        .get("signature")
        .and_then(Value::as_str)
        .ok_or("write tool requires `signature` (Ed25519 over canonical proposal preimage)")?;
    // `created_at` is signed, so the caller may pin it; default to "now".
    let created_at = args
        .get("created_at")
        .and_then(Value::as_str)
        .map(String::from)
        .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
    let payload = payload_builder(args)?;

    // Scope the lock: only the actor lookup needs the project.
    let (pubkey, tier_permits_apply) = {
        let project = frontier.lock().await;
        let actor = project
            .actors
            .iter()
            .find(|actor| actor.id == actor_id)
            .ok_or_else(|| {
                format!(
                    "actor '{actor_id}' is not registered in this frontier; register via `vela actor add` before writing"
                )
            })?;
        let tier_permits = crate::sign::actor_can_auto_apply(actor, kind);
        // Auto-apply requests from an under-privileged actor fail outright
        // rather than silently downgrading to a plain proposal.
        if apply_if_tier_permits && !tier_permits {
            let tier_label = actor.tier.as_deref().unwrap_or("none");
            return Err(format!(
                "actor '{actor_id}' tier '{tier_label}' does not permit auto-apply for {kind}"
            ));
        }
        (actor.public_key.clone(), tier_permits)
    };

    let mut proposal = crate::proposals::new_proposal(
        kind,
        crate::events::StateTarget {
            r#type: "finding".to_string(),
            id: target_finding_id.to_string(),
        },
        actor_id,
        "human",
        reason,
        payload,
        Vec::new(),
        Vec::new(),
    );
    // The id derives from the canonical proposal content, so it must be
    // recomputed after overriding `created_at`.
    proposal.created_at = created_at;
    proposal.id = crate::proposals::proposal_id(&proposal);

    let valid = crate::sign::verify_proposal_signature(&proposal, signature_hex, &pubkey)?;
    if !valid {
        return Err(format!(
            "Signature does not verify for actor '{actor_id}' on this proposal"
        ));
    }

    let apply = apply_if_tier_permits && tier_permits_apply;
    let result = crate::proposals::create_or_apply(path, proposal, apply)
        .map_err(|e| format!("create_or_apply failed: {e}"))?;

    // Re-read the file so the in-memory state matches what was persisted.
    let fresh =
        crate::repo::load_from_path(path).map_err(|e| format!("reload after write failed: {e}"))?;
    let mut project = frontier.lock().await;
    *project = fresh;

    serde_json::to_string(&json!({
        "proposal_id": result.proposal_id,
        "finding_id": result.finding_id,
        "status": result.status,
        "applied_event_id": result.applied_event_id,
    }))
    .map_err(|e| format!("serialize write result: {e}"))
}
1041
/// Shared implementation for reviewer decisions (`accept` / `reject`) on a
/// pending proposal.
///
/// The reviewer signs a canonical preimage of
/// `{action, proposal_id, reviewer_id, reason, timestamp}`; the signature is
/// verified against the reviewer's registered public key before the decision
/// is persisted. After persisting, the file is reloaded into the shared state.
async fn write_tool_decision(
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    source_path: Option<&Path>,
    action: &str,
) -> Result<String, String> {
    let path = source_path.ok_or_else(|| {
        "Write tools require a single-file frontier (--frontier <PATH>); rejected in --frontiers <DIR> mode".to_string()
    })?;
    let proposal_id = args
        .get("proposal_id")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `proposal_id`")?;
    let reviewer_id = args
        .get("reviewer_id")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `reviewer_id`")?;
    let reason = args
        .get("reason")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `reason`")?;
    let signature_hex = args
        .get("signature")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `signature`")?;
    // `timestamp` is part of the signed preimage; callers may pin it.
    let timestamp = args
        .get("timestamp")
        .and_then(Value::as_str)
        .map(String::from)
        .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

    // Canonical byte form of the decision; field order is fixed by the
    // canonicalizer, not by this literal.
    let preimage = json!({
        "action": action,
        "proposal_id": proposal_id,
        "reviewer_id": reviewer_id,
        "reason": reason,
        "timestamp": timestamp,
    });
    let signing_bytes = crate::canonical::to_canonical_bytes(&preimage)?;

    // Scope the lock: only the reviewer's public key is needed.
    let pubkey = {
        let project = frontier.lock().await;
        project
            .actors
            .iter()
            .find(|actor| actor.id == reviewer_id)
            .map(|actor| actor.public_key.clone())
            .ok_or_else(|| format!("reviewer '{reviewer_id}' is not registered"))?
    };

    let valid = crate::sign::verify_action_signature(&signing_bytes, signature_hex, &pubkey)?;
    if !valid {
        return Err(format!(
            "Signature does not verify for reviewer '{reviewer_id}' on {action} of {proposal_id}"
        ));
    }

    let outcome = match action {
        "accept" => {
            let event_id = crate::proposals::accept_at_path(path, proposal_id, reviewer_id, reason)
                .map_err(|e| format!("accept failed: {e}"))?;
            json!({
                "proposal_id": proposal_id,
                "applied_event_id": event_id,
                "status": "applied",
            })
        }
        "reject" => {
            crate::proposals::reject_at_path(path, proposal_id, reviewer_id, reason)
                .map_err(|e| format!("reject failed: {e}"))?;
            json!({
                "proposal_id": proposal_id,
                "applied_event_id": Value::Null,
                "status": "rejected",
            })
        }
        other => return Err(format!("unsupported decision action '{other}'")),
    };

    // Re-read the file so the in-memory state matches what was persisted.
    let fresh =
        crate::repo::load_from_path(path).map_err(|e| format!("reload after write failed: {e}"))?;
    let mut project = frontier.lock().await;
    *project = fresh;

    serde_json::to_string(&outcome).map_err(|e| format!("serialize decision: {e}"))
}
1134
1135fn tool_list_events_since(args: &Value, project: &Project) -> Result<String, String> {
1139 let cursor = args.get("cursor").and_then(Value::as_str);
1140 let limit = args
1141 .get("limit")
1142 .and_then(Value::as_u64)
1143 .map_or(100usize, |n| (n as usize).min(500));
1144 let start_idx: usize = match cursor {
1145 None => 0,
1146 Some(c) => match project.events.iter().position(|event| event.id == c) {
1147 Some(idx) => idx + 1,
1148 None => {
1149 return Err(format!(
1150 "cursor '{c}' not found in event log; client is out of sync"
1151 ));
1152 }
1153 },
1154 };
1155 let end_idx = (start_idx + limit).min(project.events.len());
1156 let slice = &project.events[start_idx..end_idx];
1157 let next_cursor = if end_idx < project.events.len() {
1158 slice.last().map(|event| event.id.clone())
1159 } else {
1160 None
1161 };
1162 let payload = json!({
1163 "events": slice,
1164 "count": slice.len(),
1165 "next_cursor": next_cursor,
1166 "log_total": project.events.len(),
1167 });
1168 serde_json::to_string(&payload).map_err(|e| format!("serialize list_events_since: {e}"))
1169}
1170
/// Run a tool result through the `ToolResult` envelope and report a
/// self-check payload: whether the result carries data, markdown, signals,
/// and caveats, plus an aggregate "ok" verdict.
fn check_tool_result(
    name: &str,
    result: Result<String, String>,
    started: std::time::Instant,
) -> Value {
    // On Err, the error string is fed through the same path as success text;
    // the `false` flag marks this invocation as non-error for the envelope.
    let output = ToolResult::from_text(
        name,
        result.unwrap_or_else(|e| e),
        started.elapsed().as_millis(),
        false,
        None,
    );
    let has_data = !output.data.is_null();
    let has_markdown = !output.markdown.trim().is_empty();
    // NOTE(review): `has_signals` and `has_caveats` are hard-coded to `true`,
    // so the "ok" verdict can never fail on missing signals/caveats even
    // though `output.signals`/`output.caveats` are available below —
    // presumably these were meant to be derived from those fields; confirm
    // the intent before relying on this check.
    let has_signals = true;
    let has_caveats = true;
    json!({
        "tool": name,
        "ok": has_data && has_markdown && has_signals && has_caveats,
        "data": output.data,
        "markdown": output.markdown,
        "has_data": has_data,
        "has_markdown": has_markdown,
        "has_signals": has_signals,
        "has_caveats": has_caveats,
        "signals": output.signals,
        "caveats": output.caveats,
        "duration_ms": output.duration_ms,
    })
}
1201
1202async fn http_events(
1216 State(state): State<AppState>,
1217 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1218) -> (StatusCode, Json<Value>) {
1219 let project = state.project.lock().await;
1220 let limit = params
1221 .get("limit")
1222 .and_then(|v| v.parse::<usize>().ok())
1223 .unwrap_or(100)
1224 .min(500);
1225 let start_idx: usize = match params.get("since") {
1226 None => 0,
1227 Some(cursor) => match project.events.iter().position(|event| &event.id == cursor) {
1228 Some(idx) => idx + 1,
1229 None => {
1230 return (
1231 StatusCode::BAD_REQUEST,
1232 Json(json!({
1233 "error": format!(
1234 "cursor '{cursor}' not found in event log; client is out of sync"
1235 ),
1236 })),
1237 );
1238 }
1239 },
1240 };
1241 let kind_filter = params.get("kind").map(String::as_str);
1247 let target_filter = params.get("target").map(String::as_str);
1248 let filtered: Vec<&crate::events::StateEvent> = project
1249 .events
1250 .iter()
1251 .skip(start_idx)
1252 .filter(|e| kind_filter.is_none_or(|k| e.kind == k))
1253 .filter(|e| target_filter.is_none_or(|t| e.target.id == t))
1254 .collect();
1255 let total_filtered = filtered.len();
1256 let take_n = limit.min(total_filtered);
1257 let slice: Vec<&crate::events::StateEvent> = filtered.into_iter().take(take_n).collect();
1258 let next_cursor = if take_n < total_filtered {
1259 slice.last().map(|event| event.id.clone())
1260 } else {
1261 None
1262 };
1263 (
1264 StatusCode::OK,
1265 Json(json!({
1266 "events": slice,
1267 "count": slice.len(),
1268 "next_cursor": next_cursor,
1269 "log_total": project.events.len(),
1270 "filtered_total": total_filtered,
1271 })),
1272 )
1273}
1274
1275async fn http_queue_append(
1286 State(state): State<AppState>,
1287 Json(body): Json<Value>,
1288) -> (StatusCode, Json<Value>) {
1289 let path = match &state.source_path {
1290 Some(p) => p.clone(),
1291 None => {
1292 return (
1293 StatusCode::BAD_REQUEST,
1294 Json(
1295 json!({"error": "Workbench queue requires a single-file frontier (--frontier <PATH>)"}),
1296 ),
1297 );
1298 }
1299 };
1300 let kind = match body.get("kind").and_then(Value::as_str) {
1301 Some(k) => k.to_string(),
1302 None => {
1303 return (
1304 StatusCode::BAD_REQUEST,
1305 Json(json!({"error": "POST /api/queue requires `kind`"})),
1306 );
1307 }
1308 };
1309 let valid_kinds = [
1310 "propose_review",
1311 "propose_note",
1312 "propose_revise_confidence",
1313 "propose_retract",
1314 "accept_proposal",
1315 "reject_proposal",
1316 ];
1317 if !valid_kinds.contains(&kind.as_str()) {
1318 return (
1319 StatusCode::BAD_REQUEST,
1320 Json(json!({"error": format!("unsupported queue kind '{kind}'")})),
1321 );
1322 }
1323 let args = body.get("args").cloned().unwrap_or(Value::Null);
1324 let queued_at = chrono::Utc::now().to_rfc3339();
1325 let action = crate::queue::QueuedAction {
1326 kind,
1327 frontier: path,
1328 args,
1329 queued_at: queued_at.clone(),
1330 };
1331 let queue_path = crate::queue::default_queue_path();
1332 if let Err(error) = crate::queue::append(&queue_path, action) {
1333 return (
1334 StatusCode::INTERNAL_SERVER_ERROR,
1335 Json(json!({"error": format!("append to queue: {error}")})),
1336 );
1337 }
1338 (
1339 StatusCode::OK,
1340 Json(json!({
1341 "ok": true,
1342 "queue_file": queue_path.display().to_string(),
1343 "queued_at": queued_at,
1344 "next_step": "run `vela queue sign` to apply queued drafts",
1345 })),
1346 )
1347}
1348
/// POST handler: accept an artifact packet from an external agent, validate
/// it, import it into the on-disk frontier, then reload the in-memory
/// project so subsequent reads see the new state.
///
/// Query params: `actor` (defaults to the carina write-target agent id) and
/// `apply_artifacts` ("true"/"1" to apply, otherwise false).
async fn http_from_carina(
    State(state): State<AppState>,
    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
    Json(body): Json<Value>,
) -> (StatusCode, Json<Value>) {
    // Importing writes to disk, so a concrete single frontier path is required.
    let path = match &state.source_path {
        Some(p) => p.clone(),
        None => {
            return (
                StatusCode::BAD_REQUEST,
                Json(json!({
                    "error": "agent write target requires a single-file or single-repo frontier (`vela serve <path> --http <port>`)"
                })),
            );
        }
    };
    let actor = params
        .get("actor")
        .cloned()
        .unwrap_or_else(|| "agent:carina-write-target".to_string());
    let apply_artifacts = params
        .get("apply_artifacts")
        .map(|v| v == "true" || v == "1")
        .unwrap_or(false);

    // Parse then validate the packet; both failures are the client's fault.
    let packet: crate::artifact_to_state::ArtifactPacket =
        match serde_json::from_value(body.clone()) {
            Ok(p) => p,
            Err(e) => {
                return (
                    StatusCode::BAD_REQUEST,
                    Json(json!({"error": format!("packet parse: {e}")})),
                );
            }
        };
    let packet = match packet.validate() {
        Ok(p) => p,
        Err(e) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(json!({"error": format!("packet validate: {e}")})),
            );
        }
    };

    // Write the canonical (re-serialized) packet to a temp file; the importer
    // consumes a file path, not an in-memory value. The NamedTempFile guard
    // keeps the file alive until the end of this handler.
    let tmp = match tempfile::NamedTempFile::new() {
        Ok(t) => t,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": format!("tempfile: {e}")})),
            );
        }
    };
    let canonical = match serde_json::to_vec_pretty(&packet) {
        Ok(b) => b,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": format!("re-serialize: {e}")})),
            );
        }
    };
    if let Err(e) = std::fs::write(tmp.path(), &canonical) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": format!("write tempfile: {e}")})),
        );
    }

    // NOTE(review): this acquires and immediately releases the project lock —
    // it waits out in-flight handlers but does NOT hold the lock while the
    // on-disk import below runs. Presumably intentional (the import works on
    // the file, not the in-memory project), but confirm no other writer can
    // touch `path` concurrently.
    drop(state.project.lock().await);
    let report = match crate::artifact_to_state::import_packet_at_path(
        &path,
        tmp.path(),
        &actor,
        apply_artifacts,
    ) {
        Ok(r) => r,
        Err(e) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(json!({"error": format!("import: {e}")})),
            );
        }
    };

    // Reload from disk and swap the in-memory project so reads reflect the
    // import; materialize derived source/evidence state before publishing.
    let mut reloaded = match crate::repo::load_from_path(&path) {
        Ok(p) => p,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": format!("reload after import: {e}")})),
            );
        }
    };
    crate::sources::materialize_project(&mut reloaded);
    {
        let mut guard = state.project.lock().await;
        *guard = reloaded;
    }

    (
        StatusCode::OK,
        Json(json!({
            "ok": true,
            "actor": actor,
            "apply_artifacts": apply_artifacts,
            "report": report,
        })),
    )
}
1482
/// Resolve the directory holding the workbench web assets.
///
/// `VELA_WEB_DIR` overrides everything; otherwise `web/` under the current
/// working directory is returned (whether or not it exists — matching the
/// previous fallback behavior).
fn workbench_web_dir() -> PathBuf {
    if let Ok(path) = std::env::var("VELA_WEB_DIR") {
        return PathBuf::from(path);
    }
    // The old candidate list probed `cwd/web`, `./web`, and `web`, but
    // relative paths are resolved against the CWD, so all three named the
    // same directory — and the final fallback was `cwd/web` too. The whole
    // loop therefore always produced `cwd.join("web")`.
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    cwd.join("web")
}
1504
1505fn requesting_clearance(
1518 headers: &HeaderMap,
1519 project: &Project,
1520) -> Option<crate::access_tier::AccessTier> {
1521 let actor_id = headers
1522 .get("x-vela-actor")
1523 .and_then(|v| v.to_str().ok())?
1524 .trim();
1525 if actor_id.is_empty() {
1526 return None;
1527 }
1528 let actor = project.actors.iter().find(|a| a.id == actor_id)?;
1529 actor.access_clearance
1530}
1531
1532async fn http_frontier(State(state): State<AppState>, headers: HeaderMap) -> Json<Value> {
1533 let project = state.project.lock().await;
1534 let clearance = requesting_clearance(&headers, &project);
1535 let view = crate::access_tier::redact_for_actor(&project, clearance);
1536 Json(serde_json::to_value(&view).unwrap_or_else(|_| json!({"error": "serialization failed"})))
1537}
1538
1539async fn http_findings(
1540 State(state): State<AppState>,
1541 headers: HeaderMap,
1542 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1543) -> Json<Value> {
1544 let project = state.project.lock().await;
1545 let clearance = requesting_clearance(&headers, &project);
1546 let view = crate::access_tier::redact_for_actor(&project, clearance);
1547
1548 let has_search = params.contains_key("query")
1556 || params.contains_key("entity")
1557 || params.contains_key("entity_type")
1558 || params.contains_key("type");
1559 if !has_search {
1560 let limit = params
1561 .get("limit")
1562 .and_then(|v| v.parse::<usize>().ok())
1563 .unwrap_or(view.findings.len());
1564 let findings: Vec<Value> = view
1565 .findings
1566 .iter()
1567 .take(limit)
1568 .map(|f| serde_json::to_value(f).unwrap_or_default())
1569 .collect();
1570 return Json(json!({
1571 "count": view.findings.len(),
1572 "returned": findings.len(),
1573 "findings": findings,
1574 }));
1575 }
1576
1577 let args = json!({
1578 "query": params.get("query"),
1579 "entity": params.get("entity"),
1580 "entity_type": params.get("entity_type"),
1581 "assertion_type": params.get("type"),
1582 "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(50),
1583 });
1584 match tool_search_findings(&args, &view) {
1585 Ok(text) => Json(json!({"result": text})),
1586 Err(error) => Json(json!({"error": error})),
1587 }
1588}
1589
/// GET /api/findings/:id — one finding (exact id or prefix match), enriched
/// with derived Belnap status, support/refute polynomials, and discord kinds.
async fn http_finding_by_id(
    State(state): State<AppState>,
    headers: HeaderMap,
    axum::extract::Path(id): axum::extract::Path<String>,
) -> (StatusCode, Json<Value>) {
    let project = state.project.lock().await;
    let clearance = requesting_clearance(&headers, &project);
    match project
        .findings
        .iter()
        .find(|finding| finding.id == id || finding.id.starts_with(&id))
    {
        Some(finding) => {
            // Under-cleared callers get the same 404 body as a genuinely
            // missing finding, so the finding's existence is not leaked.
            if !crate::access_tier::actor_may_read(finding.access_tier, clearance) {
                return (
                    StatusCode::NOT_FOUND,
                    Json(json!({"error": format!("Finding not found: {id}")})),
                );
            }
            // Derived enrichment: status provenance (support/refute terms,
            // Belnap status) and discord kinds for this finding.
            let sp =
                crate::provenance_compute::status_provenance_for_finding(&project, &finding.id);
            let belnap = sp.derive_status();
            let discord =
                crate::discord_compute::compute_discord_for_finding(&project, &finding.id);
            let discord_kinds: Vec<String> =
                discord.iter().map(|k| k.as_str().to_string()).collect();
            // Serialize the finding, then graft the derived fields onto the
            // resulting JSON object (no-op if serialization didn't yield one).
            let mut value = serde_json::to_value(finding).unwrap_or_default();
            if let Some(map) = value.as_object_mut() {
                map.insert(
                    "belnap_status".to_string(),
                    serde_json::to_value(belnap).unwrap_or_default(),
                );
                map.insert(
                    "belnap_letter".to_string(),
                    json!(belnap.letter().to_string()),
                );
                map.insert(
                    "support_term_count".to_string(),
                    json!(sp.support.term_count()),
                );
                map.insert(
                    "refute_term_count".to_string(),
                    json!(sp.refute.term_count()),
                );
                map.insert("discord_kinds".to_string(), json!(discord_kinds));
                map.insert("discord_count".to_string(), json!(discord.len()));
                // Both structured and display renderings of the polynomials.
                map.insert(
                    "support_polynomial".to_string(),
                    serde_json::to_value(&sp.support).unwrap_or_default(),
                );
                map.insert(
                    "refute_polynomial".to_string(),
                    serde_json::to_value(&sp.refute).unwrap_or_default(),
                );
                map.insert(
                    "support_polynomial_display".to_string(),
                    json!(format!("{}", sp.support)),
                );
                map.insert(
                    "refute_polynomial_display".to_string(),
                    json!(format!("{}", sp.refute)),
                );
            }
            (StatusCode::OK, Json(value))
        }
        None => (
            StatusCode::NOT_FOUND,
            Json(json!({"error": format!("Finding not found: {id}")})),
        ),
    }
}
1679
1680async fn http_contradictions(State(state): State<AppState>) -> Json<Value> {
1681 let project = state.project.lock().await;
1682 Json(
1683 serde_json::from_str(&tool_list_contradictions(&project).unwrap_or_default())
1684 .unwrap_or_else(
1685 |_| json!({"result": tool_list_contradictions(&project).unwrap_or_default()}),
1686 ),
1687 )
1688}
1689
1690async fn http_discord(
1698 State(state): State<AppState>,
1699 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1700) -> Json<Value> {
1701 use crate::discord::DiscordKind;
1702 use crate::discord_compute::compute_discord_assignment;
1703
1704 let project = state.project.lock().await;
1705 let assignment = compute_discord_assignment(&project);
1706 let support = assignment.frontier_support();
1707 let filter = params.get("kind").cloned();
1708
1709 let mut rows: Vec<Value> = Vec::new();
1710 for context in support.iter() {
1711 let set = assignment.get(context);
1712 let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
1713 if let Some(f) = &filter
1714 && !kinds.iter().any(|k| k == f)
1715 {
1716 continue;
1717 }
1718 rows.push(json!({
1719 "finding_id": context,
1720 "discord_kinds": kinds,
1721 }));
1722 }
1723
1724 let mut histogram = serde_json::Map::new();
1725 for kind in DiscordKind::ALL {
1726 let count = assignment
1727 .iter()
1728 .filter(|(_, set)| set.contains(*kind))
1729 .count();
1730 if count > 0 {
1731 histogram.insert(kind.as_str().to_string(), json!(count));
1732 }
1733 }
1734
1735 let frontier_id = project
1736 .frontier_id
1737 .clone()
1738 .unwrap_or_else(|| String::from("<unknown>"));
1739
1740 Json(json!({
1741 "frontier_id": frontier_id,
1742 "total_findings": project.findings.len(),
1743 "frontier_support_size": support.len(),
1744 "filtered_row_count": rows.len(),
1745 "filter_kind": filter,
1746 "histogram": Value::Object(histogram),
1747 "rows": rows,
1748 }))
1749}
1750
1751async fn http_health(State(state): State<AppState>) -> Json<Value> {
1752 let project = state.project.lock().await;
1753 Json(json!({
1754 "ok": true,
1755 "frontier": {
1756 "name": project.project.name,
1757 "findings": project.stats.findings,
1758 "events": project.events.len(),
1759 }
1760 }))
1761}
1762
1763async fn http_artifacts(State(state): State<AppState>) -> Json<Value> {
1764 let project = state.project.lock().await;
1765 Json(json!({
1766 "ok": true,
1767 "count": project.artifacts.len(),
1768 "artifacts": project.artifacts,
1769 }))
1770}
1771
1772async fn http_artifact_audit(State(state): State<AppState>) -> Json<Value> {
1773 let source_path = state.source_path.clone();
1774 let project = state.project.lock().await;
1775 let Some(path) = source_path else {
1776 return Json(json!({
1777 "ok": false,
1778 "available": false,
1779 "issues": [],
1780 "error": "artifact audit requires a single frontier source",
1781 }));
1782 };
1783 Json(
1784 serde_json::to_value(crate::artifact_audit::audit_artifacts(&path, &project))
1785 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1786 )
1787}
1788
1789async fn http_decision_brief(State(state): State<AppState>) -> Json<Value> {
1790 let source_path = state.source_path.clone();
1791 let project = state.project.lock().await;
1792 let Some(path) = source_path else {
1793 return Json(json!({
1794 "ok": false,
1795 "available": false,
1796 "projection": null,
1797 "issues": [],
1798 "error": "decision projections require a single frontier source",
1799 }));
1800 };
1801 Json(
1802 serde_json::to_value(decision::load_decision_brief(&path, &project))
1803 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1804 )
1805}
1806
1807async fn http_trials(State(state): State<AppState>) -> Json<Value> {
1808 let source_path = state.source_path.clone();
1809 let project = state.project.lock().await;
1810 let Some(path) = source_path else {
1811 return Json(json!({
1812 "ok": false,
1813 "available": false,
1814 "projection": null,
1815 "issues": [],
1816 "error": "trial projections require a single frontier source",
1817 }));
1818 };
1819 Json(
1820 serde_json::to_value(decision::load_trial_outcomes(&path, &project))
1821 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1822 )
1823}
1824
1825async fn http_source_verification(State(state): State<AppState>) -> Json<Value> {
1826 let source_path = state.source_path.clone();
1827 let project = state.project.lock().await;
1828 let Some(path) = source_path else {
1829 return Json(json!({
1830 "ok": false,
1831 "available": false,
1832 "projection": null,
1833 "issues": [],
1834 "error": "source verification requires a single frontier source",
1835 }));
1836 };
1837 Json(
1838 serde_json::to_value(decision::load_source_verification(&path, &project))
1839 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1840 )
1841}
1842
1843async fn http_source_ingest_plan(State(state): State<AppState>) -> Json<Value> {
1844 let source_path = state.source_path.clone();
1845 let project = state.project.lock().await;
1846 let Some(path) = source_path else {
1847 return Json(json!({
1848 "ok": false,
1849 "available": false,
1850 "projection": null,
1851 "issues": [],
1852 "error": "source ingest plan requires a single frontier source",
1853 }));
1854 };
1855 Json(
1856 serde_json::to_value(decision::load_source_ingest_plan(&path, &project))
1857 .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1858 )
1859}
1860
1861async fn http_gaps(State(state): State<AppState>) -> Json<Value> {
1862 let project = state.project.lock().await;
1863 let gaps = project
1864 .findings
1865 .iter()
1866 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
1867 .map(|finding| {
1868 json!({
1869 "id": finding.id,
1870 "assertion": finding.assertion.text,
1871 "confidence": finding.confidence.score,
1872 "conditions": finding.conditions.text,
1873 "source": finding.provenance.title,
1874 })
1875 })
1876 .collect::<Vec<_>>();
1877 Json(json!({
1878 "ok": true,
1879 "count": gaps.len(),
1880 "gaps": gaps,
1881 "caveats": ["Candidate gap rankings are review leads, not confirmed experiment targets."],
1882 }))
1883}
1884
1885async fn http_tensions(State(state): State<AppState>) -> Json<Value> {
1886 let project = state.project.lock().await;
1887 let lookup = project
1888 .findings
1889 .iter()
1890 .map(|finding| (finding.id.as_str(), finding))
1891 .collect::<HashMap<_, _>>();
1892 let mut tensions = Vec::new();
1893 for finding in &project.findings {
1894 for link in &finding.links {
1895 if link.link_type != "contradicts" {
1896 continue;
1897 }
1898 let target = lookup.get(link.target.as_str());
1899 tensions.push(json!({
1900 "source": {
1901 "id": finding.id,
1902 "assertion": finding.assertion.text,
1903 "confidence": finding.confidence.score,
1904 },
1905 "target": target.map(|target| json!({
1906 "id": target.id,
1907 "assertion": target.assertion.text,
1908 "confidence": target.confidence.score,
1909 })),
1910 "type": link.link_type,
1911 "note": link.note,
1912 "resolved": finding.flags.retracted || target.is_some_and(|target| target.flags.retracted),
1913 }));
1914 }
1915 }
1916 Json(json!({
1917 "ok": true,
1918 "count": tensions.len(),
1919 "tensions": tensions,
1920 "caveats": ["Candidate tensions are review surfaces, not definitive contradictions."],
1921 }))
1922}
1923
1924async fn http_observer(
1925 State(state): State<AppState>,
1926 axum::extract::Path(policy): axum::extract::Path<String>,
1927 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1928) -> Json<Value> {
1929 let project = state.project.lock().await;
1930 let args = json!({
1931 "policy": policy,
1932 "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(20),
1933 });
1934 match tool_apply_observer(&args, &project) {
1935 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1936 Err(error) => Json(json!({"error": error})),
1937 }
1938}
1939
1940async fn http_propagate(
1941 State(state): State<AppState>,
1942 axum::extract::Path(id): axum::extract::Path<String>,
1943) -> Json<Value> {
1944 let project = state.project.lock().await;
1945 let args = json!({"finding_id": id});
1946 match tool_propagate_retraction(&args, &project) {
1947 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1948 Err(error) => Json(json!({"error": error})),
1949 }
1950}
1951
1952async fn http_bridges(
1953 State(state): State<AppState>,
1954 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1955) -> Json<Value> {
1956 let project = state.project.lock().await;
1957 let args = json!({
1958 "min_categories": params.get("min_categories").and_then(|v| v.parse::<u64>().ok()).unwrap_or(2),
1959 "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(15),
1960 });
1961 match tool_find_bridges(&args, &project) {
1962 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1963 Err(error) => Json(json!({"error": error})),
1964 }
1965}
1966
1967async fn http_stats(State(state): State<AppState>) -> Json<Value> {
1968 let project = state.project.lock().await;
1969 Json(json!({
1970 "frontier": {
1971 "name": project.project.name,
1972 "compiled_at": project.project.compiled_at,
1973 "compiler": project.project.compiler,
1974 },
1975 "stats": project.stats,
1976 "signals": signals::analyze(&project, &[]).signals,
1977 }))
1978}
1979
1980async fn http_frontiers(State(state): State<AppState>) -> Json<Value> {
1981 Json(
1982 serde_json::from_str(&frontier_index_json(&state.project_infos).unwrap_or_default())
1983 .unwrap_or_else(|_| json!({"frontier_count": 0, "frontiers": []})),
1984 )
1985}
1986
1987async fn http_pubmed(
1988 State(state): State<AppState>,
1989 axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1990) -> Json<Value> {
1991 let args = json!({"query": params.get("query").cloned().unwrap_or_default()});
1992 match tool_check_pubmed(&args, &state.client).await {
1993 Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1994 Err(error) => Json(json!({"error": error})),
1995 }
1996}
1997
1998async fn http_tools_list() -> Json<Value> {
1999 Json(tool_registry::mcp_tools_json())
2000}
2001
2002async fn http_tool_call(
2003 State(state): State<AppState>,
2004 Json(body): Json<Value>,
2005) -> (StatusCode, Json<Value>) {
2006 let name = body["name"].as_str().unwrap_or_default();
2007 let args = &body["arguments"];
2008 let started = std::time::Instant::now();
2009 let (result, snapshot) = execute_tool(
2010 name,
2011 args,
2012 &state.project,
2013 &state.client,
2014 &state.project_infos,
2015 state.source_path.as_deref(),
2016 )
2017 .await;
2018 match result {
2019 Ok(text) => {
2020 let output = ToolResult::from_text(
2021 name,
2022 text,
2023 started.elapsed().as_millis(),
2024 false,
2025 snapshot.as_ref(),
2026 );
2027 (
2028 StatusCode::OK,
2029 Json(json!({
2030 "result": output.markdown,
2031 "tool": output.tool,
2032 "ok": output.ok,
2033 "data": output.data,
2034 "markdown": output.markdown,
2035 "signals": output.signals,
2036 "caveats": output.caveats,
2037 "duration_ms": output.duration_ms,
2038 "metadata": output.metadata(),
2039 })),
2040 )
2041 }
2042 Err(error) => {
2043 let output = ToolResult::from_text(
2044 name,
2045 error,
2046 started.elapsed().as_millis(),
2047 true,
2048 snapshot.as_ref(),
2049 );
2050 (
2051 StatusCode::INTERNAL_SERVER_ERROR,
2052 Json(json!({
2053 "error": output.markdown,
2054 "tool": output.tool,
2055 "ok": output.ok,
2056 "data": output.data,
2057 "markdown": output.markdown,
2058 "signals": output.signals,
2059 "caveats": output.caveats,
2060 "duration_ms": output.duration_ms,
2061 "metadata": output.metadata(),
2062 })),
2063 )
2064 }
2065 }
2066}
2067
2068fn tool_search_findings(args: &Value, frontier: &Project) -> Result<String, String> {
2069 let query = args["query"].as_str().map(str::to_lowercase);
2070 let entity = args["entity"].as_str().map(str::to_lowercase);
2071 let entity_type = args["entity_type"].as_str().map(str::to_lowercase);
2072 let assertion_type = args["assertion_type"].as_str().map(str::to_lowercase);
2073 let limit = args["limit"].as_u64().unwrap_or(20) as usize;
2074 let results = frontier
2075 .findings
2076 .iter()
2077 .filter(|finding| {
2078 query.as_ref().is_none_or(|q| {
2079 finding.assertion.text.to_lowercase().contains(q)
2080 || finding.conditions.text.to_lowercase().contains(q)
2081 || finding
2082 .assertion
2083 .entities
2084 .iter()
2085 .any(|e| e.name.to_lowercase().contains(q))
2086 }) && entity.as_ref().is_none_or(|needle| {
2087 finding
2088 .assertion
2089 .entities
2090 .iter()
2091 .any(|e| e.name.to_lowercase().contains(needle))
2092 }) && entity_type.as_ref().is_none_or(|needle| {
2093 finding
2094 .assertion
2095 .entities
2096 .iter()
2097 .any(|e| e.entity_type.to_lowercase() == *needle)
2098 }) && assertion_type
2099 .as_ref()
2100 .is_none_or(|needle| finding.assertion.assertion_type.to_lowercase() == *needle)
2101 })
2102 .take(limit)
2103 .collect::<Vec<_>>();
2104
2105 if results.is_empty() {
2106 return Ok("No findings matched the search criteria.".to_string());
2107 }
2108 let mut out = format!("{} findings matched:\n\n", results.len());
2109 for finding in results {
2110 let entities = finding
2111 .assertion
2112 .entities
2113 .iter()
2114 .map(|e| format!("{} ({})", e.name, e.entity_type))
2115 .collect::<Vec<_>>();
2116 out.push_str(&format!(
2117 "**{}** [conf: {}, type: {}]\n{}\nEntities: {}\nReplicated: {} | Gap: {} | Contested: {}\nSource: {} ({})\n\n",
2118 finding.id,
2119 finding.confidence.score,
2120 finding.assertion.assertion_type,
2121 finding.assertion.text,
2122 entities.join(", "),
2123 finding.evidence.replicated,
2124 finding.flags.gap,
2125 finding.flags.contested,
2126 finding.provenance.title,
2127 finding.provenance.year.map(|y| y.to_string()).unwrap_or_else(|| "?".to_string()),
2128 ));
2129 }
2130 Ok(out)
2131}
2132
2133fn tool_get_finding(args: &Value, frontier: &Project) -> Result<String, String> {
2134 let id = args["id"].as_str().ok_or("Missing 'id' argument")?;
2135 let finding = frontier
2136 .findings
2137 .iter()
2138 .find(|finding| finding.id == id || finding.id.starts_with(id))
2139 .ok_or_else(|| format!("Finding '{id}' not found"))?;
2140 let mut context = state::finding_context(frontier, &finding.id)?;
2141 if let Value::Object(map) = &mut context {
2142 map.insert(
2143 "caveats".to_string(),
2144 json!([
2145 "Finding-local events are canonical state transitions; review_events are projection artifacts.",
2146 "Sources identify artifacts; evidence atoms identify source-grounded units that bear on the finding."
2147 ]),
2148 );
2149 }
2150 serde_json::to_string_pretty(&context).map_err(|e| format!("Serialization error: {e}"))
2151}
2152
2153fn tool_get_finding_history(args: &Value, frontier: &Project) -> Result<String, String> {
2158 let id = args["id"].as_str().ok_or("Missing 'id' argument")?;
2159 let mut events: Vec<&crate::events::StateEvent> = frontier
2160 .events
2161 .iter()
2162 .filter(|e| {
2163 e.target.r#type == "finding" && (e.target.id == id || e.target.id.starts_with(id))
2164 })
2165 .collect();
2166 events.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
2167 let payload = json!({
2168 "finding_id": id,
2169 "event_count": events.len(),
2170 "events": events,
2171 "caveats": [
2172 "Events are the canonical state-transition log; events without a 'finding' target are excluded.",
2173 "Use payload.new_finding_id on finding.superseded events to walk forward in the supersedes chain."
2174 ],
2175 });
2176 serde_json::to_string_pretty(&payload).map_err(|e| format!("Serialization error: {e}"))
2177}
2178
2179fn tool_list_gaps(frontier: &Project) -> Result<String, String> {
2180 let gaps = frontier
2181 .findings
2182 .iter()
2183 .filter(|finding| finding.flags.gap)
2184 .collect::<Vec<_>>();
2185 if gaps.is_empty() {
2186 return Ok("No gap-flagged findings in this frontier.".to_string());
2187 }
2188 let mut out = format!(
2189 "{} candidate gap review leads:\nTreat these as navigation signals, not confirmed experiment targets.\n\n",
2190 gaps.len()
2191 );
2192 for finding in gaps {
2193 out.push_str(&format!(
2194 "**{}** [conf: {}]\n{}\nConditions: {}\n\n",
2195 finding.id, finding.confidence.score, finding.assertion.text, finding.conditions.text
2196 ));
2197 }
2198 Ok(out)
2199}
2200
2201fn tool_list_contradictions(frontier: &Project) -> Result<String, String> {
2202 let lookup = frontier
2203 .findings
2204 .iter()
2205 .map(|finding| (finding.id.as_str(), finding))
2206 .collect::<HashMap<_, _>>();
2207 let mut contradictions = Vec::new();
2208 for finding in &frontier.findings {
2209 for link in &finding.links {
2210 if matches!(link.link_type.as_str(), "contradicts" | "disputes") {
2211 let target = lookup
2212 .get(link.target.as_str())
2213 .map(|f| f.assertion.text.as_str())
2214 .unwrap_or("(unknown target)");
2215 contradictions.push(format!(
2216 "**{}** {} **{}**\n {} --[{}]--> {}\n Note: {}\n",
2217 finding.id,
2218 link.link_type,
2219 link.target,
2220 trunc(&finding.assertion.text, 80),
2221 link.link_type,
2222 trunc(target, 80),
2223 link.note,
2224 ));
2225 }
2226 }
2227 }
2228 if contradictions.is_empty() {
2229 return Ok("No candidate contradiction links in this frontier.".to_string());
2230 }
2231 Ok(format!(
2232 "{} candidate contradiction links:\n\n{}",
2233 contradictions.len(),
2234 contradictions.join("\n")
2235 ))
2236}
2237
/// Build a pretty-printed JSON summary of the whole frontier: project
/// metadata, stats block, source/evidence/condition/proposal summaries,
/// proof state, the event log (count, summary, replay report), and the
/// signal analysis.
fn tool_frontier_stats(frontier: &Project) -> Result<String, String> {
    serde_json::to_string_pretty(&json!({
        "frontier": {
            "name": frontier.project.name,
            "description": frontier.project.description,
            "compiled_at": frontier.project.compiled_at,
            "compiler": frontier.project.compiler,
            "papers_processed": frontier.project.papers_processed,
            "errors": frontier.project.errors,
        },
        "stats": frontier.stats,
        "source_registry": sources::source_summary(frontier),
        "evidence_atoms": sources::evidence_summary(frontier),
        "conditions": sources::condition_summary(frontier),
        "proposals": crate::proposals::summary(frontier),
        "proof_state": frontier.proof_state,
        "events": {
            "count": frontier.events.len(),
            "summary": events::summarize(frontier),
            "replay": events::replay_report(frontier),
        },
        "signals": signals::analyze(frontier, &[]).signals,
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
2263
2264fn tool_find_bridges(args: &Value, frontier: &Project) -> Result<String, String> {
2265 let min_categories = args["min_categories"].as_u64().unwrap_or(2) as usize;
2266 let limit = args["limit"].as_u64().unwrap_or(15) as usize;
2267 let mut entity_categories = HashMap::<String, HashSet<String>>::new();
2268 let mut entity_counts = HashMap::<String, usize>::new();
2269 for finding in &frontier.findings {
2270 for entity in &finding.assertion.entities {
2271 let key = entity.name.to_lowercase();
2272 entity_categories
2273 .entry(key.clone())
2274 .or_default()
2275 .insert(finding.assertion.assertion_type.clone());
2276 *entity_counts.entry(key).or_default() += 1;
2277 }
2278 }
2279 let mut bridges = entity_categories
2280 .iter()
2281 .filter(|(name, categories)| {
2282 categories.len() >= min_categories && !bridge::is_obvious(name)
2283 })
2284 .map(|(name, categories)| {
2285 json!({
2286 "entity": name,
2287 "categories": categories.iter().cloned().collect::<Vec<_>>(),
2288 "category_count": categories.len(),
2289 "finding_count": entity_counts.get(name).copied().unwrap_or(0),
2290 })
2291 })
2292 .collect::<Vec<_>>();
2293 bridges.sort_by(|a, b| {
2294 b["category_count"]
2295 .as_u64()
2296 .unwrap_or(0)
2297 .cmp(&a["category_count"].as_u64().unwrap_or(0))
2298 });
2299 bridges.truncate(limit);
2300 serde_json::to_string_pretty(&json!({"count": bridges.len(), "bridges": bridges}))
2301 .map_err(|e| format!("Serialization error: {e}"))
2302}
2303
2304fn tool_propagate_retraction(args: &Value, frontier: &Project) -> Result<String, String> {
2305 let id = args["finding_id"]
2306 .as_str()
2307 .ok_or("Missing 'finding_id' argument")?;
2308 let target = frontier
2309 .findings
2310 .iter()
2311 .find(|finding| finding.id == id || finding.id.starts_with(id))
2312 .ok_or_else(|| format!("Finding '{id}' not found"))?;
2313
2314 let reverse_idx = frontier.build_reverse_dep_index();
2321 let dependent_ids = reverse_idx.dependents_of(&target.id);
2322 let id_to_finding: std::collections::HashMap<&str, &crate::bundle::FindingBundle> = frontier
2323 .findings
2324 .iter()
2325 .map(|f| (f.id.as_str(), f))
2326 .collect();
2327
2328 let mut affected = Vec::new();
2329 for dep_id in dependent_ids {
2330 let Some(dependent) = id_to_finding.get(dep_id.as_str()) else {
2331 continue;
2332 };
2333 for link in &dependent.links {
2334 if matches!(link.link_type.as_str(), "supports" | "depends") && link.target == target.id
2335 {
2336 affected.push(json!({
2337 "id": dependent.id,
2338 "assertion": trunc(&dependent.assertion.text, 100),
2339 "link_type": link.link_type,
2340 }));
2341 }
2342 }
2343 }
2344 serde_json::to_string_pretty(&json!({
2345 "retracted": {"id": target.id, "assertion": trunc(&target.assertion.text, 120)},
2346 "directly_affected": affected.len(),
2347 "affected_findings": affected,
2348 "caveat": "Retraction impact is simulated over declared dependency links.",
2349 }))
2350 .map_err(|e| format!("Serialization error: {e}"))
2351}
2352
2353fn tool_apply_observer(args: &Value, frontier: &Project) -> Result<String, String> {
2354 let policy_name = args["policy"].as_str().ok_or("Missing 'policy' argument")?;
2355 let limit = args["limit"].as_u64().unwrap_or(15) as usize;
2356 let policy = observer::policy_by_name(policy_name).unwrap_or_else(observer::academic);
2357 let view = observer::observe(&frontier.findings, &frontier.replications, &policy);
2358 let top = view
2359 .findings
2360 .iter()
2361 .take(limit)
2362 .map(|scored| {
2363 let finding = frontier
2364 .findings
2365 .iter()
2366 .find(|finding| finding.id == scored.finding_id);
2367 json!({
2368 "id": scored.finding_id,
2369 "original_confidence": scored.original_confidence,
2370 "observer_score": scored.observer_score,
2371 "rank": scored.rank,
2372 "assertion": finding.map(|f| trunc(&f.assertion.text, 100)).unwrap_or_default(),
2373 })
2374 })
2375 .collect::<Vec<_>>();
2376 serde_json::to_string_pretty(&json!({
2377 "policy": policy_name,
2378 "shown": top.len(),
2379 "hidden": view.hidden,
2380 "top_findings": top,
2381 "caveat": "Observer output is policy-weighted reranking, not definitive disagreement.",
2382 }))
2383 .map_err(|e| format!("Serialization error: {e}"))
2384}
2385
2386async fn tool_check_pubmed(args: &Value, client: &Client) -> Result<String, String> {
2387 let query = args["query"].as_str().ok_or("Missing 'query' argument")?;
2388 let count = bridge::check_novelty(client, query).await?;
2389 serde_json::to_string_pretty(&json!({
2390 "query": query,
2391 "pubmed_results": count,
2392 "rough_prior_art_clear": count == 0,
2393 "caveat": "PubMed counts are rough prior-art signals, not proof of novelty.",
2394 }))
2395 .map_err(|e| format!("Serialization error: {e}"))
2396}
2397
2398fn frontier_index_json(project_infos: &[ProjectInfo]) -> Result<String, String> {
2399 let frontiers = project_infos
2400 .iter()
2401 .map(|info| {
2402 json!({
2403 "name": info.name,
2404 "file": info.file,
2405 "findings": info.findings_count,
2406 "links": info.links_count,
2407 "papers": info.papers,
2408 })
2409 })
2410 .collect::<Vec<_>>();
2411 serde_json::to_string_pretty(&json!({
2412 "frontier_count": frontiers.len(),
2413 "frontiers": frontiers,
2414 }))
2415 .map_err(|e| format!("Serialization error: {e}"))
2416}
2417
/// Trace a finding's provenance end-to-end: linked sources, evidence atoms,
/// condition records, proposals, event history, declared links, and review
/// state, assembled into a single pretty-printed JSON report.
///
/// Args (JSON): `finding_id` (required; full id or prefix) and `depth`
/// (default 2), which only scales how many links are listed (depth * 10,
/// minimum 10) — it is not a graph-traversal depth.
fn tool_trace_evidence_chain(args: &Value, frontier: &Project) -> Result<String, String> {
    let id = args["finding_id"]
        .as_str()
        .ok_or("Missing 'finding_id' argument")?;
    let depth = args["depth"].as_u64().unwrap_or(2) as usize;
    // id -> finding map, reused both for the initial lookup and for
    // resolving link targets to their assertion text below.
    let lookup = frontier
        .findings
        .iter()
        .map(|finding| (finding.id.as_str(), finding))
        .collect::<HashMap<_, _>>();
    // Exact-id hit first; otherwise fall back to a linear prefix scan
    // (CLI convenience for abbreviated ids).
    let finding = lookup
        .get(id)
        .copied()
        .or_else(|| {
            frontier
                .findings
                .iter()
                .find(|finding| finding.id.starts_with(id))
        })
        .ok_or_else(|| format!("Finding '{id}' not found"))?;
    // Cap the listed links at depth*10 (at least 10); unresolvable link
    // targets render with a null target_assertion rather than failing.
    let links = finding
        .links
        .iter()
        .take(depth.saturating_mul(10).max(10))
        .map(|link| {
            let target = lookup.get(link.target.as_str());
            json!({
                "target": link.target,
                "type": link.link_type,
                "note": link.note,
                "target_assertion": target.map(|f| trunc(&f.assertion.text, 120)),
            })
        })
        .collect::<Vec<_>>();
    let evidence_span_count = finding.evidence.evidence_spans.len();
    // Best available citation handle: DOI, then PMID, then the raw title.
    let source_ref = finding
        .provenance
        .doi
        .as_deref()
        .or(finding.provenance.pmid.as_deref())
        .unwrap_or(&finding.provenance.title);
    // A missing review record is treated the same as an unreviewed one.
    let review_state = finding
        .provenance
        .review
        .as_ref()
        .map(|review| {
            if review.reviewed {
                "reviewed"
            } else {
                "pending_review"
            }
        })
        .unwrap_or("pending_review");
    // Gather every record linked to this finding across the other stores.
    let finding_events = events::events_for_finding(frontier, &finding.id);
    let linked_sources = sources::sources_for_finding(frontier, &finding.id);
    let linked_atoms = sources::evidence_atoms_for_finding(frontier, &finding.id);
    let linked_conditions = sources::condition_records_for_finding(frontier, &finding.id);
    let linked_proposals = crate::proposals::proposals_for_finding(frontier, &finding.id);
    // "source_to_state" lays the same data out as ordered pipeline steps
    // (source -> evidence -> finding -> links -> review) for the client.
    serde_json::to_string_pretty(&json!({
        "finding": {"id": finding.id, "assertion": finding.assertion.text},
        "sources": linked_sources,
        "evidence_atoms": linked_atoms,
        "condition_records": linked_conditions,
        "proposals": linked_proposals,
        "source_to_state": [
            {"step": "source", "value": linked_sources, "fallback": source_ref},
            {"step": "evidence_atom", "value": linked_atoms},
            {"step": "condition_boundary", "value": linked_conditions},
            {"step": "proposal_lineage", "value": linked_proposals},
            {"step": "legacy_evidence", "value": {"type": finding.evidence.evidence_type, "spans": evidence_span_count, "method": finding.evidence.method}},
            {"step": "finding", "value": {"id": finding.id, "assertion_type": finding.assertion.assertion_type, "confidence": finding.confidence.score}},
            {"step": "event_history", "value": finding_events},
            {"step": "links", "value": {"declared": finding.links.len()}},
            {"step": "review_state", "value": review_state}
        ],
        "state_events": finding_events,
        "path_explanation": format!(
            "source -> evidence spans ({}) -> finding {} -> {} declared links -> {}",
            evidence_span_count,
            finding.id,
            finding.links.len(),
            review_state
        ),
        "depth": depth,
        "links": links,
        "caveat": "Evidence-chain strength is heuristic and depends on declared links.",
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
2507
2508fn clone_project(project: &Project) -> Project {
2509 serde_json::from_value(serde_json::to_value(project).unwrap_or_default()).unwrap_or_else(|_| {
2510 project::assemble("unavailable", Vec::new(), 0, 1, "failed to clone frontier")
2511 })
2512}
2513
2514fn json_rpc_result(id: &Option<Value>, result: Value) -> Value {
2515 json!({"jsonrpc": "2.0", "id": id, "result": result})
2516}
2517
2518fn json_rpc_error(id: &Option<Value>, code: i32, message: &str) -> Value {
2519 json!({"jsonrpc": "2.0", "id": id, "error": {"code": code, "message": message}})
2520}
2521
2522fn trunc(s: &str, max: usize) -> String {
2523 if s.len() <= max {
2524 return s.to_string();
2525 }
2526 let mut end = max;
2527 while end > 0 && !s.is_char_boundary(end) {
2528 end -= 1;
2529 }
2530 format!("{}...", &s[..end])
2531}