// vela_protocol/serve.rs
1//! Read-only MCP/HTTP frontier server.
2
3#![allow(clippy::too_many_lines)]
4
5use std::collections::{HashMap, HashSet};
6use std::io::{self, BufRead, Write};
7use std::path::{Path, PathBuf};
8use std::sync::Arc;
9
10use axum::{
11    Json, Router,
12    extract::State,
13    http::{HeaderMap, StatusCode},
14    routing::{get, post},
15};
16use reqwest::Client;
17use serde::Serialize;
18use serde_json::{Value, json};
19use tokio::sync::Mutex;
20use tower_http::cors::CorsLayer;
21
22use crate::bundle::FindingBundle;
23use crate::project::{self, ConfidenceDistribution, Project, ProjectStats};
24use crate::{bridge, decision, events, observer, repo, signals, sources, state, tool_registry};
25
/// Where the served frontier data comes from.
pub enum ProjectSource {
    /// A single frontier file (or repo directory). In this mode the path is
    /// retained as a write-back target for signed writes.
    Single(PathBuf),
    /// A directory of frontiers that are loaded and merged; in this mode
    /// writes are rejected (no single write-back target exists).
    Directory(PathBuf),
}
30
31impl ProjectSource {
32    pub fn from_args(single: Option<&Path>, dir: Option<&Path>) -> Self {
33        if let Some(d) = dir {
34            Self::Directory(d.to_path_buf())
35        } else if let Some(s) = single {
36            Self::Single(s.to_path_buf())
37        } else {
38            eprintln!(
39                "{} provide either a frontier file or --frontiers <dir>",
40                crate::cli_style::err_prefix()
41            );
42            std::process::exit(1);
43        }
44    }
45}
46
/// Per-input summary captured at load time and returned alongside the
/// loaded (possibly merged) project.
#[derive(Clone)]
pub struct ProjectInfo {
    /// Display name taken from the frontier's own project metadata.
    pub name: String,
    /// Source identifier: the full path in single-file mode, the file stem
    /// in directory mode (see `load_projects`).
    pub file: String,
    /// Number of findings in this frontier.
    pub findings_count: usize,
    /// Link count as reported by the frontier's compiled stats.
    pub links_count: usize,
    /// Papers processed by the compiler for this frontier.
    pub papers: usize,
}
55
56pub fn load_projects(source: &ProjectSource) -> (Project, Vec<ProjectInfo>) {
57    match source {
58        ProjectSource::Single(path) => {
59            let mut frontier = repo::load_from_path(path).unwrap_or_else(|e| {
60                eprintln!(
61                    "{} failed to load frontier: {e}",
62                    crate::cli_style::err_prefix()
63                );
64                std::process::exit(1);
65            });
66            sources::materialize_project(&mut frontier);
67            let info = ProjectInfo {
68                name: frontier.project.name.clone(),
69                file: path.display().to_string(),
70                findings_count: frontier.findings.len(),
71                links_count: frontier.stats.links,
72                papers: frontier.project.papers_processed,
73            };
74            (frontier, vec![info])
75        }
76        ProjectSource::Directory(dir) => {
77            let mut entries: Vec<PathBuf> = std::fs::read_dir(dir)
78                .unwrap_or_else(|e| {
79                    eprintln!(
80                        "{} failed to read directory: {e}",
81                        crate::cli_style::err_prefix()
82                    );
83                    std::process::exit(1);
84                })
85                .filter_map(Result::ok)
86                .map(|entry| entry.path())
87                .filter(|path| {
88                    (path.is_dir() && path.join(".vela").exists())
89                        || path.extension().is_some_and(|ext| ext == "json")
90                })
91                .collect();
92            entries.sort();
93            if entries.is_empty() {
94                eprintln!("no frontier files found in {}", dir.display());
95                std::process::exit(1);
96            }
97
98            let mut named = Vec::new();
99            for path in &entries {
100                let mut frontier = repo::load_from_path(path).unwrap_or_else(|e| {
101                    eprintln!(
102                        "{} failed to load {}: {e}",
103                        crate::cli_style::err_prefix(),
104                        path.display()
105                    );
106                    std::process::exit(1);
107                });
108                sources::materialize_project(&mut frontier);
109                let name = path
110                    .file_stem()
111                    .unwrap_or_default()
112                    .to_string_lossy()
113                    .to_string();
114                named.push((name, frontier));
115            }
116            let infos = named
117                .iter()
118                .map(|(name, frontier)| ProjectInfo {
119                    name: frontier.project.name.clone(),
120                    file: name.clone(),
121                    findings_count: frontier.findings.len(),
122                    links_count: frontier.stats.links,
123                    papers: frontier.project.papers_processed,
124                })
125                .collect::<Vec<_>>();
126            (merge_projects(named), infos)
127        }
128    }
129}
130
/// Merge several named frontiers into one synthetic `Project`.
///
/// Sums `papers_processed`, `errors`, and per-category / per-link-type
/// counts; concatenates findings and then dedupes them by id, keeping the
/// copy with the higher confidence score; carries the v0.32+ kernel
/// collections through unchanged; and recomputes `ProjectStats` from the
/// deduped finding set.
fn merge_projects(frontiers: Vec<(String, Project)>) -> Project {
    let mut findings = Vec::<FindingBundle>::new();
    let mut categories = HashMap::<String, usize>::new();
    let mut link_types = HashMap::<String, usize>::new();
    let mut names = Vec::new();
    let mut papers_processed = 0usize;
    let mut errors = 0usize;
    // v0.36.2: preserve v0.32+ kernel objects across the merge.
    // Pre-v0.36.2, `replications`, `datasets`, `code_artifacts`,
    // `artifacts`, `predictions`, and `resolutions` were dropped during merge,
    // leaving the merged stats reading the legacy `evidence.replicated`
    // scalar instead of the structured collection.
    let mut replications = Vec::new();
    let mut datasets = Vec::new();
    let mut code_artifacts = Vec::new();
    let mut artifacts = Vec::new();
    let mut predictions = Vec::new();
    let mut resolutions = Vec::new();

    // Accumulate counters and concatenate collections across all inputs.
    for (name, frontier) in frontiers {
        names.push(name);
        papers_processed += frontier.project.papers_processed;
        errors += frontier.project.errors;
        for (category, count) in frontier.stats.categories {
            *categories.entry(category).or_default() += count;
        }
        for (link_type, count) in frontier.stats.link_types {
            *link_types.entry(link_type).or_default() += count;
        }
        findings.extend(frontier.findings);
        replications.extend(frontier.replications);
        datasets.extend(frontier.datasets);
        code_artifacts.extend(frontier.code_artifacts);
        artifacts.extend(frontier.artifacts);
        predictions.extend(frontier.predictions);
        resolutions.extend(frontier.resolutions);
    }

    // Dedupe by finding id: on collision, keep whichever copy has the
    // higher confidence score (first occurrence wins ties).
    let mut deduped = Vec::<FindingBundle>::new();
    let mut seen = HashMap::<String, usize>::new();
    for finding in findings {
        if let Some(existing) = seen.get(&finding.id).copied() {
            if finding.confidence.score > deduped[existing].confidence.score {
                deduped[existing] = finding;
            }
        } else {
            seen.insert(finding.id.clone(), deduped.len());
            deduped.push(finding);
        }
    }

    let links = deduped.iter().map(|finding| finding.links.len()).sum();
    // v0.36.2: count from the merged `replications` collection, with
    // legacy `evidence.replicated` as fall-through for findings without
    // structured records.
    let mut targets_with_success: HashSet<&str> = HashSet::new();
    let mut targets_with_any_record: HashSet<&str> = HashSet::new();
    for r in &replications {
        targets_with_any_record.insert(r.target_finding.as_str());
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.as_str());
        }
    }
    let replicated = deduped
        .iter()
        .filter(|finding| {
            // Structured records, when present for this finding, override
            // the legacy scalar entirely.
            if targets_with_any_record.contains(finding.id.as_str()) {
                targets_with_success.contains(finding.id.as_str())
            } else {
                finding.evidence.replicated
            }
        })
        .count();
    // Mean confidence over the deduped set, rounded to 3 decimal places;
    // 0.0 for an empty set (avoids 0/0).
    let avg_confidence = if deduped.is_empty() {
        0.0
    } else {
        (deduped
            .iter()
            .map(|finding| finding.confidence.score)
            .sum::<f64>()
            / deduped.len() as f64
            * 1000.0)
            .round()
            / 1000.0
    };
    let stats = ProjectStats {
        findings: deduped.len(),
        links,
        replicated,
        unreplicated: deduped.len().saturating_sub(replicated),
        avg_confidence,
        gaps: deduped.iter().filter(|finding| finding.flags.gap).count(),
        negative_space: deduped
            .iter()
            .filter(|finding| finding.flags.negative_space)
            .count(),
        contested: deduped
            .iter()
            .filter(|finding| finding.flags.contested)
            .count(),
        categories,
        link_types,
        human_reviewed: deduped
            .iter()
            .filter(|finding| {
                finding
                    .provenance
                    .review
                    .as_ref()
                    .is_some_and(|review| review.reviewed)
            })
            .count(),
        // NOTE(review): event/source/proposal counters are zeroed here;
        // presumably recomputed (or irrelevant) after the
        // `materialize_project` call below — confirm.
        review_event_count: 0,
        confidence_update_count: 0,
        event_count: 0,
        source_count: 0,
        evidence_atom_count: 0,
        condition_record_count: 0,
        proposal_count: 0,
        confidence_distribution: ConfidenceDistribution {
            high_gt_80: deduped
                .iter()
                .filter(|finding| finding.confidence.score > 0.8)
                .count(),
            medium_60_80: deduped
                .iter()
                .filter(|finding| (0.6..=0.8).contains(&finding.confidence.score))
                .count(),
            low_lt_60: deduped
                .iter()
                .filter(|finding| finding.confidence.score < 0.6)
                .count(),
        },
    };

    // Synthetic merged project: only the accumulated collections above are
    // carried through; event-log collections start empty.
    let mut project = Project {
        vela_version: project::VELA_SCHEMA_VERSION.to_string(),
        schema: project::VELA_SCHEMA_URL.to_string(),
        frontier_id: None,
        project: project::ProjectMeta {
            name: format!("merged: {}", names.join(", ")),
            description: format!("Merged from {} frontiers", names.len()),
            compiled_at: chrono::Utc::now().to_rfc3339(),
            compiler: project::VELA_COMPILER_VERSION.to_string(),
            papers_processed,
            errors,
            dependencies: Vec::new(),
        },
        stats,
        findings: deduped,
        sources: Vec::new(),
        evidence_atoms: Vec::new(),
        condition_records: Vec::new(),
        review_events: Vec::new(),
        confidence_updates: Vec::new(),
        events: Vec::new(),
        proposals: Vec::new(),
        proof_state: Default::default(),
        signatures: Vec::new(),
        actors: Vec::new(),
        replications,
        datasets,
        code_artifacts,
        artifacts,
        predictions,
        resolutions,
        peers: Vec::new(),
        negative_results: Vec::new(),
        trajectories: Vec::new(),
    };
    sources::materialize_project(&mut project);
    project
}
304
/// Serve the frontier as an MCP server over stdio (line-delimited JSON-RPC).
///
/// Reads one JSON-RPC request per line from stdin and writes one response
/// per line to stdout; exits when stdin closes or errors. `_backend` is
/// accepted for CLI symmetry but unused here.
///
/// NOTE(review): blocking stdin reads inside an async fn will pin the
/// executor thread; presumably acceptable for a dedicated stdio server
/// process — confirm.
pub async fn run(source: ProjectSource, _backend: Option<&str>) {
    dotenvy::dotenv().ok();
    let (frontier, project_infos) = load_projects(&source);
    // Write-back target: only meaningful in single-file mode.
    let source_path: Option<PathBuf> = match &source {
        ProjectSource::Single(path) => Some(path.clone()),
        ProjectSource::Directory(_) => None,
    };
    let frontier = Arc::new(Mutex::new(frontier));
    let client = Client::new();
    let stdin = io::stdin();
    let stdout = io::stdout();

    for line in stdin.lock().lines() {
        let Ok(line) = line else {
            break;
        };
        if line.trim().is_empty() {
            continue;
        }
        // Unparseable lines are silently skipped rather than answered with
        // a JSON-RPC parse error.
        let Ok(request) = serde_json::from_str::<Value>(&line) else {
            continue;
        };
        let id = request.get("id").cloned();
        let method = request["method"].as_str().unwrap_or_default();
        let response = match method {
            "initialize" => json_rpc_result(
                &id,
                json!({
                    "protocolVersion": "2024-11-05",
                    "capabilities": {"tools": {}},
                    "serverInfo": {"name": "vela", "version": project::VELA_SCHEMA_VERSION}
                }),
            ),
            // Notifications get no response by protocol.
            "notifications/initialized" => continue,
            "tools/list" => json_rpc_result(&id, json!({"tools": tool_registry::mcp_tools_json()})),
            "tools/call" => {
                let name = request["params"]["name"].as_str().unwrap_or_default();
                let args = request["params"]["arguments"].clone();
                handle_tool_call(
                    &id,
                    name,
                    &args,
                    &frontier,
                    &client,
                    &project_infos,
                    source_path.as_deref(),
                )
                .await
            }
            "ping" => json_rpc_result(&id, json!({})),
            _ => json_rpc_error(&id, -32601, "Method not found"),
        };
        // One compact JSON response per line, flushed immediately so the
        // client never blocks on buffering.
        let mut out = stdout.lock();
        let _ = serde_json::to_writer(&mut out, &response);
        let _ = out.write_all(b"\n");
        let _ = out.flush();
    }
}
363
/// Serve the frontier over HTTP on `0.0.0.0:{port}` via axum.
///
/// Mounts the read-only API routes, the queue/proposal write routes, and —
/// when `workbench` is set — a static file fallback for the local `web/`
/// tree. `backend` is accepted for CLI symmetry but unused. Exits the
/// process if the port cannot be bound; panics if the server itself fails.
pub async fn run_http(source: ProjectSource, backend: Option<&str>, port: u16, workbench: bool) {
    let _ = backend;
    dotenvy::dotenv().ok();
    let (frontier, project_infos) = load_projects(&source);
    // Write-back target: only meaningful in single-file mode; None in
    // directory mode, where writes are rejected.
    let source_path = match &source {
        ProjectSource::Single(path) => Some(path.clone()),
        ProjectSource::Directory(_) => None,
    };
    let state = AppState {
        project: Arc::new(Mutex::new(frontier)),
        project_infos,
        client: Client::new(),
        source_path,
    };

    let mut app = Router::new()
        .route("/health", get(http_health))
        .route("/healthz", get(http_health))
        .route("/api/frontier", get(http_frontier))
        .route("/api/findings", get(http_findings))
        .route("/api/findings/{id}", get(http_finding_by_id))
        .route("/api/contradictions", get(http_contradictions))
        // v0.97: HTTP mirror of `vela discord` CLI. Frontier-wide
        // discord report computed read-only from the live event log.
        // Optional ?kind=<DiscordKind> filter.
        .route("/api/discord", get(http_discord))
        .route("/api/tensions", get(http_tensions))
        .route("/api/gaps", get(http_gaps))
        .route("/api/artifacts", get(http_artifacts))
        .route("/api/artifact-audit", get(http_artifact_audit))
        .route("/api/decision-brief", get(http_decision_brief))
        .route("/api/trials", get(http_trials))
        .route("/api/source-verification", get(http_source_verification))
        .route("/api/source-ingest-plan", get(http_source_ingest_plan))
        .route("/api/observer/{policy}", get(http_observer))
        .route("/api/propagate/{id}", get(http_propagate))
        .route("/api/hypotheses", get(http_bridges))
        .route("/api/stats", get(http_stats))
        .route("/api/frontiers", get(http_frontiers))
        .route("/api/pubmed", get(http_pubmed))
        // Phase Q-r (v0.5): cursor-paginated event-log read for agent
        // loops and public consumers. The canonical event log is
        // already ordered and content-addressed, so the cursor is just
        // the last seen `vev_…`.
        .route("/api/events", get(http_events))
        // Phase R (v0.5): Workbench draft queue. Browser POSTs unsigned
        // intents here; `vela queue sign` is the only path that turns
        // them into signed canonical state. The Ed25519 key never
        // enters the browser.
        .route("/api/queue", post(http_queue_append))
        // v0.92: agent write target. POST a Carina ArtifactPacket
        // JSON; substrate validates, writes proposals to disk,
        // returns the new vpr_* ids. The single integration
        // surface for AI agents that produce structured
        // scientific output.
        .route("/api/proposals/from-carina", post(http_from_carina))
        .route("/api/tools", get(http_tools_list))
        .route("/mcp/tools", get(http_tools_list))
        .route("/api/tool", post(http_tool_call));

    // When --workbench, also serve the static `web/` directory at /
    // alongside the API. The canonical Workbench UI now lives in the
    // Astro site (vela-site.fly.dev/workbench) and proxies /api/* here;
    // --workbench remains for local development against any web/ tree.
    if workbench {
        let web_dir = workbench_web_dir();
        if web_dir.exists() {
            app = app.fallback_service(tower_http::services::ServeDir::new(web_dir));
        } else {
            eprintln!(
                "{} --workbench: web/ directory not found at expected location; serving API only",
                crate::cli_style::err_prefix()
            );
        }
    }

    // v0.107.5: explicit request-body cap. Closes the integrity
    // half of THREAT_MODEL.md A13 (resource exhaustion via large
    // packets). axum's default body limit is 2MB; we raise to 8MB
    // so a real Carina packet with several artifacts fits, then
    // pin the limit explicitly so a future axum default change
    // does not silently expose the surface. Localhost-only deploys
    // are bounded by this limit; remote deploys behind a reverse
    // proxy should layer rate limiting on top (the substrate does
    // not enforce per-actor or per-IP request budgets).
    let app = app
        .layer(axum::extract::DefaultBodyLimit::max(8 * 1024 * 1024))
        .layer(CorsLayer::permissive())
        .with_state(state);

    let addr = format!("0.0.0.0:{port}");
    eprintln!(
        "  {}",
        if workbench {
            format!("VELA · WORKBENCH :{port}").to_uppercase()
        } else {
            format!("VELA · SERVE · HTTP :{port}").to_uppercase()
        }
        .as_str()
    );
    eprintln!("  {}", crate::cli_style::tick_row(60));
    eprintln!("  listening on http://{addr}");
    if workbench {
        // v0.29: print the deep link the researcher actually opens.
        // The deployed Astro page accepts ?api=… and bypasses the hub
        // — same UI, local data. This was the v0.28 friction-pass
        // forcing function (Friction #1: "researcher with a local
        // frontier should not need to publish before reviewing in a
        // browser"). Same banner works against `npm run dev` at
        // localhost:4321 too.
        eprintln!("  workbench UI: https://vela-site.fly.dev/frontiers/view?api=http://{addr}");
        eprintln!(
            "                (or  http://localhost:4321/frontiers/view?api=http://{addr}  for a local site)"
        );
    }
    // v0.91: full endpoint enumeration so a fresh user opening
    // `vela serve --http` knows what they can hit. Grouped by
    // function rather than alphabetically.
    eprintln!("  endpoints:");
    eprintln!("    health:     GET  /health");
    eprintln!("    state:      GET  /api/frontier      /api/frontiers     /api/stats");
    eprintln!("    findings:   GET  /api/findings      /api/findings/{{id}}");
    eprintln!("                     (no params -> structured list; query=... -> search)");
    eprintln!("    events:     GET  /api/events");
    eprintln!("    artifacts:  GET  /api/artifacts     /api/artifact-audit");
    eprintln!("    discord:    GET  /api/contradictions /api/tensions     /api/gaps");
    eprintln!("                     /api/hypotheses (cross-frontier bridges)");
    eprintln!("                     /api/discord (frontier-wide discord report)");
    eprintln!(
        "    projections:GET  /api/decision-brief /api/trials       /api/source-verification"
    );
    eprintln!("                     /api/source-ingest-plan /api/observer/{{policy}}");
    eprintln!("                     /api/propagate/{{id}}     /api/pubmed");
    eprintln!("    queue:      POST /api/queue");
    eprintln!("    agent:      POST /api/proposals/from-carina (Carina artifact -> proposals)");
    eprintln!("    tools:      POST /api/tool/{{name}} (MCP-style tool dispatch)");
    let listener = tokio::net::TcpListener::bind(&addr)
        .await
        .unwrap_or_else(|e| {
            eprintln!(
                "{} failed to bind to {addr}: {e}",
                crate::cli_style::err_prefix()
            );
            std::process::exit(1);
        });
    axum::serve(listener, app).await.unwrap();
}
511
512pub fn check_tools(source: ProjectSource) -> Result<Value, String> {
513    let started = std::time::Instant::now();
514    let (frontier, _project_infos) = load_projects(&source);
515    let first_id = frontier.findings.first().map(|finding| finding.id.clone());
516    let mut checks = vec![
517        check_tool_result("frontier_stats", tool_frontier_stats(&frontier), started),
518        check_tool_result(
519            "search_findings",
520            tool_search_findings(&json!({"query": "amyloid", "limit": 3}), &frontier),
521            started,
522        ),
523        check_tool_result("list_gaps", tool_list_gaps(&frontier), started),
524        check_tool_result(
525            "list_contradictions",
526            tool_list_contradictions(&frontier),
527            started,
528        ),
529        check_tool_result(
530            "find_bridges",
531            tool_find_bridges(&json!({"limit": 5, "min_categories": 2}), &frontier),
532            started,
533        ),
534        check_tool_result(
535            "apply_observer",
536            tool_apply_observer(&json!({"policy": "academic", "limit": 5}), &frontier),
537            started,
538        ),
539        check_tool_result(
540            "propagate_retraction",
541            tool_propagate_retraction(&json!({"finding_id": "vf_missing"}), &frontier),
542            started,
543        ),
544    ];
545    if let Some(id) = first_id {
546        checks.push(check_tool_result(
547            "get_finding",
548            tool_get_finding(&json!({"id": id}), &frontier),
549            started,
550        ));
551        checks.push(check_tool_result(
552            "get_finding_history",
553            tool_get_finding_history(&json!({"id": id}), &frontier),
554            started,
555        ));
556        checks.push(check_tool_result(
557            "trace_evidence_chain",
558            tool_trace_evidence_chain(&json!({"finding_id": id}), &frontier),
559            started,
560        ));
561    }
562    let failures = checks
563        .iter()
564        .filter(|check| check.get("ok").and_then(Value::as_bool) != Some(true))
565        .filter_map(|check| {
566            check
567                .get("tool")
568                .and_then(Value::as_str)
569                .map(str::to_string)
570        })
571        .collect::<Vec<_>>();
572    let checked_tools = checks
573        .iter()
574        .filter_map(|check| check.get("tool").and_then(Value::as_str))
575        .map(str::to_string)
576        .collect::<Vec<_>>();
577    let registered_tools = tool_registry::all_tools()
578        .into_iter()
579        .map(|tool| tool.name)
580        .collect::<Vec<_>>();
581
582    Ok(json!({
583        "ok": failures.is_empty(),
584        "command": "serve --check-tools",
585        "schema": "vela.tool-check.v0",
586        "frontier": {
587            "name": frontier.project.name,
588            "findings": frontier.stats.findings,
589            "links": frontier.stats.links,
590        },
591        "summary": {
592            "checks": checks.len(),
593            "passed": checks.len().saturating_sub(failures.len()),
594            "failed": failures.len(),
595        },
596        "tool_count": checked_tools.len(),
597        "tools": checked_tools,
598        "registered_tool_count": registered_tools.len(),
599        "registered_tools": registered_tools,
600        "checks": checks,
601        "failures": failures,
602    }))
603}
604
/// Shared state handed to every axum handler.
#[derive(Clone)]
struct AppState {
    /// The loaded (possibly merged) frontier, behind an async mutex so
    /// write handlers can mutate it between requests.
    project: Arc<Mutex<Project>>,
    /// Per-input summaries captured at load time; static for the process.
    project_infos: Vec<ProjectInfo>,
    /// Outbound HTTP client shared across handlers (e.g. the PubMed proxy).
    client: Client,
    /// Phase Q-w (v0.5): when serving a single frontier file, this is
    /// the path to write back to after a successful signed write. None
    /// when `--frontiers <dir>` is used; in that mode all writes are
    /// rejected.
    source_path: Option<PathBuf>,
}
616
/// Envelope for one tool invocation's output, serialized into the MCP
/// response text and mirrored in the `_meta` payload.
#[derive(Debug, Clone, Serialize)]
struct ToolResult {
    /// Tool name as dispatched.
    tool: String,
    /// False when the tool returned an error.
    ok: bool,
    /// Output parsed as JSON, or `{"text": …}` when it is not valid JSON.
    data: Value,
    /// The raw output text, verbatim.
    markdown: String,
    /// Frontier-wide signals computed from the post-call project snapshot
    /// (empty when no snapshot was available).
    signals: Vec<signals::SignalItem>,
    /// Registry-declared caveats for this tool.
    caveats: Vec<String>,
    /// Wall-clock duration of the call, in milliseconds.
    duration_ms: u128,
}
627
628impl ToolResult {
629    fn from_text(
630        tool: &str,
631        text: String,
632        duration_ms: u128,
633        is_error: bool,
634        frontier: Option<&Project>,
635    ) -> Self {
636        let data = serde_json::from_str(&text).unwrap_or_else(|_| json!({"text": text}));
637        let signal_items = frontier
638            .map(|project| signals::analyze(project, &[]).signals)
639            .unwrap_or_default();
640        Self {
641            tool: tool.to_string(),
642            ok: !is_error,
643            data,
644            markdown: text,
645            signals: signal_items,
646            caveats: tool_registry::tool_caveats(tool),
647            duration_ms,
648        }
649    }
650
651    fn metadata(&self) -> Value {
652        json!({
653            "tool": self.tool,
654            "ok": self.ok,
655            "duration_ms": self.duration_ms,
656            "signals": self.signals,
657            "caveats": self.caveats,
658            "definition": tool_registry::get_tool(&self.tool),
659        })
660    }
661
662    fn to_json_text(&self) -> String {
663        serde_json::to_string_pretty(self).unwrap_or_else(|_| "{}".to_string())
664    }
665}
666
667async fn handle_tool_call(
668    id: &Option<Value>,
669    name: &str,
670    args: &Value,
671    frontier: &Arc<Mutex<Project>>,
672    client: &Client,
673    project_infos: &[ProjectInfo],
674    source_path: Option<&Path>,
675) -> Value {
676    let started = std::time::Instant::now();
677    let (result, snapshot) =
678        execute_tool(name, args, frontier, client, project_infos, source_path).await;
679    match result {
680        Ok(text) => {
681            let output = ToolResult::from_text(
682                name,
683                text,
684                started.elapsed().as_millis(),
685                false,
686                snapshot.as_ref(),
687            );
688            json_rpc_result(
689                id,
690                json!({
691                    "content": [{"type": "text", "text": output.to_json_text()}],
692                    "isError": false,
693                    "_meta": output.metadata()
694                }),
695            )
696        }
697        Err(error) => {
698            let output = ToolResult::from_text(
699                name,
700                error,
701                started.elapsed().as_millis(),
702                true,
703                snapshot.as_ref(),
704            );
705            json_rpc_result(
706                id,
707                json!({
708                    "content": [{"type": "text", "text": output.to_json_text()}],
709                    "isError": true,
710                    "_meta": output.metadata()
711                }),
712            )
713        }
714    }
715}
716
717async fn execute_tool(
718    name: &str,
719    args: &Value,
720    frontier: &Arc<Mutex<Project>>,
721    client: &Client,
722    _project_infos: &[ProjectInfo],
723    source_path: Option<&Path>,
724) -> (Result<String, String>, Option<Project>) {
725    match name {
726        "search_findings" => {
727            let project = frontier.lock().await;
728            (
729                tool_search_findings(args, &project),
730                Some(clone_project(&project)),
731            )
732        }
733        "get_finding" => {
734            let project = frontier.lock().await;
735            (
736                tool_get_finding(args, &project),
737                Some(clone_project(&project)),
738            )
739        }
740        "get_finding_history" => {
741            let project = frontier.lock().await;
742            (
743                tool_get_finding_history(args, &project),
744                Some(clone_project(&project)),
745            )
746        }
747        "list_gaps" => {
748            let project = frontier.lock().await;
749            (tool_list_gaps(&project), Some(clone_project(&project)))
750        }
751        "list_contradictions" => {
752            let project = frontier.lock().await;
753            (
754                tool_list_contradictions(&project),
755                Some(clone_project(&project)),
756            )
757        }
758        "frontier_stats" => {
759            let project = frontier.lock().await;
760            (tool_frontier_stats(&project), Some(clone_project(&project)))
761        }
762        "find_bridges" => {
763            let project = frontier.lock().await;
764            (
765                tool_find_bridges(args, &project),
766                Some(clone_project(&project)),
767            )
768        }
769        "propagate_retraction" => {
770            let project = frontier.lock().await;
771            (
772                tool_propagate_retraction(args, &project),
773                Some(clone_project(&project)),
774            )
775        }
776        "apply_observer" => {
777            let project = frontier.lock().await;
778            (
779                tool_apply_observer(args, &project),
780                Some(clone_project(&project)),
781            )
782        }
783        "trace_evidence_chain" => {
784            let project = frontier.lock().await;
785            (
786                tool_trace_evidence_chain(args, &project),
787                Some(clone_project(&project)),
788            )
789        }
790        "check_pubmed" => (tool_check_pubmed(args, client).await, None),
791        "list_events_since" => {
792            let project = frontier.lock().await;
793            (
794                tool_list_events_since(args, &project),
795                Some(clone_project(&project)),
796            )
797        }
798        // Phase Q-w (v0.5): write surface — propose-* and decision tools.
799        // Each requires a registered actor and a verifying signature
800        // over a canonical preimage. Idempotent under Phase P.
801        "propose_review" => {
802            let result = write_tool_propose(
803                args,
804                frontier,
805                source_path,
806                "finding.review",
807                |args| {
808                    let status = args
809                        .get("status")
810                        .and_then(Value::as_str)
811                        .ok_or("propose_review requires `status`")?;
812                    if !matches!(
813                        status,
814                        "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
815                    ) {
816                        return Err(format!("invalid review status '{status}'"));
817                    }
818                    Ok(json!({"status": status}))
819                },
820                false,
821            )
822            .await;
823            let snapshot = Some(clone_project(&*frontier.lock().await));
824            (result, snapshot)
825        }
826        "propose_note" => {
827            let result = write_tool_propose(
828                args,
829                frontier,
830                source_path,
831                "finding.note",
832                |args| build_note_payload(args, "propose_note"),
833                false,
834            )
835            .await;
836            let snapshot = Some(clone_project(&*frontier.lock().await));
837            (result, snapshot)
838        }
839        // Phase α (v0.6): one-call propose-and-apply for `finding.note`.
840        // Requires the actor to have `tier="auto-notes"` registered; the
841        // `write_tool_propose` helper rejects with a clear error otherwise.
842        // Doctrine: tiers permit review-context kinds only; never state-
843        // changing kinds (no `propose_and_apply_review`/`_retract`/`_revise`).
844        "propose_and_apply_note" => {
845            let result = write_tool_propose(
846                args,
847                frontier,
848                source_path,
849                "finding.note",
850                |args| build_note_payload(args, "propose_and_apply_note"),
851                true,
852            )
853            .await;
854            let snapshot = Some(clone_project(&*frontier.lock().await));
855            (result, snapshot)
856        }
857        "propose_revise_confidence" => {
858            let result = write_tool_propose(
859                args,
860                frontier,
861                source_path,
862                "finding.confidence_revise",
863                |args| {
864                    let new_score = args
865                        .get("new_score")
866                        .and_then(Value::as_f64)
867                        .ok_or("propose_revise_confidence requires `new_score`")?;
868                    if !(0.0..=1.0).contains(&new_score) {
869                        return Err(format!("new_score {new_score} out of [0.0, 1.0]"));
870                    }
871                    Ok(json!({"new_score": new_score}))
872                },
873                false,
874            )
875            .await;
876            let snapshot = Some(clone_project(&*frontier.lock().await));
877            (result, snapshot)
878        }
879        "propose_retract" => {
880            let result = write_tool_propose(
881                args,
882                frontier,
883                source_path,
884                "finding.retract",
885                |_args| Ok(json!({})),
886                false,
887            )
888            .await;
889            let snapshot = Some(clone_project(&*frontier.lock().await));
890            (result, snapshot)
891        }
892        "accept_proposal" => {
893            let result = write_tool_decision(args, frontier, source_path, "accept").await;
894            let snapshot = Some(clone_project(&*frontier.lock().await));
895            (result, snapshot)
896        }
897        "reject_proposal" => {
898            let result = write_tool_decision(args, frontier, source_path, "reject").await;
899            let snapshot = Some(clone_project(&*frontier.lock().await));
900            (result, snapshot)
901        }
902        _ => (Err(format!("Unknown tool: {name}")), None),
903    }
904}
905
906/// Phase β (v0.6): build the `finding.note` proposal payload from
907/// caller args. Accepts the required `text` plus an optional structured
908/// `provenance` object whose at-least-one-identifier rule is enforced
909/// here at the API boundary, so the same validation runs whether the
910/// caller is `propose_note` or `propose_and_apply_note`.
911fn build_note_payload(args: &Value, tool_name: &str) -> Result<Value, String> {
912    let text = args
913        .get("text")
914        .and_then(Value::as_str)
915        .ok_or_else(|| format!("{tool_name} requires `text`"))?;
916    if text.trim().is_empty() {
917        return Err("text must be non-empty".to_string());
918    }
919    let mut payload = json!({"text": text});
920    if let Some(prov) = args.get("provenance") {
921        let prov_obj = prov
922            .as_object()
923            .ok_or("provenance must be a JSON object when present")?;
924        let has_id = ["doi", "pmid", "title"].iter().any(|k| {
925            prov_obj
926                .get(*k)
927                .and_then(Value::as_str)
928                .is_some_and(|s| !s.trim().is_empty())
929        });
930        if !has_id {
931            return Err("provenance must include at least one of doi/pmid/title".to_string());
932        }
933        payload["provenance"] = prov.clone();
934    }
935    Ok(payload)
936}
937
938/// Phase Q-w (v0.5) + Phase α (v0.6): shared body for the propose-* write
939/// tools. `payload_builder` extracts the kind-specific payload from `args`.
940/// `apply_if_tier_permits` (Phase α): when `true`, the function looks up the
941/// actor's `tier`, requires `sign::actor_can_auto_apply(actor, kind)` to
942/// return `true`, and applies the proposal in one canonical event;
943/// otherwise rejects with a clear error. When `false` (the v0.5 default),
944/// the proposal stays in `pending_review` regardless of tier.
async fn write_tool_propose<F>(
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    source_path: Option<&Path>,
    kind: &str,
    payload_builder: F,
    apply_if_tier_permits: bool,
) -> Result<String, String>
where
    F: Fn(&Value) -> Result<Value, String>,
{
    // Writes need one concrete file to append to; in --frontiers <DIR>
    // mode there is no single canonical frontier, so reject up front.
    let path = source_path.ok_or_else(|| {
        "Write tools require a single-file frontier (--frontier <PATH>); rejected in --frontiers <DIR> mode".to_string()
    })?;
    // Required caller fields — each missing field produces its own
    // actionable error rather than a generic "bad request".
    let actor_id = args
        .get("actor_id")
        .and_then(Value::as_str)
        .ok_or("write tool requires `actor_id`")?;
    let target_finding_id = args
        .get("target_finding_id")
        .and_then(Value::as_str)
        .ok_or("write tool requires `target_finding_id`")?;
    let reason = args
        .get("reason")
        .and_then(Value::as_str)
        .ok_or("write tool requires `reason`")?;
    let signature_hex = args
        .get("signature")
        .and_then(Value::as_str)
        .ok_or("write tool requires `signature` (Ed25519 over canonical proposal preimage)")?;
    // `created_at` is caller-supplied when present (the signer fixed it
    // when producing the signature over the preimage); defaults to now
    // for callers that sign a freshly-minted timestamp.
    let created_at = args
        .get("created_at")
        .and_then(Value::as_str)
        .map(String::from)
        .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
    // Kind-specific payload validation (e.g. note text, review status).
    let payload = payload_builder(args)?;

    // Look up the actor's registered pubkey AND tier (Phase α).
    // The lock is scoped to this block and released before the disk
    // write below.
    // NOTE(review): the lock is dropped here and re-acquired only after
    // `create_or_apply`; an actor removed concurrently in that window
    // would still get this write through — confirm that is acceptable.
    let (pubkey, tier_permits_apply) = {
        let project = frontier.lock().await;
        let actor = project
            .actors
            .iter()
            .find(|actor| actor.id == actor_id)
            .ok_or_else(|| {
                format!(
                    "actor '{actor_id}' is not registered in this frontier; register via `vela actor add` before writing"
                )
            })?;
        let tier_permits = crate::sign::actor_can_auto_apply(actor, kind);
        // If the caller asked to auto-apply but the actor's tier doesn't
        // permit this kind, reject before signature verification — the
        // capability gate is independent of signing correctness.
        if apply_if_tier_permits && !tier_permits {
            let tier_label = actor.tier.as_deref().unwrap_or("none");
            return Err(format!(
                "actor '{actor_id}' tier '{tier_label}' does not permit auto-apply for {kind}"
            ));
        }
        (actor.public_key.clone(), tier_permits)
    };

    // Build the proposal exactly as the CLI would, then verify the signature
    // against the registered pubkey before persisting.
    let mut proposal = crate::proposals::new_proposal(
        kind,
        crate::events::StateTarget {
            r#type: "finding".to_string(),
            id: target_finding_id.to_string(),
        },
        actor_id,
        "human",
        reason,
        payload,
        Vec::new(),
        Vec::new(),
    );
    // The id is content-derived, so it must be computed after
    // `created_at` is patched in — id covers the final timestamp.
    proposal.created_at = created_at;
    proposal.id = crate::proposals::proposal_id(&proposal);

    let valid = crate::sign::verify_proposal_signature(&proposal, signature_hex, &pubkey)?;
    if !valid {
        return Err(format!(
            "Signature does not verify for actor '{actor_id}' on this proposal"
        ));
    }

    // Persist. Phase α: apply iff caller asked AND tier permits (already
    // enforced above). Phase P guarantees `create_or_apply` is idempotent
    // either way.
    let apply = apply_if_tier_permits && tier_permits_apply;
    let result = crate::proposals::create_or_apply(path, proposal, apply)
        .map_err(|e| format!("create_or_apply failed: {e}"))?;

    // Refresh the in-memory state from disk so subsequent reads see the write.
    let fresh =
        crate::repo::load_from_path(path).map_err(|e| format!("reload after write failed: {e}"))?;
    let mut project = frontier.lock().await;
    *project = fresh;

    serde_json::to_string(&json!({
        "proposal_id": result.proposal_id,
        "finding_id": result.finding_id,
        "status": result.status,
        "applied_event_id": result.applied_event_id,
    }))
    .map_err(|e| format!("serialize write result: {e}"))
}
1053
1054/// Phase Q-w (v0.5): shared body for `accept_proposal` and `reject_proposal`.
1055/// The signing preimage is `{action, proposal_id, reviewer_id, reason, timestamp}`
1056/// canonicalized; the reviewer must be a registered actor.
async fn write_tool_decision(
    args: &Value,
    frontier: &Arc<Mutex<Project>>,
    source_path: Option<&Path>,
    action: &str,
) -> Result<String, String> {
    // Same single-file constraint as the propose-* tools: decisions
    // append to one concrete frontier file.
    let path = source_path.ok_or_else(|| {
        "Write tools require a single-file frontier (--frontier <PATH>); rejected in --frontiers <DIR> mode".to_string()
    })?;
    // Required caller fields, each with its own actionable error.
    let proposal_id = args
        .get("proposal_id")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `proposal_id`")?;
    let reviewer_id = args
        .get("reviewer_id")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `reviewer_id`")?;
    let reason = args
        .get("reason")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `reason`")?;
    let signature_hex = args
        .get("signature")
        .and_then(Value::as_str)
        .ok_or("decision tool requires `signature`")?;
    // Caller-supplied timestamp (part of the signed preimage); defaults
    // to now for callers that signed a fresh timestamp.
    let timestamp = args
        .get("timestamp")
        .and_then(Value::as_str)
        .map(String::from)
        .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

    // Canonical preimage for the decision action.
    // Field set must stay in sync with whatever `vela queue sign`
    // produces, or signatures made there will not verify here.
    let preimage = json!({
        "action": action,
        "proposal_id": proposal_id,
        "reviewer_id": reviewer_id,
        "reason": reason,
        "timestamp": timestamp,
    });
    let signing_bytes = crate::canonical::to_canonical_bytes(&preimage)?;

    // Look up the reviewer's registered pubkey. Lock scope is just this
    // block; the accept/reject below re-reads state from disk itself.
    let pubkey = {
        let project = frontier.lock().await;
        project
            .actors
            .iter()
            .find(|actor| actor.id == reviewer_id)
            .map(|actor| actor.public_key.clone())
            .ok_or_else(|| format!("reviewer '{reviewer_id}' is not registered"))?
    };

    let valid = crate::sign::verify_action_signature(&signing_bytes, signature_hex, &pubkey)?;
    if !valid {
        return Err(format!(
            "Signature does not verify for reviewer '{reviewer_id}' on {action} of {proposal_id}"
        ));
    }

    // Dispatch: accept applies the proposal and yields the new event id;
    // reject records the rejection and yields no applied event.
    let outcome = match action {
        "accept" => {
            let event_id = crate::proposals::accept_at_path(path, proposal_id, reviewer_id, reason)
                .map_err(|e| format!("accept failed: {e}"))?;
            json!({
                "proposal_id": proposal_id,
                "applied_event_id": event_id,
                "status": "applied",
            })
        }
        "reject" => {
            crate::proposals::reject_at_path(path, proposal_id, reviewer_id, reason)
                .map_err(|e| format!("reject failed: {e}"))?;
            json!({
                "proposal_id": proposal_id,
                "applied_event_id": Value::Null,
                "status": "rejected",
            })
        }
        other => return Err(format!("unsupported decision action '{other}'")),
    };

    // Refresh in-memory state.
    let fresh =
        crate::repo::load_from_path(path).map_err(|e| format!("reload after write failed: {e}"))?;
    let mut project = frontier.lock().await;
    *project = fresh;

    serde_json::to_string(&outcome).map_err(|e| format!("serialize decision: {e}"))
}
1146
1147/// Phase Q-r (v0.5): MCP-tool form of the cursor-paginated event read.
1148/// Mirrors `GET /api/events`. Same cursor semantics: events strictly
1149/// after `cursor` (a `vev_…` id), or from genesis if cursor is omitted.
1150fn tool_list_events_since(args: &Value, project: &Project) -> Result<String, String> {
1151    let cursor = args.get("cursor").and_then(Value::as_str);
1152    let limit = args
1153        .get("limit")
1154        .and_then(Value::as_u64)
1155        .map_or(100usize, |n| (n as usize).min(500));
1156    let start_idx: usize = match cursor {
1157        None => 0,
1158        Some(c) => match project.events.iter().position(|event| event.id == c) {
1159            Some(idx) => idx + 1,
1160            None => {
1161                return Err(format!(
1162                    "cursor '{c}' not found in event log; client is out of sync"
1163                ));
1164            }
1165        },
1166    };
1167    let end_idx = (start_idx + limit).min(project.events.len());
1168    let slice = &project.events[start_idx..end_idx];
1169    let next_cursor = if end_idx < project.events.len() {
1170        slice.last().map(|event| event.id.clone())
1171    } else {
1172        None
1173    };
1174    let payload = json!({
1175        "events": slice,
1176        "count": slice.len(),
1177        "next_cursor": next_cursor,
1178        "log_total": project.events.len(),
1179    });
1180    serde_json::to_string(&payload).map_err(|e| format!("serialize list_events_since: {e}"))
1181}
1182
fn check_tool_result(
    name: &str,
    result: Result<String, String>,
    started: std::time::Instant,
) -> Value {
    // Collapse Ok/Err into one text payload: an error message is handed
    // to `ToolResult::from_text` exactly like a success payload.
    // NOTE(review): the fourth argument is hard-coded `false` even when
    // `result` was an Err — if that parameter is an is-error flag,
    // failures are being reported as successes; confirm against the
    // `ToolResult::from_text` signature.
    let output = ToolResult::from_text(
        name,
        result.unwrap_or_else(|e| e),
        started.elapsed().as_millis(),
        false,
        None,
    );
    let has_data = !output.data.is_null();
    let has_markdown = !output.markdown.trim().is_empty();
    // NOTE(review): signals/caveats presence is hard-coded `true` rather
    // than derived from `output.signals` / `output.caveats`, so `ok`
    // effectively depends only on data + markdown — confirm intentional.
    let has_signals = true;
    let has_caveats = true;
    json!({
        "tool": name,
        "ok": has_data && has_markdown && has_signals && has_caveats,
        "data": output.data,
        "markdown": output.markdown,
        "has_data": has_data,
        "has_markdown": has_markdown,
        "has_signals": has_signals,
        "has_caveats": has_caveats,
        "signals": output.signals,
        "caveats": output.caveats,
        "duration_ms": output.duration_ms,
    })
}
1213
1214/// Phase Q-r (v0.5): cursor-paginated read over the canonical event log.
1215///
1216/// Query params:
1217///   - `since` (optional): a `vev_…` event id; events strictly after this id
1218///     are returned. Omit to start from the genesis event.
1219///   - `limit` (optional, default 100, max 500): cap the response size.
1220///
1221/// Returns `{events: [...], next_cursor: "vev_..." | null, count: usize}`.
1222/// `next_cursor` is null when the response includes the tail of the log.
1223///
1224/// 400 if `since` is provided but does not exist in the log (the client is
1225/// out of sync with the log it's reading; better to fail loudly than to
1226/// silently skip).
async fn http_events(
    State(state): State<AppState>,
    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
) -> (StatusCode, Json<Value>) {
    let project = state.project.lock().await;
    // Response-size cap: default 100, hard max 500.
    let limit = params
        .get("limit")
        .and_then(|v| v.parse::<usize>().ok())
        .unwrap_or(100)
        .min(500);
    // Resolve `since` against the UNFILTERED log: it must name an
    // existing event id, and the page starts at the element after it.
    // An unknown cursor is a hard 400 (fail loudly rather than skip).
    let start_idx: usize = match params.get("since") {
        None => 0,
        Some(cursor) => match project.events.iter().position(|event| &event.id == cursor) {
            Some(idx) => idx + 1,
            None => {
                return (
                    StatusCode::BAD_REQUEST,
                    Json(json!({
                        "error": format!(
                            "cursor '{cursor}' not found in event log; client is out of sync"
                        ),
                    })),
                );
            }
        },
    };
    // v0.17: server-side `?kind=` and `?target=` filters. Agents watching
    // for specific event kinds (e.g. polling for new finding.superseded
    // events) shouldn't need to fetch the whole log to locate one match.
    // Ordering: the cursor skip happens first (resolved against the
    // unfiltered log above), then the kind/target filters, then the
    // limit — so pagination operates on the filtered view.
    let kind_filter = params.get("kind").map(String::as_str);
    let target_filter = params.get("target").map(String::as_str);
    let filtered: Vec<&crate::events::StateEvent> = project
        .events
        .iter()
        .skip(start_idx)
        .filter(|e| kind_filter.is_none_or(|k| e.kind == k))
        .filter(|e| target_filter.is_none_or(|t| e.target.id == t))
        .collect();
    let total_filtered = filtered.len();
    let take_n = limit.min(total_filtered);
    let slice: Vec<&crate::events::StateEvent> = filtered.into_iter().take(take_n).collect();
    // `next_cursor` is the id of the last returned event — a real log
    // id, so the next request's `since` lookup above will find it — or
    // null when this page reached the end of the filtered view.
    // NOTE(review): if `limit` parses to 0 the page is empty and
    // `next_cursor` is null even when matches remain; confirm whether
    // limit=0 should be rejected or clamped to 1.
    let next_cursor = if take_n < total_filtered {
        slice.last().map(|event| event.id.clone())
    } else {
        None
    };
    (
        StatusCode::OK,
        Json(json!({
            "events": slice,
            "count": slice.len(),
            "next_cursor": next_cursor,
            "log_total": project.events.len(),
            "filtered_total": total_filtered,
        })),
    )
}
1286
1287/// Phase R (v0.5): append a draft Workbench action to the local queue.
1288/// The browser POSTs `{kind, args}` (no signature, no actor key — the
1289/// browser is identity-blind under the v0.5 doctrine). The Workbench
1290/// host process appends to the configured queue file; `vela queue sign`
1291/// is the only path that produces a signed write.
1292///
1293/// Body:
1294///   `{"kind": "<tool_name>", "args": { ... }}`
1295///
1296/// Returns `{ok: true, queued_at: "<rfc3339>"}` on success.
1297async fn http_queue_append(
1298    State(state): State<AppState>,
1299    Json(body): Json<Value>,
1300) -> (StatusCode, Json<Value>) {
1301    let path = match &state.source_path {
1302        Some(p) => p.clone(),
1303        None => {
1304            return (
1305                StatusCode::BAD_REQUEST,
1306                Json(
1307                    json!({"error": "Workbench queue requires a single-file frontier (--frontier <PATH>)"}),
1308                ),
1309            );
1310        }
1311    };
1312    let kind = match body.get("kind").and_then(Value::as_str) {
1313        Some(k) => k.to_string(),
1314        None => {
1315            return (
1316                StatusCode::BAD_REQUEST,
1317                Json(json!({"error": "POST /api/queue requires `kind`"})),
1318            );
1319        }
1320    };
1321    let valid_kinds = [
1322        "propose_review",
1323        "propose_note",
1324        "propose_revise_confidence",
1325        "propose_retract",
1326        "accept_proposal",
1327        "reject_proposal",
1328    ];
1329    if !valid_kinds.contains(&kind.as_str()) {
1330        return (
1331            StatusCode::BAD_REQUEST,
1332            Json(json!({"error": format!("unsupported queue kind '{kind}'")})),
1333        );
1334    }
1335    let args = body.get("args").cloned().unwrap_or(Value::Null);
1336    let queued_at = chrono::Utc::now().to_rfc3339();
1337    let action = crate::queue::QueuedAction {
1338        kind,
1339        frontier: path,
1340        args,
1341        queued_at: queued_at.clone(),
1342    };
1343    let queue_path = crate::queue::default_queue_path();
1344    if let Err(error) = crate::queue::append(&queue_path, action) {
1345        return (
1346            StatusCode::INTERNAL_SERVER_ERROR,
1347            Json(json!({"error": format!("append to queue: {error}")})),
1348        );
1349    }
1350    (
1351        StatusCode::OK,
1352        Json(json!({
1353            "ok": true,
1354            "queue_file": queue_path.display().to_string(),
1355            "queued_at": queued_at,
1356            "next_step": "run `vela queue sign` to apply queued drafts",
1357        })),
1358    )
1359}
1360
1361/// v0.92: agent write target.
1362///
1363/// POST a Carina `ArtifactPacket` JSON. The substrate validates it,
1364/// imports it as proposals via `artifact_to_state::import_packet_at_path`,
1365/// reloads the in-memory project so subsequent reads see the new
1366/// proposals, and returns the new `vpr_*` ids plus the full report.
1367///
1368/// Optional query params:
1369/// - `actor`: actor id to attribute the import to (defaults to
1370///   `agent:carina-write-target`).
1371/// - `apply_artifacts`: if `true`, applies the Carina artifacts as
1372///   accepted-state events instead of pending proposals. Default `false`.
async fn http_from_carina(
    State(state): State<AppState>,
    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
    Json(body): Json<Value>,
) -> (StatusCode, Json<Value>) {
    // Imports append to one concrete frontier; directory mode is rejected.
    let path = match &state.source_path {
        Some(p) => p.clone(),
        None => {
            return (
                StatusCode::BAD_REQUEST,
                Json(json!({
                    "error": "agent write target requires a single-file or single-repo frontier (`vela serve <path> --http <port>`)"
                })),
            );
        }
    };
    // Attribution actor: `?actor=` override or the default agent id.
    let actor = params
        .get("actor")
        .cloned()
        .unwrap_or_else(|| "agent:carina-write-target".to_string());
    // `?apply_artifacts=true|1` applies artifacts as accepted-state
    // events; anything else leaves them as pending proposals.
    let apply_artifacts = params
        .get("apply_artifacts")
        .map(|v| v == "true" || v == "1")
        .unwrap_or(false);

    // Validate the body against the ArtifactPacket schema before
    // touching disk. Decoupling validation from filesystem write
    // means a malformed packet returns 400 cheaply.
    let packet: crate::artifact_to_state::ArtifactPacket =
        match serde_json::from_value(body.clone()) {
            Ok(p) => p,
            Err(e) => {
                return (
                    StatusCode::BAD_REQUEST,
                    Json(json!({"error": format!("packet parse: {e}")})),
                );
            }
        };
    // Second stage: semantic validation on the parsed packet.
    let packet = match packet.validate() {
        Ok(p) => p,
        Err(e) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(json!({"error": format!("packet validate: {e}")})),
            );
        }
    };

    // Write the validated packet to a temp file, since the existing
    // `import_packet_at_path` takes a path. Future cleanup: a
    // direct in-memory variant that skips this hop.
    // (The NamedTempFile is cleaned up on drop at function exit.)
    let tmp = match tempfile::NamedTempFile::new() {
        Ok(t) => t,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": format!("tempfile: {e}")})),
            );
        }
    };
    let canonical = match serde_json::to_vec_pretty(&packet) {
        Ok(b) => b,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": format!("re-serialize: {e}")})),
            );
        }
    };
    if let Err(e) = std::fs::write(tmp.path(), &canonical) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": format!("write tempfile: {e}")})),
        );
    }

    // Drop the project lock around the import call so the import's
    // own loads/writes don't deadlock against ongoing reads.
    // NOTE(review): acquiring and immediately dropping the lock only
    // waits out current holders — it does NOT exclude concurrent
    // requests during the import itself; confirm the import path is
    // safe against interleaved readers of the same file.
    drop(state.project.lock().await);
    let report = match crate::artifact_to_state::import_packet_at_path(
        &path,
        tmp.path(),
        &actor,
        apply_artifacts,
    ) {
        Ok(r) => r,
        Err(e) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(json!({"error": format!("import: {e}")})),
            );
        }
    };

    // Reload the project so later GET /api/findings, GET /api/findings/{id},
    // etc. see the new proposals.
    let mut reloaded = match crate::repo::load_from_path(&path) {
        Ok(p) => p,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": format!("reload after import: {e}")})),
            );
        }
    };
    crate::sources::materialize_project(&mut reloaded);
    // Swap the fresh state in under a short-lived lock.
    {
        let mut guard = state.project.lock().await;
        *guard = reloaded;
    }

    (
        StatusCode::OK,
        Json(json!({
            "ok": true,
            "actor": actor,
            "apply_artifacts": apply_artifacts,
            "report": report,
        })),
    )
}
1494
1495/// Phase R (v0.5): resolve the location of the `web/` directory for the
1496/// Workbench static assets. Tries common paths: workspace root relative
1497/// to the running binary, then current working directory, then a
1498/// `VELA_WEB_DIR` env override.
/// Phase R (v0.5): resolve the location of the `web/` directory for the
/// Workbench static assets. Resolution order: the `VELA_WEB_DIR` env
/// override, then `web/` under the current working directory (also the
/// fallback when it does not exist yet, so callers get a deterministic
/// path either way).
///
/// The previous candidate list also probed `./web` and `web`, but
/// relative paths resolve against the same working directory as
/// `cwd.join("web")`, so those probes were dead code and the old doc
/// comment's claim of binary-relative resolution was never implemented.
fn workbench_web_dir() -> PathBuf {
    if let Ok(path) = std::env::var("VELA_WEB_DIR") {
        return PathBuf::from(path);
    }
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    // Existing or not, `<cwd>/web` is the canonical location.
    cwd.join("web")
}
1516
1517/// v0.51: Resolve the requesting actor's read-side access clearance
1518/// from the `X-Vela-Actor` request header. The header value, if
1519/// present, is matched against `Project.actors` by id; the actor's
1520/// `access_clearance` field is returned. Anonymous reads (header
1521/// absent) get `None`, which equals "public-only" per
1522/// `access_tier::actor_may_read`.
1523///
1524/// This is a deliberately thin authentication surface for v0.51 —
1525/// the assumption is that a real deployment terminates TLS and
1526/// validates actor signatures at a reverse proxy in front of `vela
1527/// serve`, then forwards `X-Vela-Actor` only when verified. v0.52+
1528/// can tighten this to require a signed bearer token end-to-end.
1529fn requesting_clearance(
1530    headers: &HeaderMap,
1531    project: &Project,
1532) -> Option<crate::access_tier::AccessTier> {
1533    let actor_id = headers
1534        .get("x-vela-actor")
1535        .and_then(|v| v.to_str().ok())?
1536        .trim();
1537    if actor_id.is_empty() {
1538        return None;
1539    }
1540    let actor = project.actors.iter().find(|a| a.id == actor_id)?;
1541    actor.access_clearance
1542}
1543
1544async fn http_frontier(State(state): State<AppState>, headers: HeaderMap) -> Json<Value> {
1545    let project = state.project.lock().await;
1546    let clearance = requesting_clearance(&headers, &project);
1547    let view = crate::access_tier::redact_for_actor(&project, clearance);
1548    Json(serde_json::to_value(&view).unwrap_or_else(|_| json!({"error": "serialization failed"})))
1549}
1550
1551async fn http_findings(
1552    State(state): State<AppState>,
1553    headers: HeaderMap,
1554    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1555) -> Json<Value> {
1556    let project = state.project.lock().await;
1557    let clearance = requesting_clearance(&headers, &project);
1558    let view = crate::access_tier::redact_for_actor(&project, clearance);
1559
1560    // v0.91: When no search-style filter is supplied, return a
1561    // structured `{findings, count}` list rather than the search
1562    // tool's text-shaped result. The previous behavior was
1563    // surprising for API consumers expecting a REST-style list
1564    // and forced them to scrape free-text. Search filters
1565    // (`query`, `entity`, `entity_type`, `type`) preserve the
1566    // existing search-tool behavior for callers that depend on it.
1567    let has_search = params.contains_key("query")
1568        || params.contains_key("entity")
1569        || params.contains_key("entity_type")
1570        || params.contains_key("type");
1571    if !has_search {
1572        let limit = params
1573            .get("limit")
1574            .and_then(|v| v.parse::<usize>().ok())
1575            .unwrap_or(view.findings.len());
1576        let findings: Vec<Value> = view
1577            .findings
1578            .iter()
1579            .take(limit)
1580            .map(|f| serde_json::to_value(f).unwrap_or_default())
1581            .collect();
1582        return Json(json!({
1583            "count": view.findings.len(),
1584            "returned": findings.len(),
1585            "findings": findings,
1586        }));
1587    }
1588
1589    let args = json!({
1590        "query": params.get("query"),
1591        "entity": params.get("entity"),
1592        "entity_type": params.get("entity_type"),
1593        "assertion_type": params.get("type"),
1594        "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(50),
1595    });
1596    match tool_search_findings(&args, &view) {
1597        Ok(text) => Json(json!({"result": text})),
1598        Err(error) => Json(json!({"error": error})),
1599    }
1600}
1601
/// GET handler: fetch a single finding by exact id or unique id prefix.
///
/// Access control: findings above the caller's clearance return 404 (not
/// 403) so that their existence is not leaked. On success the serialized
/// finding is augmented with derived, read-only views computed from live
/// project state: Belnap status, discord kinds, and the support/refute
/// provenance polynomials (both structured and display forms).
async fn http_finding_by_id(
    State(state): State<AppState>,
    headers: HeaderMap,
    axum::extract::Path(id): axum::extract::Path<String>,
) -> (StatusCode, Json<Value>) {
    let project = state.project.lock().await;
    // Clearance is derived from the request headers against the loaded project.
    let clearance = requesting_clearance(&headers, &project);
    // Prefix match lets callers pass shortened ids; first match wins.
    match project
        .findings
        .iter()
        .find(|finding| finding.id == id || finding.id.starts_with(&id))
    {
        Some(finding) => {
            if !crate::access_tier::actor_may_read(finding.access_tier, clearance) {
                // v0.51: above-clearance findings 404 — the existence
                // of the object is itself part of the tiered content.
                return (
                    StatusCode::NOT_FOUND,
                    Json(json!({"error": format!("Finding not found: {id}")})),
                );
            }
            // v0.85: surface the substrate-derived Belnap status
            // alongside the on-disk finding. Computed read-only
            // from the event log via `provenance_compute`. The
            // existing finding fields remain authoritative; this
            // is a derived view per docs/THEORY.md Section 7.
            let sp =
                crate::provenance_compute::status_provenance_for_finding(&project, &finding.id);
            let belnap = sp.derive_status();
            // v0.87: surface the substrate-derived discord set per
            // docs/THEORY.md Section 4. Detectors run read-only
            // against the live Project state; results are advisory.
            let discord =
                crate::discord_compute::compute_discord_for_finding(&project, &finding.id);
            let discord_kinds: Vec<String> =
                discord.iter().map(|k| k.as_str().to_string()).collect();
            // Serialize the finding first, then graft the derived fields onto
            // the resulting JSON object (no-op if serialization yields a
            // non-object, which unwrap_or_default would produce on error).
            let mut value = serde_json::to_value(finding).unwrap_or_default();
            if let Some(map) = value.as_object_mut() {
                map.insert(
                    "belnap_status".to_string(),
                    serde_json::to_value(belnap).unwrap_or_default(),
                );
                map.insert(
                    "belnap_letter".to_string(),
                    json!(belnap.letter().to_string()),
                );
                map.insert(
                    "support_term_count".to_string(),
                    json!(sp.support.term_count()),
                );
                map.insert(
                    "refute_term_count".to_string(),
                    json!(sp.refute.term_count()),
                );
                map.insert("discord_kinds".to_string(), json!(discord_kinds));
                map.insert("discord_count".to_string(), json!(discord.len()));
                // v0.88: surface the actual provenance-polynomial
                // structure per docs/THEORY.md §2.2. The serde-
                // friendly form is `[{monomial, coefficient}]`,
                // suitable for audit trails and downstream tooling
                // that needs to know which events derive support.
                map.insert(
                    "support_polynomial".to_string(),
                    serde_json::to_value(&sp.support).unwrap_or_default(),
                );
                map.insert(
                    "refute_polynomial".to_string(),
                    serde_json::to_value(&sp.refute).unwrap_or_default(),
                );
                // Display strings render polynomials in standard
                // additive form: `2*p1*d3 + r7*e2`. Suitable for
                // debug surfaces and Workbench tooltips.
                map.insert(
                    "support_polynomial_display".to_string(),
                    json!(format!("{}", sp.support)),
                );
                map.insert(
                    "refute_polynomial_display".to_string(),
                    json!(format!("{}", sp.refute)),
                );
            }
            (StatusCode::OK, Json(value))
        }
        None => (
            StatusCode::NOT_FOUND,
            Json(json!({"error": format!("Finding not found: {id}")})),
        ),
    }
}
1691
1692async fn http_contradictions(State(state): State<AppState>) -> Json<Value> {
1693    let project = state.project.lock().await;
1694    Json(
1695        serde_json::from_str(&tool_list_contradictions(&project).unwrap_or_default())
1696            .unwrap_or_else(
1697                |_| json!({"result": tool_list_contradictions(&project).unwrap_or_default()}),
1698            ),
1699    )
1700}
1701
1702/// v0.97: HTTP mirror of `vela discord`. Returns the frontier-wide
1703/// discord assignment computed read-only against live Project state
1704/// per docs/THEORY.md §4. Body shape matches the CLI's --json output.
1705///
1706/// Query params:
1707/// - `kind`: optional filter to a single discord kind (e.g.
1708///   `provenance_fragile`, `evidence_gap`, `status_divergent`).
1709async fn http_discord(
1710    State(state): State<AppState>,
1711    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1712) -> Json<Value> {
1713    use crate::discord::DiscordKind;
1714    use crate::discord_compute::compute_discord_assignment;
1715
1716    let project = state.project.lock().await;
1717    let assignment = compute_discord_assignment(&project);
1718    let support = assignment.frontier_support();
1719    let filter = params.get("kind").cloned();
1720
1721    let mut rows: Vec<Value> = Vec::new();
1722    for context in support.iter() {
1723        let set = assignment.get(context);
1724        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
1725        if let Some(f) = &filter
1726            && !kinds.iter().any(|k| k == f)
1727        {
1728            continue;
1729        }
1730        rows.push(json!({
1731            "finding_id": context,
1732            "discord_kinds": kinds,
1733        }));
1734    }
1735
1736    let mut histogram = serde_json::Map::new();
1737    for kind in DiscordKind::ALL {
1738        let count = assignment
1739            .iter()
1740            .filter(|(_, set)| set.contains(*kind))
1741            .count();
1742        if count > 0 {
1743            histogram.insert(kind.as_str().to_string(), json!(count));
1744        }
1745    }
1746
1747    let frontier_id = project
1748        .frontier_id
1749        .clone()
1750        .unwrap_or_else(|| String::from("<unknown>"));
1751
1752    Json(json!({
1753        "frontier_id": frontier_id,
1754        "total_findings": project.findings.len(),
1755        "frontier_support_size": support.len(),
1756        "filtered_row_count": rows.len(),
1757        "filter_kind": filter,
1758        "histogram": Value::Object(histogram),
1759        "rows": rows,
1760    }))
1761}
1762
1763async fn http_health(State(state): State<AppState>) -> Json<Value> {
1764    let project = state.project.lock().await;
1765    Json(json!({
1766        "ok": true,
1767        "frontier": {
1768            "name": project.project.name,
1769            "findings": project.stats.findings,
1770            "events": project.events.len(),
1771        }
1772    }))
1773}
1774
1775async fn http_artifacts(State(state): State<AppState>) -> Json<Value> {
1776    let project = state.project.lock().await;
1777    Json(json!({
1778        "ok": true,
1779        "count": project.artifacts.len(),
1780        "artifacts": project.artifacts,
1781    }))
1782}
1783
1784async fn http_artifact_audit(State(state): State<AppState>) -> Json<Value> {
1785    let source_path = state.source_path.clone();
1786    let project = state.project.lock().await;
1787    let Some(path) = source_path else {
1788        return Json(json!({
1789            "ok": false,
1790            "available": false,
1791            "issues": [],
1792            "error": "artifact audit requires a single frontier source",
1793        }));
1794    };
1795    Json(
1796        serde_json::to_value(crate::artifact_audit::audit_artifacts(&path, &project))
1797            .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1798    )
1799}
1800
1801async fn http_decision_brief(State(state): State<AppState>) -> Json<Value> {
1802    let source_path = state.source_path.clone();
1803    let project = state.project.lock().await;
1804    let Some(path) = source_path else {
1805        return Json(json!({
1806            "ok": false,
1807            "available": false,
1808            "projection": null,
1809            "issues": [],
1810            "error": "decision projections require a single frontier source",
1811        }));
1812    };
1813    Json(
1814        serde_json::to_value(decision::load_decision_brief(&path, &project))
1815            .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1816    )
1817}
1818
1819async fn http_trials(State(state): State<AppState>) -> Json<Value> {
1820    let source_path = state.source_path.clone();
1821    let project = state.project.lock().await;
1822    let Some(path) = source_path else {
1823        return Json(json!({
1824            "ok": false,
1825            "available": false,
1826            "projection": null,
1827            "issues": [],
1828            "error": "trial projections require a single frontier source",
1829        }));
1830    };
1831    Json(
1832        serde_json::to_value(decision::load_trial_outcomes(&path, &project))
1833            .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1834    )
1835}
1836
1837async fn http_source_verification(State(state): State<AppState>) -> Json<Value> {
1838    let source_path = state.source_path.clone();
1839    let project = state.project.lock().await;
1840    let Some(path) = source_path else {
1841        return Json(json!({
1842            "ok": false,
1843            "available": false,
1844            "projection": null,
1845            "issues": [],
1846            "error": "source verification requires a single frontier source",
1847        }));
1848    };
1849    Json(
1850        serde_json::to_value(decision::load_source_verification(&path, &project))
1851            .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1852    )
1853}
1854
1855async fn http_source_ingest_plan(State(state): State<AppState>) -> Json<Value> {
1856    let source_path = state.source_path.clone();
1857    let project = state.project.lock().await;
1858    let Some(path) = source_path else {
1859        return Json(json!({
1860            "ok": false,
1861            "available": false,
1862            "projection": null,
1863            "issues": [],
1864            "error": "source ingest plan requires a single frontier source",
1865        }));
1866    };
1867    Json(
1868        serde_json::to_value(decision::load_source_ingest_plan(&path, &project))
1869            .unwrap_or_else(|_| json!({"ok": false, "error": "serialization failed"})),
1870    )
1871}
1872
1873async fn http_gaps(State(state): State<AppState>) -> Json<Value> {
1874    let project = state.project.lock().await;
1875    let gaps = project
1876        .findings
1877        .iter()
1878        .filter(|finding| finding.flags.gap || finding.flags.negative_space)
1879        .map(|finding| {
1880            json!({
1881                "id": finding.id,
1882                "assertion": finding.assertion.text,
1883                "confidence": finding.confidence.score,
1884                "conditions": finding.conditions.text,
1885                "source": finding.provenance.title,
1886            })
1887        })
1888        .collect::<Vec<_>>();
1889    Json(json!({
1890        "ok": true,
1891        "count": gaps.len(),
1892        "gaps": gaps,
1893        "caveats": ["Candidate gap rankings are review leads, not confirmed experiment targets."],
1894    }))
1895}
1896
1897async fn http_tensions(State(state): State<AppState>) -> Json<Value> {
1898    let project = state.project.lock().await;
1899    let lookup = project
1900        .findings
1901        .iter()
1902        .map(|finding| (finding.id.as_str(), finding))
1903        .collect::<HashMap<_, _>>();
1904    let mut tensions = Vec::new();
1905    for finding in &project.findings {
1906        for link in &finding.links {
1907            if link.link_type != "contradicts" {
1908                continue;
1909            }
1910            let target = lookup.get(link.target.as_str());
1911            tensions.push(json!({
1912                "source": {
1913                    "id": finding.id,
1914                    "assertion": finding.assertion.text,
1915                    "confidence": finding.confidence.score,
1916                },
1917                "target": target.map(|target| json!({
1918                    "id": target.id,
1919                    "assertion": target.assertion.text,
1920                    "confidence": target.confidence.score,
1921                })),
1922                "type": link.link_type,
1923                "note": link.note,
1924                "resolved": finding.flags.retracted || target.is_some_and(|target| target.flags.retracted),
1925            }));
1926        }
1927    }
1928    Json(json!({
1929        "ok": true,
1930        "count": tensions.len(),
1931        "tensions": tensions,
1932        "caveats": ["Candidate tensions are review surfaces, not definitive contradictions."],
1933    }))
1934}
1935
1936async fn http_observer(
1937    State(state): State<AppState>,
1938    axum::extract::Path(policy): axum::extract::Path<String>,
1939    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1940) -> Json<Value> {
1941    let project = state.project.lock().await;
1942    let args = json!({
1943        "policy": policy,
1944        "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(20),
1945    });
1946    match tool_apply_observer(&args, &project) {
1947        Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1948        Err(error) => Json(json!({"error": error})),
1949    }
1950}
1951
1952async fn http_propagate(
1953    State(state): State<AppState>,
1954    axum::extract::Path(id): axum::extract::Path<String>,
1955) -> Json<Value> {
1956    let project = state.project.lock().await;
1957    let args = json!({"finding_id": id});
1958    match tool_propagate_retraction(&args, &project) {
1959        Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1960        Err(error) => Json(json!({"error": error})),
1961    }
1962}
1963
1964async fn http_bridges(
1965    State(state): State<AppState>,
1966    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
1967) -> Json<Value> {
1968    let project = state.project.lock().await;
1969    let args = json!({
1970        "min_categories": params.get("min_categories").and_then(|v| v.parse::<u64>().ok()).unwrap_or(2),
1971        "limit": params.get("limit").and_then(|v| v.parse::<u64>().ok()).unwrap_or(15),
1972    });
1973    match tool_find_bridges(&args, &project) {
1974        Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
1975        Err(error) => Json(json!({"error": error})),
1976    }
1977}
1978
1979async fn http_stats(State(state): State<AppState>) -> Json<Value> {
1980    let project = state.project.lock().await;
1981    Json(json!({
1982        "frontier": {
1983            "name": project.project.name,
1984            "compiled_at": project.project.compiled_at,
1985            "compiler": project.project.compiler,
1986        },
1987        "stats": project.stats,
1988        "signals": signals::analyze(&project, &[]).signals,
1989    }))
1990}
1991
1992async fn http_frontiers(State(state): State<AppState>) -> Json<Value> {
1993    Json(
1994        serde_json::from_str(&frontier_index_json(&state.project_infos).unwrap_or_default())
1995            .unwrap_or_else(|_| json!({"frontier_count": 0, "frontiers": []})),
1996    )
1997}
1998
1999async fn http_pubmed(
2000    State(state): State<AppState>,
2001    axum::extract::Query(params): axum::extract::Query<HashMap<String, String>>,
2002) -> Json<Value> {
2003    let args = json!({"query": params.get("query").cloned().unwrap_or_default()});
2004    match tool_check_pubmed(&args, &state.client).await {
2005        Ok(text) => Json(serde_json::from_str(&text).unwrap_or_else(|_| json!({"result": text}))),
2006        Err(error) => Json(json!({"error": error})),
2007    }
2008}
2009
2010async fn http_tools_list() -> Json<Value> {
2011    Json(tool_registry::mcp_tools_json())
2012}
2013
2014async fn http_tool_call(
2015    State(state): State<AppState>,
2016    Json(body): Json<Value>,
2017) -> (StatusCode, Json<Value>) {
2018    let name = body["name"].as_str().unwrap_or_default();
2019    let args = &body["arguments"];
2020    let started = std::time::Instant::now();
2021    let (result, snapshot) = execute_tool(
2022        name,
2023        args,
2024        &state.project,
2025        &state.client,
2026        &state.project_infos,
2027        state.source_path.as_deref(),
2028    )
2029    .await;
2030    match result {
2031        Ok(text) => {
2032            let output = ToolResult::from_text(
2033                name,
2034                text,
2035                started.elapsed().as_millis(),
2036                false,
2037                snapshot.as_ref(),
2038            );
2039            (
2040                StatusCode::OK,
2041                Json(json!({
2042                    "result": output.markdown,
2043                    "tool": output.tool,
2044                    "ok": output.ok,
2045                    "data": output.data,
2046                    "markdown": output.markdown,
2047                    "signals": output.signals,
2048                    "caveats": output.caveats,
2049                    "duration_ms": output.duration_ms,
2050                    "metadata": output.metadata(),
2051                })),
2052            )
2053        }
2054        Err(error) => {
2055            let output = ToolResult::from_text(
2056                name,
2057                error,
2058                started.elapsed().as_millis(),
2059                true,
2060                snapshot.as_ref(),
2061            );
2062            (
2063                StatusCode::INTERNAL_SERVER_ERROR,
2064                Json(json!({
2065                    "error": output.markdown,
2066                    "tool": output.tool,
2067                    "ok": output.ok,
2068                    "data": output.data,
2069                    "markdown": output.markdown,
2070                    "signals": output.signals,
2071                    "caveats": output.caveats,
2072                    "duration_ms": output.duration_ms,
2073                    "metadata": output.metadata(),
2074                })),
2075            )
2076        }
2077    }
2078}
2079
/// Tool: search findings by free-text query, entity, entity type, and/or
/// assertion type, returning a markdown summary of up to `limit` matches.
///
/// All filters are optional and combined with AND; each absent filter
/// passes everything (`is_none_or`). Text matching is case-insensitive
/// substring containment; `entity_type` and `assertion_type` require an
/// exact (lowercased) match. `limit` defaults to 20.
fn tool_search_findings(args: &Value, frontier: &Project) -> Result<String, String> {
    // Lowercase each filter once up front; finding fields are lowercased
    // per comparison inside the filter below.
    let query = args["query"].as_str().map(str::to_lowercase);
    let entity = args["entity"].as_str().map(str::to_lowercase);
    let entity_type = args["entity_type"].as_str().map(str::to_lowercase);
    let assertion_type = args["assertion_type"].as_str().map(str::to_lowercase);
    let limit = args["limit"].as_u64().unwrap_or(20) as usize;
    let results = frontier
        .findings
        .iter()
        .filter(|finding| {
            // `query` matches assertion text, conditions text, or any
            // entity name.
            query.as_ref().is_none_or(|q| {
                finding.assertion.text.to_lowercase().contains(q)
                    || finding.conditions.text.to_lowercase().contains(q)
                    || finding
                        .assertion
                        .entities
                        .iter()
                        .any(|e| e.name.to_lowercase().contains(q))
            }) && entity.as_ref().is_none_or(|needle| {
                finding
                    .assertion
                    .entities
                    .iter()
                    .any(|e| e.name.to_lowercase().contains(needle))
            }) && entity_type.as_ref().is_none_or(|needle| {
                finding
                    .assertion
                    .entities
                    .iter()
                    .any(|e| e.entity_type.to_lowercase() == *needle)
            }) && assertion_type
                .as_ref()
                .is_none_or(|needle| finding.assertion.assertion_type.to_lowercase() == *needle)
        })
        .take(limit)
        .collect::<Vec<_>>();

    if results.is_empty() {
        return Ok("No findings matched the search criteria.".to_string());
    }
    // One markdown stanza per match: id/confidence/type header, assertion
    // text, entities, flag summary, and provenance (year "?" when unknown).
    let mut out = format!("{} findings matched:\n\n", results.len());
    for finding in results {
        let entities = finding
            .assertion
            .entities
            .iter()
            .map(|e| format!("{} ({})", e.name, e.entity_type))
            .collect::<Vec<_>>();
        out.push_str(&format!(
            "**{}** [conf: {}, type: {}]\n{}\nEntities: {}\nReplicated: {} | Gap: {} | Contested: {}\nSource: {} ({})\n\n",
            finding.id,
            finding.confidence.score,
            finding.assertion.assertion_type,
            finding.assertion.text,
            entities.join(", "),
            finding.evidence.replicated,
            finding.flags.gap,
            finding.flags.contested,
            finding.provenance.title,
            finding.provenance.year.map(|y| y.to_string()).unwrap_or_else(|| "?".to_string()),
        ));
    }
    Ok(out)
}
2144
2145fn tool_get_finding(args: &Value, frontier: &Project) -> Result<String, String> {
2146    let id = args["id"].as_str().ok_or("Missing 'id' argument")?;
2147    let finding = frontier
2148        .findings
2149        .iter()
2150        .find(|finding| finding.id == id || finding.id.starts_with(id))
2151        .ok_or_else(|| format!("Finding '{id}' not found"))?;
2152    let mut context = state::finding_context(frontier, &finding.id)?;
2153    if let Value::Object(map) = &mut context {
2154        map.insert(
2155            "caveats".to_string(),
2156            json!([
2157            "Finding-local events are canonical state transitions; review_events are projection artifacts.",
2158            "Sources identify artifacts; evidence atoms identify source-grounded units that bear on the finding."
2159            ]),
2160        );
2161    }
2162    serde_json::to_string_pretty(&context).map_err(|e| format!("Serialization error: {e}"))
2163}
2164
2165/// v0.17: chronological event log for one finding. The full canonical event
2166/// log filtered to events whose `target.id` matches the requested finding,
2167/// sorted ascending by timestamp. Useful for agents walking the supersedes
2168/// chain or auditing corrections.
2169fn tool_get_finding_history(args: &Value, frontier: &Project) -> Result<String, String> {
2170    let id = args["id"].as_str().ok_or("Missing 'id' argument")?;
2171    let mut events: Vec<&crate::events::StateEvent> = frontier
2172        .events
2173        .iter()
2174        .filter(|e| {
2175            e.target.r#type == "finding" && (e.target.id == id || e.target.id.starts_with(id))
2176        })
2177        .collect();
2178    events.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
2179    let payload = json!({
2180        "finding_id": id,
2181        "event_count": events.len(),
2182        "events": events,
2183        "caveats": [
2184            "Events are the canonical state-transition log; events without a 'finding' target are excluded.",
2185            "Use payload.new_finding_id on finding.superseded events to walk forward in the supersedes chain."
2186        ],
2187    });
2188    serde_json::to_string_pretty(&payload).map_err(|e| format!("Serialization error: {e}"))
2189}
2190
2191fn tool_list_gaps(frontier: &Project) -> Result<String, String> {
2192    let gaps = frontier
2193        .findings
2194        .iter()
2195        .filter(|finding| finding.flags.gap)
2196        .collect::<Vec<_>>();
2197    if gaps.is_empty() {
2198        return Ok("No gap-flagged findings in this frontier.".to_string());
2199    }
2200    let mut out = format!(
2201        "{} candidate gap review leads:\nTreat these as navigation signals, not confirmed experiment targets.\n\n",
2202        gaps.len()
2203    );
2204    for finding in gaps {
2205        out.push_str(&format!(
2206            "**{}** [conf: {}]\n{}\nConditions: {}\n\n",
2207            finding.id, finding.confidence.score, finding.assertion.text, finding.conditions.text
2208        ));
2209    }
2210    Ok(out)
2211}
2212
2213fn tool_list_contradictions(frontier: &Project) -> Result<String, String> {
2214    let lookup = frontier
2215        .findings
2216        .iter()
2217        .map(|finding| (finding.id.as_str(), finding))
2218        .collect::<HashMap<_, _>>();
2219    let mut contradictions = Vec::new();
2220    for finding in &frontier.findings {
2221        for link in &finding.links {
2222            if matches!(link.link_type.as_str(), "contradicts" | "disputes") {
2223                let target = lookup
2224                    .get(link.target.as_str())
2225                    .map(|f| f.assertion.text.as_str())
2226                    .unwrap_or("(unknown target)");
2227                contradictions.push(format!(
2228                    "**{}** {} **{}**\n  {} --[{}]--> {}\n  Note: {}\n",
2229                    finding.id,
2230                    link.link_type,
2231                    link.target,
2232                    trunc(&finding.assertion.text, 80),
2233                    link.link_type,
2234                    trunc(target, 80),
2235                    link.note,
2236                ));
2237            }
2238        }
2239    }
2240    if contradictions.is_empty() {
2241        return Ok("No candidate contradiction links in this frontier.".to_string());
2242    }
2243    Ok(format!(
2244        "{} candidate contradiction links:\n\n{}",
2245        contradictions.len(),
2246        contradictions.join("\n")
2247    ))
2248}
2249
2250fn tool_frontier_stats(frontier: &Project) -> Result<String, String> {
2251    serde_json::to_string_pretty(&json!({
2252        "frontier": {
2253            "name": frontier.project.name,
2254            "description": frontier.project.description,
2255            "compiled_at": frontier.project.compiled_at,
2256            "compiler": frontier.project.compiler,
2257            "papers_processed": frontier.project.papers_processed,
2258            "errors": frontier.project.errors,
2259        },
2260        "stats": frontier.stats,
2261        "source_registry": sources::source_summary(frontier),
2262        "evidence_atoms": sources::evidence_summary(frontier),
2263        "conditions": sources::condition_summary(frontier),
2264        "proposals": crate::proposals::summary(frontier),
2265        "proof_state": frontier.proof_state,
2266        "events": {
2267            "count": frontier.events.len(),
2268            "summary": events::summarize(frontier),
2269            "replay": events::replay_report(frontier),
2270        },
2271        "signals": signals::analyze(frontier, &[]).signals,
2272    }))
2273    .map_err(|e| format!("Serialization error: {e}"))
2274}
2275
2276fn tool_find_bridges(args: &Value, frontier: &Project) -> Result<String, String> {
2277    let min_categories = args["min_categories"].as_u64().unwrap_or(2) as usize;
2278    let limit = args["limit"].as_u64().unwrap_or(15) as usize;
2279    let mut entity_categories = HashMap::<String, HashSet<String>>::new();
2280    let mut entity_counts = HashMap::<String, usize>::new();
2281    for finding in &frontier.findings {
2282        for entity in &finding.assertion.entities {
2283            let key = entity.name.to_lowercase();
2284            entity_categories
2285                .entry(key.clone())
2286                .or_default()
2287                .insert(finding.assertion.assertion_type.clone());
2288            *entity_counts.entry(key).or_default() += 1;
2289        }
2290    }
2291    let mut bridges = entity_categories
2292        .iter()
2293        .filter(|(name, categories)| {
2294            categories.len() >= min_categories && !bridge::is_obvious(name)
2295        })
2296        .map(|(name, categories)| {
2297            json!({
2298                "entity": name,
2299                "categories": categories.iter().cloned().collect::<Vec<_>>(),
2300                "category_count": categories.len(),
2301                "finding_count": entity_counts.get(name).copied().unwrap_or(0),
2302            })
2303        })
2304        .collect::<Vec<_>>();
2305    bridges.sort_by(|a, b| {
2306        b["category_count"]
2307            .as_u64()
2308            .unwrap_or(0)
2309            .cmp(&a["category_count"].as_u64().unwrap_or(0))
2310    });
2311    bridges.truncate(limit);
2312    serde_json::to_string_pretty(&json!({"count": bridges.len(), "bridges": bridges}))
2313        .map_err(|e| format!("Serialization error: {e}"))
2314}
2315
2316fn tool_propagate_retraction(args: &Value, frontier: &Project) -> Result<String, String> {
2317    let id = args["finding_id"]
2318        .as_str()
2319        .ok_or("Missing 'finding_id' argument")?;
2320    let target = frontier
2321        .findings
2322        .iter()
2323        .find(|finding| finding.id == id || finding.id.starts_with(id))
2324        .ok_or_else(|| format!("Finding '{id}' not found"))?;
2325
2326    // v0.49.3: O(1) reverse-dep lookup via the denormalized index
2327    // instead of the prior O(N×L) scan over every finding × every
2328    // link. The index is built once per request — at this frontier's
2329    // size it costs microseconds; at 100K findings it stays under a
2330    // second. Filter on link_type after the lookup so "supports" /
2331    // "depends" semantics are preserved.
2332    let reverse_idx = frontier.build_reverse_dep_index();
2333    let dependent_ids = reverse_idx.dependents_of(&target.id);
2334    let id_to_finding: std::collections::HashMap<&str, &crate::bundle::FindingBundle> = frontier
2335        .findings
2336        .iter()
2337        .map(|f| (f.id.as_str(), f))
2338        .collect();
2339
2340    let mut affected = Vec::new();
2341    for dep_id in dependent_ids {
2342        let Some(dependent) = id_to_finding.get(dep_id.as_str()) else {
2343            continue;
2344        };
2345        for link in &dependent.links {
2346            if matches!(link.link_type.as_str(), "supports" | "depends") && link.target == target.id
2347            {
2348                affected.push(json!({
2349                    "id": dependent.id,
2350                    "assertion": trunc(&dependent.assertion.text, 100),
2351                    "link_type": link.link_type,
2352                }));
2353            }
2354        }
2355    }
2356    serde_json::to_string_pretty(&json!({
2357        "retracted": {"id": target.id, "assertion": trunc(&target.assertion.text, 120)},
2358        "directly_affected": affected.len(),
2359        "affected_findings": affected,
2360        "caveat": "Retraction impact is simulated over declared dependency links.",
2361    }))
2362    .map_err(|e| format!("Serialization error: {e}"))
2363}
2364
2365fn tool_apply_observer(args: &Value, frontier: &Project) -> Result<String, String> {
2366    let policy_name = args["policy"].as_str().ok_or("Missing 'policy' argument")?;
2367    let limit = args["limit"].as_u64().unwrap_or(15) as usize;
2368    let policy = observer::policy_by_name(policy_name).unwrap_or_else(observer::academic);
2369    let view = observer::observe(&frontier.findings, &frontier.replications, &policy);
2370    let top = view
2371        .findings
2372        .iter()
2373        .take(limit)
2374        .map(|scored| {
2375            let finding = frontier
2376                .findings
2377                .iter()
2378                .find(|finding| finding.id == scored.finding_id);
2379            json!({
2380                "id": scored.finding_id,
2381                "original_confidence": scored.original_confidence,
2382                "observer_score": scored.observer_score,
2383                "rank": scored.rank,
2384                "assertion": finding.map(|f| trunc(&f.assertion.text, 100)).unwrap_or_default(),
2385            })
2386        })
2387        .collect::<Vec<_>>();
2388    serde_json::to_string_pretty(&json!({
2389        "policy": policy_name,
2390        "shown": top.len(),
2391        "hidden": view.hidden,
2392        "top_findings": top,
2393        "caveat": "Observer output is policy-weighted reranking, not definitive disagreement.",
2394    }))
2395    .map_err(|e| format!("Serialization error: {e}"))
2396}
2397
2398async fn tool_check_pubmed(args: &Value, client: &Client) -> Result<String, String> {
2399    let query = args["query"].as_str().ok_or("Missing 'query' argument")?;
2400    let count = bridge::check_novelty(client, query).await?;
2401    serde_json::to_string_pretty(&json!({
2402        "query": query,
2403        "pubmed_results": count,
2404        "rough_prior_art_clear": count == 0,
2405        "caveat": "PubMed counts are rough prior-art signals, not proof of novelty.",
2406    }))
2407    .map_err(|e| format!("Serialization error: {e}"))
2408}
2409
2410fn frontier_index_json(project_infos: &[ProjectInfo]) -> Result<String, String> {
2411    let frontiers = project_infos
2412        .iter()
2413        .map(|info| {
2414            json!({
2415                "name": info.name,
2416                "file": info.file,
2417                "findings": info.findings_count,
2418                "links": info.links_count,
2419                "papers": info.papers,
2420            })
2421        })
2422        .collect::<Vec<_>>();
2423    serde_json::to_string_pretty(&json!({
2424        "frontier_count": frontiers.len(),
2425        "frontiers": frontiers,
2426    }))
2427    .map_err(|e| format!("Serialization error: {e}"))
2428}
2429
/// MCP tool: trace the declared evidence chain for one finding.
///
/// JSON args: `finding_id` (required; exact id or a unique-enough prefix) and
/// `depth` (optional, default 2). `depth` does not drive a recursive graph
/// walk — it only scales the cap on how many of the finding's own links are
/// reported (`depth * 10`, minimum 10).
///
/// Returns a pretty-printed JSON report walking source -> evidence ->
/// finding -> links -> review state, or an error string when the finding is
/// missing or serialization fails.
fn tool_trace_evidence_chain(args: &Value, frontier: &Project) -> Result<String, String> {
    let id = args["finding_id"]
        .as_str()
        .ok_or("Missing 'finding_id' argument")?;
    let depth = args["depth"].as_u64().unwrap_or(2) as usize;
    // Id -> finding index, reused below for resolving link targets.
    let lookup = frontier
        .findings
        .iter()
        .map(|finding| (finding.id.as_str(), finding))
        .collect::<HashMap<_, _>>();
    // Exact-id match first; otherwise fall back to the first finding whose id
    // starts with the given prefix (convenient for shortened ids, but an
    // ambiguous prefix silently picks the first match in file order).
    let finding = lookup
        .get(id)
        .copied()
        .or_else(|| {
            frontier
                .findings
                .iter()
                .find(|finding| finding.id.starts_with(id))
        })
        .ok_or_else(|| format!("Finding '{id}' not found"))?;
    // Report the finding's declared outgoing links, capped by `depth`.
    let links = finding
        .links
        .iter()
        .take(depth.saturating_mul(10).max(10))
        .map(|link| {
            let target = lookup.get(link.target.as_str());
            json!({
                "target": link.target,
                "type": link.link_type,
                "note": link.note,
                // None when the link points at an id not present in this frontier.
                "target_assertion": target.map(|f| trunc(&f.assertion.text, 120)),
            })
        })
        .collect::<Vec<_>>();
    let evidence_span_count = finding.evidence.evidence_spans.len();
    // Best available citation handle: DOI, then PMID, then the bare title.
    let source_ref = finding
        .provenance
        .doi
        .as_deref()
        .or(finding.provenance.pmid.as_deref())
        .unwrap_or(&finding.provenance.title);
    // Missing review metadata is treated the same as an unreviewed finding.
    let review_state = finding
        .provenance
        .review
        .as_ref()
        .map(|review| {
            if review.reviewed {
                "reviewed"
            } else {
                "pending_review"
            }
        })
        .unwrap_or("pending_review");
    // Gather everything linked to this finding from the sibling modules.
    let finding_events = events::events_for_finding(frontier, &finding.id);
    let linked_sources = sources::sources_for_finding(frontier, &finding.id);
    let linked_atoms = sources::evidence_atoms_for_finding(frontier, &finding.id);
    let linked_conditions = sources::condition_records_for_finding(frontier, &finding.id);
    let linked_proposals = crate::proposals::proposals_for_finding(frontier, &finding.id);
    serde_json::to_string_pretty(&json!({
        "finding": {"id": finding.id, "assertion": finding.assertion.text},
        "sources": linked_sources,
        "evidence_atoms": linked_atoms,
        "condition_records": linked_conditions,
        "proposals": linked_proposals,
        // Ordered pipeline view of the same data, from raw source to review state.
        "source_to_state": [
            {"step": "source", "value": linked_sources, "fallback": source_ref},
            {"step": "evidence_atom", "value": linked_atoms},
            {"step": "condition_boundary", "value": linked_conditions},
            {"step": "proposal_lineage", "value": linked_proposals},
            {"step": "legacy_evidence", "value": {"type": finding.evidence.evidence_type, "spans": evidence_span_count, "method": finding.evidence.method}},
            {"step": "finding", "value": {"id": finding.id, "assertion_type": finding.assertion.assertion_type, "confidence": finding.confidence.score}},
            {"step": "event_history", "value": finding_events},
            {"step": "links", "value": {"declared": finding.links.len()}},
            {"step": "review_state", "value": review_state}
        ],
        "state_events": finding_events,
        "path_explanation": format!(
            "source -> evidence spans ({}) -> finding {} -> {} declared links -> {}",
            evidence_span_count,
            finding.id,
            finding.links.len(),
            review_state
        ),
        "depth": depth,
        "links": links,
        "caveat": "Evidence-chain strength is heuristic and depends on declared links.",
    }))
    .map_err(|e| format!("Serialization error: {e}"))
}
2519
2520fn clone_project(project: &Project) -> Project {
2521    serde_json::from_value(serde_json::to_value(project).unwrap_or_default()).unwrap_or_else(|_| {
2522        project::assemble("unavailable", Vec::new(), 0, 1, "failed to clone frontier")
2523    })
2524}
2525
2526fn json_rpc_result(id: &Option<Value>, result: Value) -> Value {
2527    json!({"jsonrpc": "2.0", "id": id, "result": result})
2528}
2529
2530fn json_rpc_error(id: &Option<Value>, code: i32, message: &str) -> Value {
2531    json!({"jsonrpc": "2.0", "id": id, "error": {"code": code, "message": message}})
2532}
2533
/// Truncate `s` to at most `max` bytes, appending "..." when anything was
/// cut. The cut point is moved backwards to the nearest UTF-8 character
/// boundary so slicing never panics inside a multi-byte character.
fn trunc(s: &str, max: usize) -> String {
    if s.len() <= max {
        return s.to_string();
    }
    // 0 is always a char boundary, so this search always succeeds.
    let cut = (0..=max)
        .rev()
        .find(|&i| s.is_char_boundary(i))
        .unwrap_or(0);
    format!("{}...", &s[..cut])
}