Skip to main content

tandem_tools/
lib.rs

1use std::collections::{hash_map::DefaultHasher, HashMap, HashSet};
2use std::hash::{Hash, Hasher};
3use std::path::{Path, PathBuf};
4use std::process::Stdio;
5use std::sync::atomic::{AtomicU64, Ordering};
6use std::sync::Arc;
7use std::time::Duration;
8
9use anyhow::anyhow;
10use async_trait::async_trait;
11use ignore::WalkBuilder;
12use regex::Regex;
13use serde_json::{json, Value};
14use tandem_memory::embeddings::{get_embedding_service, EmbeddingService};
15use tandem_skills::SkillService;
16use tokio::fs;
17use tokio::process::Command;
18use tokio::sync::RwLock;
19use tokio_util::sync::CancellationToken;
20
21use futures_util::StreamExt;
22use tandem_agent_teams::compat::{
23    send_message_schema, task_create_schema, task_list_schema, task_schema, task_update_schema,
24    team_create_schema,
25};
26use tandem_agent_teams::{
27    AgentTeamPaths, SendMessageInput, SendMessageType, TaskCreateInput, TaskInput, TaskListInput,
28    TaskUpdateInput, TeamCreateInput,
29};
30use tandem_memory::types::{MemorySearchResult, MemoryTier};
31use tandem_memory::MemoryManager;
32use tandem_types::{ToolResult, ToolSchema};
33
34mod builtin_tools;
35mod tool_metadata;
36use builtin_tools::*;
37use tool_metadata::*;
38
/// A single invocable tool exposed to the agent.
///
/// Implementations must be `Send + Sync` because the registry shares them
/// across async tasks behind `Arc`s.
#[async_trait]
pub trait Tool: Send + Sync {
    /// Describes the tool (name, description, JSON input schema) to the model.
    fn schema(&self) -> ToolSchema;
    /// Runs the tool with the given JSON arguments.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult>;
    /// Cancellation-aware variant; the default implementation ignores the
    /// token and simply delegates to [`Tool::execute`].
    async fn execute_with_cancel(
        &self,
        args: Value,
        _cancel: CancellationToken,
    ) -> anyhow::Result<ToolResult> {
        self.execute(args).await
    }
}
51
/// Thread-safe tool registry; cloning is cheap (all state is behind `Arc`s).
#[derive(Clone)]
pub struct ToolRegistry {
    // Registration key -> tool. Keys may be aliases that differ from the
    // tool's schema name (e.g. "todowrite" maps to the todo_write tool).
    tools: Arc<RwLock<HashMap<String, Arc<dyn Tool>>>>,
    // Indexed-name -> embedding vector, used for semantic tool retrieval.
    tool_vectors: Arc<RwLock<HashMap<String, Vec<f32>>>>,
}
57
impl ToolRegistry {
    /// Builds a registry pre-populated with every builtin tool.
    ///
    /// The websearch tool is only registered when a search backend can be
    /// resolved from the environment; the todo tool is registered under three
    /// alias keys so different model naming conventions all resolve.
    pub fn new() -> Self {
        let mut map: HashMap<String, Arc<dyn Tool>> = HashMap::new();
        map.insert("bash".to_string(), Arc::new(BashTool));
        map.insert("read".to_string(), Arc::new(ReadTool));
        map.insert("write".to_string(), Arc::new(WriteTool));
        map.insert("edit".to_string(), Arc::new(EditTool));
        map.insert("glob".to_string(), Arc::new(GlobTool));
        map.insert("grep".to_string(), Arc::new(GrepTool));
        map.insert("webfetch".to_string(), Arc::new(WebFetchTool));
        map.insert("webfetch_html".to_string(), Arc::new(WebFetchHtmlTool));
        map.insert("mcp_debug".to_string(), Arc::new(McpDebugTool));
        let search_backend = SearchBackend::from_env();
        if search_backend.is_enabled() {
            map.insert(
                "websearch".to_string(),
                Arc::new(WebSearchTool {
                    backend: search_backend,
                }),
            );
        } else {
            tracing::info!(
                reason = search_backend.disabled_reason().unwrap_or("unknown"),
                "builtin websearch disabled because no search backend is configured"
            );
        }
        map.insert("codesearch".to_string(), Arc::new(CodeSearchTool));
        // One todo tool, three alias keys.
        let todo_tool: Arc<dyn Tool> = Arc::new(TodoWriteTool);
        map.insert("todo_write".to_string(), todo_tool.clone());
        map.insert("todowrite".to_string(), todo_tool.clone());
        map.insert("update_todo_list".to_string(), todo_tool);
        map.insert("task".to_string(), Arc::new(TaskTool));
        map.insert("question".to_string(), Arc::new(QuestionTool));
        map.insert("spawn_agent".to_string(), Arc::new(SpawnAgentTool));
        map.insert("skill".to_string(), Arc::new(SkillTool));
        map.insert("memory_store".to_string(), Arc::new(MemoryStoreTool));
        map.insert("memory_list".to_string(), Arc::new(MemoryListTool));
        map.insert("memory_search".to_string(), Arc::new(MemorySearchTool));
        map.insert("memory_delete".to_string(), Arc::new(MemoryDeleteTool));
        map.insert("apply_patch".to_string(), Arc::new(ApplyPatchTool));
        map.insert("batch".to_string(), Arc::new(BatchTool));
        map.insert("lsp".to_string(), Arc::new(LspTool));
        // Agent-team compatibility tools.
        map.insert("teamcreate".to_string(), Arc::new(TeamCreateTool));
        map.insert("taskcreate".to_string(), Arc::new(TaskCreateCompatTool));
        map.insert("taskupdate".to_string(), Arc::new(TaskUpdateCompatTool));
        map.insert("tasklist".to_string(), Arc::new(TaskListCompatTool));
        map.insert("sendmessage".to_string(), Arc::new(SendMessageCompatTool));
        Self {
            tools: Arc::new(RwLock::new(map)),
            tool_vectors: Arc::new(RwLock::new(HashMap::new())),
        }
    }
110
    /// Returns every distinct tool schema, sorted alphabetically by name.
    ///
    /// Registrations are deduplicated by schema name; when several alias
    /// registrations share one schema, whichever is visited first wins
    /// (HashMap iteration order, i.e. arbitrary but harmless since aliases
    /// share the same underlying tool).
    pub async fn list(&self) -> Vec<ToolSchema> {
        let mut dedup: HashMap<String, ToolSchema> = HashMap::new();
        for schema in self.tools.read().await.values().map(|t| t.schema()) {
            dedup.entry(schema.name.clone()).or_insert(schema);
        }
        let mut schemas = dedup.into_values().collect::<Vec<_>>();
        schemas.sort_by(|a, b| a.name.cmp(&b.name));
        schemas
    }

    /// Registers (or replaces) a tool under `name` and indexes its embedding.
    ///
    /// When `name` is an alias differing from the schema name, both keys are
    /// indexed so semantic retrieval can resolve either.
    pub async fn register_tool(&self, name: String, tool: Arc<dyn Tool>) {
        let schema = tool.schema();
        self.tools.write().await.insert(name.clone(), tool);
        self.index_tool_schema(&schema).await;
        if name != schema.name {
            self.index_tool_name(&name, &schema).await;
        }
    }
129
130    pub async fn unregister_tool(&self, name: &str) -> bool {
131        let removed = self.tools.write().await.remove(name);
132        self.tool_vectors.write().await.remove(name);
133        if let Some(tool) = removed {
134            let schema_name = tool.schema().name;
135            self.tool_vectors.write().await.remove(&schema_name);
136            return true;
137        }
138        false
139    }
140
    /// Removes every registration whose key starts with `prefix` (used to
    /// drop all tools of one MCP server at once). Returns the number of
    /// registrations removed.
    pub async fn unregister_by_prefix(&self, prefix: &str) -> usize {
        let mut tools = self.tools.write().await;
        let keys = tools
            .keys()
            .filter(|name| name.starts_with(prefix))
            .cloned()
            .collect::<Vec<_>>();
        let removed = keys.len();
        let mut removed_schema_names = Vec::new();
        for key in keys {
            if let Some(tool) = tools.remove(&key) {
                removed_schema_names.push(tool.schema().name);
            }
        }
        // Release the tools lock before touching the vector index to keep the
        // two locks from ever being held simultaneously here.
        drop(tools);
        // Purge vectors for both the prefixed registration keys and the
        // (possibly different) schema names of the removed tools.
        let mut vectors = self.tool_vectors.write().await;
        vectors.retain(|name, _| {
            !name.starts_with(prefix) && !removed_schema_names.iter().any(|schema| schema == name)
        });
        removed
    }

    /// Re-embeds every registered tool schema into the retrieval index.
    ///
    /// Leaves the existing index untouched when the embedding service is
    /// unavailable or batch embedding fails; `retrieve` then falls back to
    /// listing everything.
    pub async fn index_all(&self) {
        let schemas = self.list().await;
        if schemas.is_empty() {
            self.tool_vectors.write().await.clear();
            return;
        }
        let texts = schemas
            .iter()
            .map(|schema| format!("{}: {}", schema.name, schema.description))
            .collect::<Vec<_>>();
        let service = get_embedding_service().await;
        let service = service.lock().await;
        if !service.is_available() {
            return;
        }
        let Ok(vectors) = service.embed_batch(&texts).await else {
            return;
        };
        // Release the service lock before taking the vector index lock.
        drop(service);
        let mut indexed = HashMap::new();
        for (schema, vector) in schemas.into_iter().zip(vectors) {
            indexed.insert(schema.name, vector);
        }
        *self.tool_vectors.write().await = indexed;
    }

    /// Indexes a schema under its own schema name.
    async fn index_tool_schema(&self, schema: &ToolSchema) {
        self.index_tool_name(&schema.name, schema).await;
    }

    /// Embeds "name: description" and stores the vector under `name` (which
    /// may be an alias differing from `schema.name`). No-op when the
    /// embedding service is unavailable or embedding fails.
    async fn index_tool_name(&self, name: &str, schema: &ToolSchema) {
        let text = format!("{}: {}", schema.name, schema.description);
        let service = get_embedding_service().await;
        let service = service.lock().await;
        if !service.is_available() {
            return;
        }
        let Ok(vector) = service.embed(&text).await else {
            return;
        };
        drop(service);
        self.tool_vectors
            .write()
            .await
            .insert(name.to_string(), vector);
    }
209
    /// Returns up to `k` tool schemas most semantically similar to `query`.
    ///
    /// `k == 0` returns nothing. Otherwise the method falls back to the full
    /// `list()` whenever semantic retrieval cannot produce results: the
    /// embedding service is unavailable, the query fails to embed, the vector
    /// index is empty, or no scored entry maps to a live tool.
    pub async fn retrieve(&self, query: &str, k: usize) -> Vec<ToolSchema> {
        if k == 0 {
            return Vec::new();
        }
        let service = get_embedding_service().await;
        let service = service.lock().await;
        if !service.is_available() {
            // Drop the service lock before awaiting list() to avoid holding
            // it across an unrelated await.
            drop(service);
            return self.list().await;
        }
        let Ok(query_vec) = service.embed(query).await else {
            drop(service);
            return self.list().await;
        };
        drop(service);

        let vectors = self.tool_vectors.read().await;
        if vectors.is_empty() {
            drop(vectors);
            return self.list().await;
        }
        let tools = self.tools.read().await;
        // Score every indexed name by cosine similarity to the query.
        let mut scored = vectors
            .iter()
            .map(|(name, vector)| {
                (
                    EmbeddingService::cosine_similarity(&query_vec, vector),
                    name.clone(),
                )
            })
            .collect::<Vec<_>>();
        // Highest similarity first; ties broken alphabetically so the order
        // is deterministic.
        scored.sort_by(|a, b| {
            b.0.partial_cmp(&a.0)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| a.1.cmp(&b.1))
        });
        let mut out = Vec::new();
        let mut seen = HashSet::new();
        // Take the top-k indexed names, then dedupe by schema name (aliases
        // may map to the same tool), skipping stale index entries.
        for (_, name) in scored.into_iter().take(k) {
            let Some(tool) = tools.get(&name) else {
                continue;
            };
            let schema = tool.schema();
            if seen.insert(schema.name.clone()) {
                out.push(schema);
            }
        }
        if out.is_empty() {
            self.list().await
        } else {
            out
        }
    }
263
264    pub async fn mcp_server_names(&self) -> Vec<String> {
265        let mut names = HashSet::new();
266        for schema in self.list().await {
267            let mut parts = schema.name.split('.');
268            if parts.next() == Some("mcp") {
269                if let Some(server) = parts.next() {
270                    if !server.trim().is_empty() {
271                        names.insert(server.to_string());
272                    }
273                }
274            }
275        }
276        let mut sorted = names.into_iter().collect::<Vec<_>>();
277        sorted.sort();
278        sorted
279    }
280
    /// Executes a tool by its (possibly aliased or namespaced) name.
    ///
    /// Unknown tools produce an `Ok` result carrying an explanatory message
    /// rather than an `Err`, so the model sees the problem as tool output.
    pub async fn execute(&self, name: &str, args: Value) -> anyhow::Result<ToolResult> {
        let tool = {
            // Scope the read lock so it is released before the tool runs.
            let tools = self.tools.read().await;
            resolve_registered_tool(&tools, name)
        };
        let Some(tool) = tool else {
            return Ok(ToolResult {
                output: format!("Unknown tool: {name}"),
                metadata: json!({}),
            });
        };
        tool.execute(args).await
    }

    /// Like [`ToolRegistry::execute`], but forwards a cancellation token to
    /// the tool's `execute_with_cancel`.
    pub async fn execute_with_cancel(
        &self,
        name: &str,
        args: Value,
        cancel: CancellationToken,
    ) -> anyhow::Result<ToolResult> {
        let tool = {
            let tools = self.tools.read().await;
            resolve_registered_tool(&tools, name)
        };
        let Some(tool) = tool else {
            return Ok(ToolResult {
                output: format!("Unknown tool: {name}"),
                metadata: json!({}),
            });
        };
        tool.execute_with_cancel(args, cancel).await
    }
}
314
/// Payload-free discriminant of [`SearchBackend`], used for naming/logging.
#[derive(Clone, Debug, PartialEq, Eq)]
enum SearchBackendKind {
    Disabled,
    Auto,
    Tandem,
    Searxng,
    Exa,
    Brave,
}

/// A websearch backend resolved from environment variables.
#[derive(Clone, Debug)]
enum SearchBackend {
    /// Websearch is off; `reason` explains why (surfaced in logs).
    Disabled {
        reason: String,
    },
    /// Ordered failover chain over several configured backends.
    Auto {
        backends: Vec<SearchBackend>,
    },
    /// Tandem's hosted search service.
    Tandem {
        base_url: String,
        timeout_ms: u64,
    },
    /// A SearxNG instance; `engines` optionally narrows the engine list.
    Searxng {
        base_url: String,
        engines: Option<String>,
        timeout_ms: u64,
    },
    /// The Exa search API.
    Exa {
        api_key: String,
        timeout_ms: u64,
    },
    /// The Brave Search API.
    Brave {
        api_key: String,
        timeout_ms: u64,
    },
}
351
impl SearchBackend {
    /// Resolves the websearch backend from the environment.
    ///
    /// `TANDEM_SEARCH_BACKEND` selects an explicit backend (`none`/`disabled`,
    /// `auto`, `tandem`, `searxng`, `exa`, `brave`); any other value disables
    /// websearch with an explanatory reason. When the variable is unset, the
    /// auto-detection path is used.
    fn from_env() -> Self {
        let explicit = std::env::var("TANDEM_SEARCH_BACKEND")
            .ok()
            .map(|value| value.trim().to_ascii_lowercase())
            .filter(|value| !value.is_empty());
        let timeout_ms = search_backend_timeout_ms();

        match explicit.as_deref() {
            Some("none") | Some("disabled") => {
                return Self::Disabled {
                    reason: "TANDEM_SEARCH_BACKEND explicitly disabled websearch".to_string(),
                };
            }
            Some("auto") => {
                return search_backend_from_auto_env(timeout_ms);
            }
            Some("tandem") => {
                // Explicit selection allows the default hosted URL.
                return search_backend_from_tandem_env(timeout_ms, true);
            }
            Some("searxng") => {
                return search_backend_from_searxng_env(timeout_ms).unwrap_or_else(|| {
                    Self::Disabled {
                        reason: "TANDEM_SEARCH_BACKEND=searxng but TANDEM_SEARXNG_URL is missing"
                            .to_string(),
                    }
                });
            }
            Some("exa") => {
                return search_backend_from_exa_env(timeout_ms).unwrap_or_else(|| Self::Disabled {
                    reason:
                        "TANDEM_SEARCH_BACKEND=exa but EXA_API_KEY/TANDEM_EXA_API_KEY is missing"
                            .to_string(),
                });
            }
            Some("brave") => {
                return search_backend_from_brave_env(timeout_ms).unwrap_or_else(|| {
                    Self::Disabled {
                        reason:
                            "TANDEM_SEARCH_BACKEND=brave but BRAVE_SEARCH_API_KEY/TANDEM_BRAVE_SEARCH_API_KEY is missing"
                                .to_string(),
                    }
                });
            }
            Some(other) => {
                return Self::Disabled {
                    reason: format!(
                        "TANDEM_SEARCH_BACKEND `{other}` is unsupported; expected auto, tandem, searxng, exa, brave, or none"
                    ),
                };
            }
            None => {}
        }
        // No explicit selection: auto-detect from whatever is configured.
        search_backend_from_auto_env(timeout_ms)
    }

    /// False only for the `Disabled` variant.
    fn is_enabled(&self) -> bool {
        !matches!(self, Self::Disabled { .. })
    }

    /// Maps the variant to its payload-free discriminant.
    fn kind(&self) -> SearchBackendKind {
        match self {
            Self::Disabled { .. } => SearchBackendKind::Disabled,
            Self::Auto { .. } => SearchBackendKind::Auto,
            Self::Tandem { .. } => SearchBackendKind::Tandem,
            Self::Searxng { .. } => SearchBackendKind::Searxng,
            Self::Exa { .. } => SearchBackendKind::Exa,
            Self::Brave { .. } => SearchBackendKind::Brave,
        }
    }

    /// Stable lowercase backend name (used in logs/metadata).
    fn name(&self) -> &'static str {
        match self.kind() {
            SearchBackendKind::Disabled => "disabled",
            SearchBackendKind::Auto => "auto",
            SearchBackendKind::Tandem => "tandem",
            SearchBackendKind::Searxng => "searxng",
            SearchBackendKind::Exa => "exa",
            SearchBackendKind::Brave => "brave",
        }
    }

    /// Why websearch is disabled, when it is.
    fn disabled_reason(&self) -> Option<&str> {
        match self {
            Self::Disabled { reason } => Some(reason.as_str()),
            _ => None,
        }
    }

    /// Backend-specific description exposed in the websearch tool schema.
    fn schema_description(&self) -> String {
        match self {
            Self::Auto { .. } => {
                "Search web results using the configured search backends with automatic failover"
                    .to_string()
            }
            Self::Tandem { .. } => {
                "Search web results using Tandem's hosted search backend".to_string()
            }
            Self::Searxng { .. } => {
                "Search web results using the configured SearxNG backend".to_string()
            }
            Self::Exa { .. } => "Search web results using the configured Exa backend".to_string(),
            Self::Brave { .. } => {
                "Search web results using the configured Brave Search backend".to_string()
            }
            Self::Disabled { .. } => {
                "Search web results using the configured search backend".to_string()
            }
        }
    }
}
463
/// Reports whether the named environment variable is set to a value that
/// contains at least one non-whitespace character.
fn has_nonempty_env_var(name: &str) -> bool {
    match std::env::var(name) {
        Ok(value) => !value.trim().is_empty(),
        Err(_) => false,
    }
}
470
/// Reads the websearch timeout from `TANDEM_SEARCH_TIMEOUT_MS`.
///
/// Defaults to 10s when the variable is unset or unparsable, and clamps the
/// result into the 1s..=120s range.
fn search_backend_timeout_ms() -> u64 {
    const DEFAULT_MS: u64 = 10_000;
    const MIN_MS: u64 = 1_000;
    const MAX_MS: u64 = 120_000;
    let configured = match std::env::var("TANDEM_SEARCH_TIMEOUT_MS") {
        Ok(raw) => raw.trim().parse::<u64>().ok(),
        Err(_) => None,
    };
    configured.unwrap_or(DEFAULT_MS).clamp(MIN_MS, MAX_MS)
}
478
479fn search_backend_from_tandem_env(timeout_ms: u64, allow_default_url: bool) -> SearchBackend {
480    const DEFAULT_TANDEM_SEARCH_URL: &str = "https://search.tandem.ac";
481    let base_url = std::env::var("TANDEM_SEARCH_URL")
482        .ok()
483        .map(|value| value.trim().trim_end_matches('/').to_string())
484        .filter(|value| !value.is_empty())
485        .or_else(|| allow_default_url.then(|| DEFAULT_TANDEM_SEARCH_URL.to_string()));
486    match base_url {
487        Some(base_url) => SearchBackend::Tandem {
488            base_url,
489            timeout_ms,
490        },
491        None => SearchBackend::Disabled {
492            reason: "TANDEM_SEARCH_BACKEND=tandem but TANDEM_SEARCH_URL is missing".to_string(),
493        },
494    }
495}
496
497fn search_backend_from_searxng_env(timeout_ms: u64) -> Option<SearchBackend> {
498    let base_url = std::env::var("TANDEM_SEARXNG_URL").ok()?;
499    let base_url = base_url.trim().trim_end_matches('/').to_string();
500    if base_url.is_empty() {
501        return None;
502    }
503    let engines = std::env::var("TANDEM_SEARXNG_ENGINES")
504        .ok()
505        .map(|value| value.trim().to_string())
506        .filter(|value| !value.is_empty());
507    Some(SearchBackend::Searxng {
508        base_url,
509        engines,
510        timeout_ms,
511    })
512}
513
514fn search_backend_from_exa_env(timeout_ms: u64) -> Option<SearchBackend> {
515    let api_key = std::env::var("TANDEM_EXA_API_KEY")
516        .ok()
517        .or_else(|| std::env::var("TANDEM_EXA_SEARCH_API_KEY").ok())
518        .or_else(|| std::env::var("EXA_API_KEY").ok())?;
519    let api_key = api_key.trim().to_string();
520    if api_key.is_empty() {
521        return None;
522    }
523    Some(SearchBackend::Exa {
524        api_key,
525        timeout_ms,
526    })
527}
528
529fn search_backend_from_brave_env(timeout_ms: u64) -> Option<SearchBackend> {
530    let api_key = std::env::var("TANDEM_BRAVE_SEARCH_API_KEY")
531        .ok()
532        .or_else(|| std::env::var("BRAVE_SEARCH_API_KEY").ok())?;
533    let api_key = api_key.trim().to_string();
534    if api_key.is_empty() {
535        return None;
536    }
537    Some(SearchBackend::Brave {
538        api_key,
539        timeout_ms,
540    })
541}
542
543fn search_backend_auto_candidates(timeout_ms: u64) -> Vec<SearchBackend> {
544    let mut backends = Vec::new();
545
546    if has_nonempty_env_var("TANDEM_SEARCH_URL") {
547        backends.push(search_backend_from_tandem_env(timeout_ms, false));
548    }
549    if let Some(config) = search_backend_from_searxng_env(timeout_ms) {
550        backends.push(config);
551    }
552    if let Some(config) = search_backend_from_brave_env(timeout_ms) {
553        backends.push(config);
554    }
555    if let Some(config) = search_backend_from_exa_env(timeout_ms) {
556        backends.push(config);
557    }
558    if backends.is_empty() {
559        backends.push(search_backend_from_tandem_env(timeout_ms, true));
560    }
561
562    backends
563        .into_iter()
564        .filter(|backend| !matches!(backend, SearchBackend::Disabled { .. }))
565        .collect()
566}
567
568fn search_backend_from_auto_env(timeout_ms: u64) -> SearchBackend {
569    let backends = search_backend_auto_candidates(timeout_ms);
570    match backends.len() {
571        0 => SearchBackend::Disabled {
572            reason:
573                "set TANDEM_SEARCH_URL or configure tandem, searxng, brave, or exa to enable websearch"
574                    .to_string(),
575        },
576        1 => backends.into_iter().next().expect("single backend"),
577        _ => SearchBackend::Auto { backends },
578    }
579}
580
/// One normalized result row produced by any of the search backends.
#[derive(Clone, Debug, serde::Serialize)]
struct SearchResultEntry {
    title: String,
    url: String,
    snippet: String,
    // Which backend produced this row (e.g. "exa", "brave").
    source: String,
}
588
/// Maps assorted tool-name aliases onto the canonical registry key.
///
/// Normalizes case and dashes first, then folds known synonyms (todo and
/// shell variants) into their canonical names.
fn canonical_tool_name(name: &str) -> String {
    let normalized = name.trim().to_ascii_lowercase().replace('-', "_");
    let canonical: &str = match normalized.as_str() {
        "todowrite" | "update_todo_list" | "update_todos" => "todo_write",
        "run_command" | "shell" | "powershell" | "cmd" => "bash",
        other => other,
    };
    canonical.to_string()
}
596
/// Strips a recognized wrapper/namespace prefix (e.g. `functions.`,
/// `builtin:`) from a tool name, returning the remainder when it is
/// non-empty after trimming. Prefixes are tried in order; a match whose
/// remainder is blank falls through to the next prefix.
fn strip_known_tool_namespace(name: &str) -> Option<String> {
    const PREFIXES: [&str; 8] = [
        "default_api:",
        "default_api.",
        "functions.",
        "function.",
        "tools.",
        "tool.",
        "builtin:",
        "builtin.",
    ];
    PREFIXES.iter().copied().find_map(|prefix| {
        let rest = name.strip_prefix(prefix)?.trim();
        if rest.is_empty() {
            None
        } else {
            Some(rest.to_string())
        }
    })
}
618
619fn resolve_registered_tool(
620    tools: &HashMap<String, Arc<dyn Tool>>,
621    requested_name: &str,
622) -> Option<Arc<dyn Tool>> {
623    let canonical = canonical_tool_name(requested_name);
624    if let Some(tool) = tools.get(&canonical) {
625        return Some(tool.clone());
626    }
627    if let Some(stripped) = strip_known_tool_namespace(&canonical) {
628        let stripped = canonical_tool_name(&stripped);
629        if let Some(tool) = tools.get(&stripped) {
630            return Some(tool.clone());
631        }
632    }
633    None
634}
635
636fn is_batch_wrapper_tool_name(name: &str) -> bool {
637    matches!(
638        canonical_tool_name(name).as_str(),
639        "default_api" | "default" | "api" | "function" | "functions" | "tool" | "tools"
640    )
641}
642
/// Convenience alias used by the batch-call resolver: extracts a trimmed,
/// non-empty string view from an optional JSON value.
fn non_empty_batch_str(value: Option<&Value>) -> Option<&str> {
    trimmed_non_empty_str(value)
}
646
/// Best-effort extraction of the target tool name from one entry of a batch
/// call, tolerating the many shapes different models emit: top-level `tool`
/// or `name` strings, nested `tool`/`function`/`function_call`/`call`
/// objects, and generic wrapper namespaces like `default_api`.
fn resolve_batch_call_tool_name(call: &Value) -> Option<String> {
    // Candidate "tool" value: top-level string, or the relevant member of a
    // nested object, checked in a fixed precedence order.
    let tool = non_empty_batch_str(call.get("tool"))
        .or_else(|| {
            call.get("tool")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("name")))
        })
        .or_else(|| {
            call.get("function")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("tool")))
        })
        .or_else(|| {
            call.get("function_call")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("tool")))
        })
        .or_else(|| {
            call.get("call")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("tool")))
        });
    // Candidate "name" value, checked across the same nesting shapes.
    let name = non_empty_batch_str(call.get("name"))
        .or_else(|| {
            call.get("function")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("name")))
        })
        .or_else(|| {
            call.get("function_call")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("name")))
        })
        .or_else(|| {
            call.get("call")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("name")))
        })
        .or_else(|| {
            call.get("tool")
                .and_then(|v| v.as_object())
                .and_then(|obj| non_empty_batch_str(obj.get("name")))
        });

    // Prefer `tool` unless it is a generic wrapper (then `name` wins);
    // strip known namespaces such as `functions.` in either case.
    match (tool, name) {
        (Some(t), Some(n)) => {
            if is_batch_wrapper_tool_name(t) {
                Some(n.to_string())
            } else if let Some(stripped) = strip_known_tool_namespace(t) {
                Some(stripped)
            } else {
                Some(t.to_string())
            }
        }
        (Some(t), None) => {
            if is_batch_wrapper_tool_name(t) {
                None
            } else if let Some(stripped) = strip_known_tool_namespace(t) {
                Some(stripped)
            } else {
                Some(t.to_string())
            }
        }
        (None, Some(n)) => Some(n.to_string()),
        (None, None) => None,
    }
}
714
715impl Default for ToolRegistry {
716    fn default() -> Self {
717        Self::new()
718    }
719}
720
/// Describes why a tool's JSON input schema was rejected.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ToolSchemaValidationError {
    /// Name of the offending tool.
    pub tool_name: String,
    /// JSON-path-like location of the invalid node (e.g. `$.properties.x`).
    pub path: String,
    /// Human-readable description of the violation.
    pub reason: String,
}

impl std::fmt::Display for ToolSchemaValidationError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "invalid tool schema `{}` at `{}`: {}",
            self.tool_name, self.path, self.reason
        )
    }
}

impl std::error::Error for ToolSchemaValidationError {}
739
740pub fn validate_tool_schemas(schemas: &[ToolSchema]) -> Result<(), ToolSchemaValidationError> {
741    for schema in schemas {
742        validate_schema_node(&schema.name, "$", &schema.input_schema)?;
743    }
744    Ok(())
745}
746
747fn validate_schema_node(
748    tool_name: &str,
749    path: &str,
750    value: &Value,
751) -> Result<(), ToolSchemaValidationError> {
752    let Some(obj) = value.as_object() else {
753        if let Some(arr) = value.as_array() {
754            for (idx, item) in arr.iter().enumerate() {
755                validate_schema_node(tool_name, &format!("{path}[{idx}]"), item)?;
756            }
757        }
758        return Ok(());
759    };
760
761    if obj.get("type").and_then(|t| t.as_str()) == Some("array") && !obj.contains_key("items") {
762        return Err(ToolSchemaValidationError {
763            tool_name: tool_name.to_string(),
764            path: path.to_string(),
765            reason: "array schema missing items".to_string(),
766        });
767    }
768
769    if let Some(items) = obj.get("items") {
770        validate_schema_node(tool_name, &format!("{path}.items"), items)?;
771    }
772    if let Some(props) = obj.get("properties").and_then(|v| v.as_object()) {
773        for (key, child) in props {
774            validate_schema_node(tool_name, &format!("{path}.properties.{key}"), child)?;
775        }
776    }
777    if let Some(additional_props) = obj.get("additionalProperties") {
778        validate_schema_node(
779            tool_name,
780            &format!("{path}.additionalProperties"),
781            additional_props,
782        )?;
783    }
784    if let Some(one_of) = obj.get("oneOf").and_then(|v| v.as_array()) {
785        for (idx, child) in one_of.iter().enumerate() {
786            validate_schema_node(tool_name, &format!("{path}.oneOf[{idx}]"), child)?;
787        }
788    }
789    if let Some(any_of) = obj.get("anyOf").and_then(|v| v.as_array()) {
790        for (idx, child) in any_of.iter().enumerate() {
791            validate_schema_node(tool_name, &format!("{path}.anyOf[{idx}]"), child)?;
792        }
793    }
794    if let Some(all_of) = obj.get("allOf").and_then(|v| v.as_array()) {
795        for (idx, child) in all_of.iter().enumerate() {
796            validate_schema_node(tool_name, &format!("{path}.allOf[{idx}]"), child)?;
797        }
798    }
799
800    Ok(())
801}
802
803fn workspace_root_from_args(args: &Value) -> Option<PathBuf> {
804    args.get("__workspace_root")
805        .and_then(|v| v.as_str())
806        .map(str::trim)
807        .filter(|s| !s.is_empty())
808        .map(PathBuf::from)
809}
810
811fn effective_cwd_from_args(args: &Value) -> PathBuf {
812    args.get("__effective_cwd")
813        .and_then(|v| v.as_str())
814        .map(str::trim)
815        .filter(|s| !s.is_empty())
816        .map(PathBuf::from)
817        .or_else(|| workspace_root_from_args(args))
818        .or_else(|| std::env::current_dir().ok())
819        .unwrap_or_else(|| PathBuf::from("."))
820}
821
/// Lexically normalizes a path for containment comparison: `.` components
/// are dropped and `..` pops the previous component (without touching the
/// filesystem, so non-existent paths work too).
fn normalize_path_for_compare(path: &Path) -> PathBuf {
    path.components().fold(PathBuf::new(), |mut acc, component| {
        match component {
            std::path::Component::CurDir => {}
            std::path::Component::ParentDir => {
                let _ = acc.pop();
            }
            keep => acc.push(keep.as_os_str()),
        }
        acc
    })
}
835
836fn normalize_existing_or_lexical(path: &Path) -> PathBuf {
837    path.canonicalize()
838        .unwrap_or_else(|_| normalize_path_for_compare(path))
839}
840
/// Sandboxing containment check: is `path` inside `workspace_root`?
///
/// Uses a lexical comparison first and a canonical (symlink-resolving)
/// comparison as a fallback; either passing is sufficient.
fn is_within_workspace_root(path: &Path, workspace_root: &Path) -> bool {
    // First compare lexical-normalized paths so non-existent target files under symlinked
    // workspace roots still pass containment checks.
    let candidate_lexical = normalize_path_for_compare(path);
    let root_lexical = normalize_path_for_compare(workspace_root);
    if candidate_lexical.starts_with(&root_lexical) {
        return true;
    }

    // Fallback to canonical comparison when available (best for existing paths and symlink
    // resolution consistency).
    let candidate = normalize_existing_or_lexical(path);
    let root = normalize_existing_or_lexical(workspace_root);
    candidate.starts_with(root)
}
856
/// Resolves a model-supplied path argument into an absolute path, enforcing
/// workspace sandboxing.
///
/// Returns `None` when the token is empty/malformed, a relative path tries
/// to climb with `..`, or the resolved path falls outside the workspace
/// root. Absolute paths are only accepted when a workspace root is present
/// to validate them against.
fn resolve_tool_path(path: &str, args: &Value) -> Option<PathBuf> {
    let trimmed = path.trim();
    if trimmed.is_empty() {
        return None;
    }
    // "." style tokens resolve to the effective cwd (still sandbox-checked).
    if trimmed == "." || trimmed == "./" || trimmed == ".\\" {
        let cwd = effective_cwd_from_args(args);
        if let Some(workspace_root) = workspace_root_from_args(args) {
            if !is_within_workspace_root(&cwd, &workspace_root) {
                return None;
            }
        }
        return Some(cwd);
    }
    if is_root_only_path_token(trimmed) || is_malformed_tool_path_token(trimmed) {
        return None;
    }
    let raw = Path::new(trimmed);
    // Reject relative paths containing `..` outright — they could climb out
    // of the cwd before the containment check runs.
    if !raw.is_absolute()
        && raw
            .components()
            .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return None;
    }

    let resolved = if raw.is_absolute() {
        raw.to_path_buf()
    } else {
        effective_cwd_from_args(args).join(raw)
    };

    if let Some(workspace_root) = workspace_root_from_args(args) {
        if !is_within_workspace_root(&resolved, &workspace_root) {
            return None;
        }
    } else if raw.is_absolute() {
        // Without a workspace root there is nothing to validate absolute
        // paths against, so they are refused.
        return None;
    }

    Some(resolved)
}
899
900fn resolve_walk_root(path: &str, args: &Value) -> Option<PathBuf> {
901    let trimmed = path.trim();
902    if trimmed.is_empty() {
903        return None;
904    }
905    if is_malformed_tool_path_token(trimmed) {
906        return None;
907    }
908    resolve_tool_path(path, args)
909}
910
/// Fallback for `read`: given a bare file name (no separators, must carry an
/// extension), scan the effective cwd — then the workspace root — for a
/// uniquely matching file. Returns `None` unless exactly one match is found.
fn resolve_read_path_fallback(path: &str, args: &Value) -> Option<PathBuf> {
    let token = path.trim();
    if token.is_empty() {
        return None;
    }
    let raw = Path::new(token);
    // Only bare names like "main.rs" qualify; anything absolute, containing
    // separators, or without an extension goes through the normal resolver.
    if raw.is_absolute() || token.contains('\\') || token.contains('/') || raw.extension().is_none()
    {
        return None;
    }

    let workspace_root = workspace_root_from_args(args);
    let effective_cwd = effective_cwd_from_args(args);
    let mut search_roots = vec![effective_cwd.clone()];
    if let Some(root) = workspace_root.as_ref() {
        if *root != effective_cwd {
            search_roots.push(root.clone());
        }
    }

    // File names are compared case-insensitively against the token.
    let token_lower = token.to_lowercase();
    for root in search_roots {
        // Skip search roots that fall outside the sandbox entirely.
        if let Some(workspace_root) = workspace_root.as_ref() {
            if !is_within_workspace_root(&root, workspace_root) {
                continue;
            }
        }

        let mut matches = Vec::new();
        for entry in WalkBuilder::new(&root).build().flatten() {
            if !entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
                continue;
            }
            let candidate = entry.path();
            // Each candidate must also stay inside the workspace root.
            if let Some(workspace_root) = workspace_root.as_ref() {
                if !is_within_workspace_root(candidate, workspace_root) {
                    continue;
                }
            }
            let file_name = candidate
                .file_name()
                .and_then(|name| name.to_str())
                .unwrap_or_default()
                .to_lowercase();
            if file_name == token_lower || file_name.ends_with(&token_lower) {
                matches.push(candidate.to_path_buf());
                // More than a handful of hits means the name is hopelessly
                // ambiguous; stop scanning early.
                if matches.len() > 8 {
                    break;
                }
            }
        }

        // Only a single, unambiguous match is trusted.
        if matches.len() == 1 {
            return matches.into_iter().next();
        }
    }

    None
}
970
971fn sandbox_path_denied_result(path: &str, args: &Value) -> ToolResult {
972    let requested = path.trim();
973    let workspace_root = workspace_root_from_args(args);
974    let effective_cwd = effective_cwd_from_args(args);
975    let suggested_path = Path::new(requested)
976        .file_name()
977        .filter(|name| !name.is_empty())
978        .map(PathBuf::from)
979        .map(|name| {
980            if let Some(root) = workspace_root.as_ref() {
981                if is_within_workspace_root(&effective_cwd, root) {
982                    effective_cwd.join(name)
983                } else {
984                    root.join(name)
985                }
986            } else {
987                effective_cwd.join(name)
988            }
989        });
990
991    let mut output =
992        "path denied by sandbox policy (outside workspace root, malformed path, or missing workspace context)"
993            .to_string();
994    if let Some(suggested) = suggested_path.as_ref() {
995        output.push_str(&format!(
996            "\nrequested: {}\ntry: {}",
997            requested,
998            suggested.to_string_lossy()
999        ));
1000    }
1001    if let Some(root) = workspace_root.as_ref() {
1002        output.push_str(&format!("\nworkspace_root: {}", root.to_string_lossy()));
1003    }
1004
1005    ToolResult {
1006        output,
1007        metadata: json!({
1008            "path": path,
1009            "workspace_root": workspace_root.map(|p| p.to_string_lossy().to_string()),
1010            "effective_cwd": effective_cwd.to_string_lossy().to_string(),
1011            "suggested_path": suggested_path.map(|p| p.to_string_lossy().to_string())
1012        }),
1013    }
1014}
1015
/// Returns true for tokens that name only a filesystem root or similarly
/// unusable bare location: "/", "\", ".", "..", "~", and Windows drive
/// designators like "C:", "C:\" or "C:/".
fn is_root_only_path_token(path: &str) -> bool {
    if matches!(path, "/" | "\\" | "." | ".." | "~") {
        return true;
    }
    let mut chars = path.chars();
    match (chars.next(), chars.next(), chars.next(), chars.next()) {
        // Bare drive letter, e.g. "C:".
        (Some(drive), Some(':'), None, None) => drive.is_ascii_alphabetic(),
        // Drive root, e.g. "C:\" or "C:/".
        (Some(drive), Some(':'), Some(sep), None) => {
            drive.is_ascii_alphabetic() && (sep == '\\' || sep == '/')
        }
        _ => false,
    }
}
1033
/// Rejects path tokens that look like the residue of a broken tool-call
/// transcript (leaked XML-ish markers), contain newlines, contain glob
/// wildcards, or contain `?` anywhere other than a Windows verbatim prefix.
fn is_malformed_tool_path_token(path: &str) -> bool {
    const LEAKED_MARKERS: [&str; 6] = [
        "<tool_call",
        "</tool_call",
        "<function=",
        "<parameter=",
        "</function>",
        "</parameter>",
    ];
    let lower = path.to_ascii_lowercase();
    if LEAKED_MARKERS.iter().any(|marker| lower.contains(marker)) {
        return true;
    }
    if path.contains('\n') || path.contains('\r') || path.contains('*') {
        return true;
    }
    // `?` is tolerated only as part of a Windows verbatim prefix such as
    // \\?\C:\... or //?/C:/... (these legitimately appear in tool output).
    if path.contains('?') {
        let trimmed = path.trim();
        let is_windows_verbatim = trimmed.starts_with("\\\\?\\") || trimmed.starts_with("//?/");
        return !is_windows_verbatim;
    }
    false
}
1062
/// Rejects glob/regex pattern tokens that carry leaked tool-call markers or
/// embedded newlines (wildcards are legitimate here, unlike in paths).
fn is_malformed_tool_pattern_token(pattern: &str) -> bool {
    let lower = pattern.to_ascii_lowercase();
    let has_leaked_marker = [
        "<tool_call",
        "</tool_call",
        "<function=",
        "<parameter=",
        "</function>",
        "</parameter>",
    ]
    .iter()
    .any(|marker| lower.contains(marker));
    has_leaked_marker || pattern.contains('\n') || pattern.contains('\r')
}
1079
1080// Builtin shell/read tool implementations live in `builtin_tools`.
1081
1082struct WriteTool;
1083#[async_trait]
1084impl Tool for WriteTool {
1085    fn schema(&self) -> ToolSchema {
1086        tool_schema_with_capabilities(
1087            "write",
1088            "Write file contents",
1089            json!({
1090                "type":"object",
1091                "properties":{
1092                    "path":{"type":"string"},
1093                    "content":{"type":"string"},
1094                    "allow_empty":{"type":"boolean"}
1095                },
1096                "required":["path", "content"]
1097            }),
1098            workspace_write_capabilities(),
1099        )
1100    }
1101    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
1102        let path = args["path"].as_str().unwrap_or("").trim();
1103        let content = args["content"].as_str();
1104        let allow_empty = args
1105            .get("allow_empty")
1106            .and_then(|v| v.as_bool())
1107            .unwrap_or(false);
1108        let Some(path_buf) = resolve_tool_path(path, &args) else {
1109            return Ok(sandbox_path_denied_result(path, &args));
1110        };
1111        let Some(content) = content else {
1112            return Ok(ToolResult {
1113                output: "write requires `content`".to_string(),
1114                metadata: json!({"ok": false, "reason": "missing_content", "path": path}),
1115            });
1116        };
1117        if content.is_empty() && !allow_empty {
1118            return Ok(ToolResult {
1119                output: "write requires non-empty `content` (or set allow_empty=true)".to_string(),
1120                metadata: json!({"ok": false, "reason": "empty_content", "path": path}),
1121            });
1122        }
1123        if let Some(parent) = path_buf.parent() {
1124            if !parent.as_os_str().is_empty() {
1125                fs::create_dir_all(parent).await?;
1126            }
1127        }
1128        fs::write(&path_buf, content).await?;
1129        Ok(ToolResult {
1130            output: "ok".to_string(),
1131            metadata: json!({"path": path_buf.to_string_lossy()}),
1132        })
1133    }
1134}
1135
1136struct EditTool;
1137#[async_trait]
1138impl Tool for EditTool {
1139    fn schema(&self) -> ToolSchema {
1140        tool_schema_with_capabilities(
1141            "edit",
1142            "String replacement edit",
1143            json!({
1144                "type":"object",
1145                "properties":{
1146                    "path":{"type":"string"},
1147                    "old":{"type":"string"},
1148                    "new":{"type":"string"}
1149                },
1150                "required":["path", "old", "new"]
1151            }),
1152            workspace_write_capabilities(),
1153        )
1154    }
1155    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
1156        let path = args["path"].as_str().unwrap_or("");
1157        let old = args["old"].as_str().unwrap_or("");
1158        let new = args["new"].as_str().unwrap_or("");
1159        let Some(path_buf) = resolve_tool_path(path, &args) else {
1160            return Ok(sandbox_path_denied_result(path, &args));
1161        };
1162        let content = fs::read_to_string(&path_buf).await.unwrap_or_default();
1163        let updated = content.replace(old, new);
1164        fs::write(&path_buf, updated).await?;
1165        Ok(ToolResult {
1166            output: "ok".to_string(),
1167            metadata: json!({"path": path_buf.to_string_lossy()}),
1168        })
1169    }
1170}
1171
/// Find workspace files matching a glob pattern.
struct GlobTool;
1173
/// Repair glob patterns where `**` is fused with a suffix inside one path
/// component (e.g. `src/**.rs`), splitting it into `**` plus a separate
/// component (`src/**/*.rs`; suffixes starting with `.` or `{` gain a `*`).
/// Returns `None` when the pattern needed no rewriting.
fn normalize_recursive_wildcard_pattern(pattern: &str) -> Option<String> {
    let mut changed = false;
    let mut parts: Vec<String> = Vec::new();
    for component in pattern.split('/') {
        match component.strip_prefix("**") {
            Some(tail) if !tail.is_empty() => {
                changed = true;
                parts.push("**".to_string());
                if tail.starts_with('.') || tail.starts_with('{') {
                    parts.push(format!("*{tail}"));
                } else {
                    parts.push(tail.to_string());
                }
            }
            _ => parts.push(component.to_string()),
        }
    }
    if changed {
        Some(parts.join("/"))
    } else {
        None
    }
}
1196
#[async_trait]
impl Tool for GlobTool {
    fn schema(&self) -> ToolSchema {
        tool_schema_with_capabilities(
            "glob",
            "Find files by glob",
            json!({"type":"object","properties":{"pattern":{"type":"string"}}}),
            workspace_search_capabilities(),
        )
    }
    /// Expand a glob pattern (relative patterns are scoped to the effective
    /// cwd) and return up to 100 matching paths inside the workspace.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let pattern = args["pattern"].as_str().unwrap_or("*");
        // `..` in a pattern could escape the sandbox; refuse it outright.
        if pattern.contains("..") {
            return Ok(ToolResult {
                output: "pattern denied by sandbox policy".to_string(),
                metadata: json!({"pattern": pattern}),
            });
        }
        if is_malformed_tool_pattern_token(pattern) {
            return Ok(ToolResult {
                output: "pattern denied by sandbox policy".to_string(),
                metadata: json!({"pattern": pattern}),
            });
        }
        let workspace_root = workspace_root_from_args(&args);
        let effective_cwd = effective_cwd_from_args(&args);
        // Relative patterns are evaluated under the effective cwd.
        let scoped_pattern = if Path::new(pattern).is_absolute() {
            pattern.to_string()
        } else {
            effective_cwd.join(pattern).to_string_lossy().to_string()
        };
        let mut files = Vec::new();
        let mut effective_pattern = scoped_pattern.clone();
        // On a parse error, retry once with fused `**` components split up
        // (e.g. `**.rs` -> `**/*.rs`); otherwise surface the original error.
        let paths = match glob::glob(&scoped_pattern) {
            Ok(paths) => paths,
            Err(err) => {
                if let Some(normalized) = normalize_recursive_wildcard_pattern(&scoped_pattern) {
                    if let Ok(paths) = glob::glob(&normalized) {
                        effective_pattern = normalized;
                        paths
                    } else {
                        return Err(err.into());
                    }
                } else {
                    return Err(err.into());
                }
            }
        };
        for path in paths.flatten() {
            // Hide `.tandem` internals (except artifacts) from discovery.
            if is_discovery_ignored_path(&path) {
                continue;
            }
            if let Some(root) = workspace_root.as_ref() {
                if !is_within_workspace_root(&path, root) {
                    continue;
                }
            }
            files.push(path.display().to_string());
            if files.len() >= 100 {
                break;
            }
        }
        Ok(ToolResult {
            output: files.join("\n"),
            metadata: json!({
                "count": files.len(),
                "effective_cwd": effective_cwd,
                "workspace_root": workspace_root,
                "pattern": pattern,
                "effective_pattern": effective_pattern
            }),
        })
    }
}
1271
/// Paths under a `.tandem` directory are hidden from discovery tools, with
/// the single exception of `.tandem/artifacts`. Only the first `.tandem`
/// component is considered.
fn is_discovery_ignored_path(path: &Path) -> bool {
    let parts: Vec<_> = path.components().map(|c| c.as_os_str()).collect();
    match parts.iter().position(|part| *part == ".tandem") {
        Some(idx) => parts.get(idx + 1).copied() != Some(std::ffi::OsStr::new("artifacts")),
        None => false,
    }
}
1284
1285struct GrepTool;
1286#[async_trait]
1287impl Tool for GrepTool {
1288    fn schema(&self) -> ToolSchema {
1289        tool_schema_with_capabilities(
1290            "grep",
1291            "Regex search in files",
1292            json!({"type":"object","properties":{"pattern":{"type":"string"},"path":{"type":"string"}}}),
1293            workspace_search_capabilities(),
1294        )
1295    }
1296    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
1297        let pattern = args["pattern"].as_str().unwrap_or("");
1298        let root = args["path"].as_str().unwrap_or(".");
1299        let Some(root_path) = resolve_walk_root(root, &args) else {
1300            return Ok(sandbox_path_denied_result(root, &args));
1301        };
1302        let regex = Regex::new(pattern)?;
1303        let mut out = Vec::new();
1304        for entry in WalkBuilder::new(&root_path).build().flatten() {
1305            if !entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
1306                continue;
1307            }
1308            let path = entry.path();
1309            if is_discovery_ignored_path(path) {
1310                continue;
1311            }
1312            if let Ok(content) = fs::read_to_string(path).await {
1313                for (idx, line) in content.lines().enumerate() {
1314                    if regex.is_match(line) {
1315                        out.push(format!("{}:{}:{}", path.display(), idx + 1, line));
1316                        if out.len() >= 100 {
1317                            break;
1318                        }
1319                    }
1320                }
1321            }
1322            if out.len() >= 100 {
1323                break;
1324            }
1325        }
1326        Ok(ToolResult {
1327            output: out.join("\n"),
1328            metadata: json!({"count": out.len(), "path": root_path.to_string_lossy()}),
1329        })
1330    }
1331}
1332
/// Fetch a URL and return a structured JSON document (markdown/text
/// renderings, links, metadata, and transfer stats).
struct WebFetchTool;
#[async_trait]
impl Tool for WebFetchTool {
    fn schema(&self) -> ToolSchema {
        tool_schema_with_capabilities(
            "webfetch",
            "Fetch URL content and return a structured markdown document",
            json!({
                "type":"object",
                "properties":{
                    "url":{"type":"string"},
                    "mode":{"type":"string"},
                    "return":{"type":"string"},
                    "max_bytes":{"type":"integer"},
                    "timeout_ms":{"type":"integer"},
                    "max_redirects":{"type":"integer"}
                }
            }),
            web_fetch_capabilities(),
        )
    }
    /// Download `url` (bounded by timeout/size/redirect limits), strip HTML
    /// noise, and emit markdown and/or text per `return` ("markdown", "text",
    /// or anything else for both).
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let url = args["url"].as_str().unwrap_or("").trim();
        if url.is_empty() {
            return Ok(ToolResult {
                output: "url is required".to_string(),
                metadata: json!({"url": url}),
            });
        }
        let mode = args["mode"].as_str().unwrap_or("auto");
        let return_mode = args["return"].as_str().unwrap_or("markdown");
        // Clamp caller-supplied limits: 1s..=120s timeout, <=5MB body,
        // <=20 redirects.
        let timeout_ms = args["timeout_ms"]
            .as_u64()
            .unwrap_or(15_000)
            .clamp(1_000, 120_000);
        let max_bytes = args["max_bytes"].as_u64().unwrap_or(500_000).min(5_000_000) as usize;
        let max_redirects = args["max_redirects"].as_u64().unwrap_or(5).min(20) as usize;

        let started = std::time::Instant::now();
        let fetched = fetch_url_with_limits(url, timeout_ms, max_bytes, max_redirects).await?;
        let raw = String::from_utf8_lossy(&fetched.buffer).to_string();

        // Title/canonical/links are extracted from the cleaned HTML, before
        // markdown conversion.
        let cleaned = strip_html_noise(&raw);
        let title = extract_title(&cleaned).unwrap_or_default();
        let canonical = extract_canonical(&cleaned);
        let links = extract_links(&cleaned);

        // Only convert to markdown when the response looks like HTML (or the
        // content type is missing); other types pass through as-is.
        let markdown = if fetched.content_type.contains("html") || fetched.content_type.is_empty() {
            html2md::parse_html(&cleaned)
        } else {
            cleaned.clone()
        };
        let text = markdown_to_text(&markdown);

        // `return` selects which rendering(s) to include; the other is
        // emptied rather than omitted.
        let markdown_out = if return_mode == "text" {
            String::new()
        } else {
            markdown
        };
        let text_out = if return_mode == "markdown" {
            String::new()
        } else {
            text
        };

        // How much the markdown rendering shrank the raw payload, in percent.
        let raw_chars = raw.chars().count();
        let markdown_chars = markdown_out.chars().count();
        let reduction_pct = if raw_chars == 0 {
            0.0
        } else {
            ((raw_chars.saturating_sub(markdown_chars)) as f64 / raw_chars as f64) * 100.0
        };

        let output = json!({
            "url": url,
            "final_url": fetched.final_url,
            "title": title,
            "content_type": fetched.content_type,
            "markdown": markdown_out,
            "text": text_out,
            "links": links,
            "meta": {
                "canonical": canonical,
                "mode": mode
            },
            "stats": {
                "bytes_in": fetched.buffer.len(),
                "bytes_out": markdown_chars,
                "raw_chars": raw_chars,
                "markdown_chars": markdown_chars,
                "reduction_pct": reduction_pct,
                "elapsed_ms": started.elapsed().as_millis(),
                "truncated": fetched.truncated
            }
        });

        Ok(ToolResult {
            output: serde_json::to_string_pretty(&output)?,
            metadata: json!({
                "url": url,
                "final_url": fetched.final_url,
                "content_type": fetched.content_type,
                "truncated": fetched.truncated
            }),
        })
    }
}
1440
1441struct WebFetchHtmlTool;
1442#[async_trait]
1443impl Tool for WebFetchHtmlTool {
1444    fn schema(&self) -> ToolSchema {
1445        tool_schema_with_capabilities(
1446            "webfetch_html",
1447            "Fetch URL and return raw HTML content",
1448            json!({
1449                "type":"object",
1450                "properties":{
1451                    "url":{"type":"string"},
1452                    "max_bytes":{"type":"integer"},
1453                    "timeout_ms":{"type":"integer"},
1454                    "max_redirects":{"type":"integer"}
1455                }
1456            }),
1457            web_fetch_capabilities(),
1458        )
1459    }
1460    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
1461        let url = args["url"].as_str().unwrap_or("").trim();
1462        if url.is_empty() {
1463            return Ok(ToolResult {
1464                output: "url is required".to_string(),
1465                metadata: json!({"url": url}),
1466            });
1467        }
1468        let timeout_ms = args["timeout_ms"]
1469            .as_u64()
1470            .unwrap_or(15_000)
1471            .clamp(1_000, 120_000);
1472        let max_bytes = args["max_bytes"].as_u64().unwrap_or(500_000).min(5_000_000) as usize;
1473        let max_redirects = args["max_redirects"].as_u64().unwrap_or(5).min(20) as usize;
1474
1475        let started = std::time::Instant::now();
1476        let fetched = fetch_url_with_limits(url, timeout_ms, max_bytes, max_redirects).await?;
1477        let output = String::from_utf8_lossy(&fetched.buffer).to_string();
1478
1479        Ok(ToolResult {
1480            output,
1481            metadata: json!({
1482                "url": url,
1483                "final_url": fetched.final_url,
1484                "content_type": fetched.content_type,
1485                "truncated": fetched.truncated,
1486                "bytes_in": fetched.buffer.len(),
1487                "elapsed_ms": started.elapsed().as_millis()
1488            }),
1489        })
1490    }
1491}
1492
/// Result of `fetch_url_with_limits`: the (possibly truncated) response body
/// plus response metadata.
struct FetchedResponse {
    /// URL after following redirects.
    final_url: String,
    /// Raw `Content-Type` header value, or "" when absent/unparsable.
    content_type: String,
    /// Body bytes, capped at the caller's `max_bytes`.
    buffer: Vec<u8>,
    /// True when the body was cut off at `max_bytes`.
    truncated: bool,
}
1499
/// GET `url` with a bounded timeout, redirect count, and body size.
///
/// The body is streamed and truncated at `max_bytes` (flagged via
/// `FetchedResponse::truncated`) rather than buffered whole.
///
/// # Errors
/// Returns the underlying client/transport error; non-2xx statuses are NOT
/// treated as errors here — the body is returned as-is.
async fn fetch_url_with_limits(
    url: &str,
    timeout_ms: u64,
    max_bytes: usize,
    max_redirects: usize,
) -> anyhow::Result<FetchedResponse> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_millis(timeout_ms))
        .redirect(reqwest::redirect::Policy::limited(max_redirects))
        .build()?;

    let res = client
        .get(url)
        .header(
            "Accept",
            "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        )
        .send()
        .await?;
    // Capture the post-redirect URL and declared content type.
    let final_url = res.url().to_string();
    let content_type = res
        .headers()
        .get("content-type")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .to_string();

    // Stream chunks until done or the size cap is hit; the final chunk is
    // partially copied so the buffer never exceeds `max_bytes`.
    let mut stream = res.bytes_stream();
    let mut buffer: Vec<u8> = Vec::new();
    let mut truncated = false;
    while let Some(chunk) = stream.next().await {
        let chunk = chunk?;
        if buffer.len() + chunk.len() > max_bytes {
            let remaining = max_bytes.saturating_sub(buffer.len());
            buffer.extend_from_slice(&chunk[..remaining]);
            truncated = true;
            break;
        }
        buffer.extend_from_slice(&chunk);
    }

    Ok(FetchedResponse {
        final_url,
        content_type,
        buffer,
        truncated,
    })
}
1548
1549fn strip_html_noise(input: &str) -> String {
1550    let script_re = Regex::new(r"(?is)<script[^>]*>.*?</script>").unwrap();
1551    let style_re = Regex::new(r"(?is)<style[^>]*>.*?</style>").unwrap();
1552    let noscript_re = Regex::new(r"(?is)<noscript[^>]*>.*?</noscript>").unwrap();
1553    let cleaned = script_re.replace_all(input, "");
1554    let cleaned = style_re.replace_all(&cleaned, "");
1555    let cleaned = noscript_re.replace_all(&cleaned, "");
1556    cleaned.to_string()
1557}
1558
1559fn extract_title(input: &str) -> Option<String> {
1560    let title_re = Regex::new(r"(?is)<title[^>]*>(.*?)</title>").ok()?;
1561    let caps = title_re.captures(input)?;
1562    let raw = caps.get(1)?.as_str();
1563    let tag_re = Regex::new(r"(?is)<[^>]+>").ok()?;
1564    Some(tag_re.replace_all(raw, "").trim().to_string())
1565}
1566
1567fn extract_canonical(input: &str) -> Option<String> {
1568    let canon_re =
1569        Regex::new(r#"(?is)<link[^>]*rel=["']canonical["'][^>]*href=["']([^"']+)["'][^>]*>"#)
1570            .ok()?;
1571    let caps = canon_re.captures(input)?;
1572    Some(caps.get(1)?.as_str().trim().to_string())
1573}
1574
1575fn extract_links(input: &str) -> Vec<Value> {
1576    let link_re = Regex::new(r#"(?is)<a[^>]*href=["']([^"']+)["'][^>]*>(.*?)</a>"#).unwrap();
1577    let tag_re = Regex::new(r"(?is)<[^>]+>").unwrap();
1578    let mut out = Vec::new();
1579    for caps in link_re.captures_iter(input).take(200) {
1580        let href = caps.get(1).map(|m| m.as_str()).unwrap_or("").trim();
1581        let raw_text = caps.get(2).map(|m| m.as_str()).unwrap_or("");
1582        let text = tag_re.replace_all(raw_text, "");
1583        if !href.is_empty() {
1584            out.push(json!({
1585                "text": text.trim(),
1586                "href": href
1587            }));
1588        }
1589    }
1590    out
1591}
1592
1593fn markdown_to_text(input: &str) -> String {
1594    let code_block_re = Regex::new(r"(?s)```.*?```").unwrap();
1595    let inline_code_re = Regex::new(r"`[^`]*`").unwrap();
1596    let link_re = Regex::new(r"\[([^\]]+)\]\([^)]+\)").unwrap();
1597    let emphasis_re = Regex::new(r"[*_~]+").unwrap();
1598    let cleaned = code_block_re.replace_all(input, "");
1599    let cleaned = inline_code_re.replace_all(&cleaned, "");
1600    let cleaned = link_re.replace_all(&cleaned, "$1");
1601    let cleaned = emphasis_re.replace_all(&cleaned, "");
1602    let cleaned = cleaned.replace('#', "");
1603    let whitespace_re = Regex::new(r"\n{3,}").unwrap();
1604    let cleaned = whitespace_re.replace_all(&cleaned, "\n\n");
1605    cleaned.trim().to_string()
1606}
1607
/// Debug helper: issue a raw JSON-RPC `tools/call` against an MCP endpoint
/// and return the unparsed response (status, headers, body).
struct McpDebugTool;
#[async_trait]
impl Tool for McpDebugTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "mcp_debug",
            "Call an MCP tool and return the raw response",
            json!({
                "type":"object",
                "properties":{
                    "url":{"type":"string"},
                    "tool":{"type":"string"},
                    "args":{"type":"object"},
                    "headers":{"type":"object"},
                    "timeout_ms":{"type":"integer"},
                    "max_bytes":{"type":"integer"}
                }
            }),
        )
    }
    /// POST a `tools/call` request to `url` and stream back up to
    /// `max_bytes` of the response body, without interpreting it.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let url = args["url"].as_str().unwrap_or("").trim();
        let tool = args["tool"].as_str().unwrap_or("").trim();
        if url.is_empty() || tool.is_empty() {
            return Ok(ToolResult {
                output: "url and tool are required".to_string(),
                metadata: json!({"url": url, "tool": tool}),
            });
        }
        // Clamp limits: 1s..=120s timeout, <=5MB response capture.
        let timeout_ms = args["timeout_ms"]
            .as_u64()
            .unwrap_or(15_000)
            .clamp(1_000, 120_000);
        let max_bytes = args["max_bytes"].as_u64().unwrap_or(200_000).min(5_000_000) as usize;
        let request_args = args.get("args").cloned().unwrap_or_else(|| json!({}));

        // Minimal JSON-RPC 2.0 envelope for an MCP `tools/call`.
        #[derive(serde::Serialize)]
        struct McpCallRequest {
            jsonrpc: String,
            id: u32,
            method: String,
            params: McpCallParams,
        }

        #[derive(serde::Serialize)]
        struct McpCallParams {
            name: String,
            arguments: Value,
        }

        let request = McpCallRequest {
            jsonrpc: "2.0".to_string(),
            id: 1,
            method: "tools/call".to_string(),
            params: McpCallParams {
                name: tool.to_string(),
                arguments: request_args,
            },
        };

        let client = reqwest::Client::builder()
            .timeout(std::time::Duration::from_millis(timeout_ms))
            .build()?;

        let mut builder = client
            .post(url)
            .header("Content-Type", "application/json")
            .header("Accept", "application/json, text/event-stream");

        // Pass through caller-supplied headers; only string values are used.
        if let Some(headers) = args.get("headers").and_then(|v| v.as_object()) {
            for (key, value) in headers {
                if let Some(value) = value.as_str() {
                    builder = builder.header(key, value);
                }
            }
        }

        let res = builder.json(&request).send().await?;
        let status = res.status().as_u16();

        // Capture response headers whose values are valid visible ASCII.
        let mut response_headers = serde_json::Map::new();
        for (key, value) in res.headers().iter() {
            if let Ok(value) = value.to_str() {
                response_headers.insert(key.to_string(), Value::String(value.to_string()));
            }
        }

        // Stream the body with the same truncation scheme as webfetch: the
        // final chunk is partially copied so the buffer never exceeds the cap.
        let mut stream = res.bytes_stream();
        let mut buffer: Vec<u8> = Vec::new();
        let mut truncated = false;

        while let Some(chunk) = stream.next().await {
            let chunk = chunk?;
            if buffer.len() + chunk.len() > max_bytes {
                let remaining = max_bytes.saturating_sub(buffer.len());
                buffer.extend_from_slice(&chunk[..remaining]);
                truncated = true;
                break;
            }
            buffer.extend_from_slice(&chunk);
        }

        let body = String::from_utf8_lossy(&buffer).to_string();
        let output = json!({
            "status": status,
            "headers": response_headers,
            "body": body,
            "truncated": truncated,
            "bytes": buffer.len()
        });

        Ok(ToolResult {
            output: serde_json::to_string_pretty(&output)?,
            metadata: json!({
                "url": url,
                "tool": tool,
                "timeout_ms": timeout_ms,
                "max_bytes": max_bytes
            }),
        })
    }
}
1730
/// Web search tool; the actual query is delegated to the configured backend.
struct WebSearchTool {
    // Which search provider to dispatch queries to.
    backend: SearchBackend,
}
1734#[async_trait]
1735impl Tool for WebSearchTool {
1736    fn schema(&self) -> ToolSchema {
1737        tool_schema_with_capabilities(
1738            "websearch",
1739            self.backend.schema_description(),
1740            json!({
1741                "type": "object",
1742                "properties": {
1743                    "query": { "type": "string" },
1744                    "limit": { "type": "integer" }
1745                },
1746                "required": ["query"]
1747            }),
1748            web_fetch_capabilities(),
1749        )
1750    }
1751    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
1752        let query = extract_websearch_query(&args).unwrap_or_default();
1753        let query_source = args
1754            .get("__query_source")
1755            .and_then(|v| v.as_str())
1756            .map(|s| s.to_string())
1757            .unwrap_or_else(|| {
1758                if query.is_empty() {
1759                    "missing".to_string()
1760                } else {
1761                    "tool_args".to_string()
1762                }
1763            });
1764        let query_hash = if query.is_empty() {
1765            None
1766        } else {
1767            Some(stable_hash(&query))
1768        };
1769        if query.is_empty() {
1770            tracing::warn!("WebSearchTool missing query. Args: {}", args);
1771            return Ok(ToolResult {
1772                output: format!("missing query. Received args: {}", args),
1773                metadata: json!({
1774                    "count": 0,
1775                    "error": "missing_query",
1776                    "query_source": query_source,
1777                    "query_hash": query_hash,
1778                    "loop_guard_triggered": false
1779                }),
1780            });
1781        }
1782        let num_results = extract_websearch_limit(&args).unwrap_or(8);
1783        let outcome = execute_websearch_backend(&self.backend, &query, num_results).await?;
1784        let configured_backend = self.backend.name();
1785        let backend_used = outcome
1786            .backend_used
1787            .as_deref()
1788            .unwrap_or(configured_backend);
1789        let mut metadata = json!({
1790            "query": query,
1791            "query_source": query_source,
1792            "query_hash": query_hash,
1793            "backend": backend_used,
1794            "configured_backend": configured_backend,
1795            "attempted_backends": outcome.attempted_backends,
1796            "loop_guard_triggered": false,
1797            "count": outcome.results.len(),
1798            "partial": outcome.partial
1799        });
1800        if let Some(kind) = outcome.unavailable_kind {
1801            metadata["error"] = json!(kind);
1802        }
1803
1804        if let Some(message) = outcome.unavailable_message {
1805            return Ok(ToolResult {
1806                output: message,
1807                metadata: metadata,
1808            });
1809        }
1810
1811        let output = json!({
1812            "query": query,
1813            "backend": backend_used,
1814            "configured_backend": configured_backend,
1815            "attempted_backends": metadata["attempted_backends"],
1816            "result_count": outcome.results.len(),
1817            "partial": outcome.partial,
1818            "results": outcome.results,
1819        });
1820
1821        Ok(ToolResult {
1822            output: serde_json::to_string_pretty(&output)?,
1823            metadata,
1824        })
1825    }
1826}
1827
/// Result of running (or failing to run) a search backend.
struct SearchExecutionOutcome {
    // Normalized hits; empty when the backend was unavailable.
    results: Vec<SearchResultEntry>,
    // True when results were truncated or the provider reported partial data.
    partial: bool,
    // Human-readable explanation when the search could not be performed.
    unavailable_message: Option<String>,
    // Machine-readable failure kind: "authorization_required",
    // "rate_limited", or "backend_unavailable".
    unavailable_kind: Option<&'static str>,
    // Name of the backend that actually produced this outcome, if known.
    backend_used: Option<String>,
    // Every backend name that was tried (Auto mode may try several).
    attempted_backends: Vec<String>,
}
1836
1837async fn execute_websearch_backend(
1838    backend: &SearchBackend,
1839    query: &str,
1840    num_results: u64,
1841) -> anyhow::Result<SearchExecutionOutcome> {
1842    match backend {
1843        SearchBackend::Auto { backends } => {
1844            let mut attempted_backends = Vec::new();
1845            let mut best_unavailable: Option<SearchExecutionOutcome> = None;
1846
1847            for candidate in backends {
1848                let mut outcome =
1849                    execute_websearch_backend_once(candidate, query, num_results).await?;
1850                attempted_backends.extend(outcome.attempted_backends.iter().cloned());
1851                if outcome.unavailable_kind.is_none() {
1852                    if outcome.backend_used.is_none() {
1853                        outcome.backend_used = Some(candidate.name().to_string());
1854                    }
1855                    outcome.attempted_backends = attempted_backends;
1856                    return Ok(outcome);
1857                }
1858
1859                let should_replace = best_unavailable
1860                    .as_ref()
1861                    .map(|current| {
1862                        search_unavailability_priority(outcome.unavailable_kind)
1863                            > search_unavailability_priority(current.unavailable_kind)
1864                    })
1865                    .unwrap_or(true);
1866                outcome.attempted_backends = attempted_backends.clone();
1867                if should_replace {
1868                    best_unavailable = Some(outcome);
1869                }
1870            }
1871
1872            let mut outcome = best_unavailable.unwrap_or_else(search_backend_unavailable_outcome);
1873            outcome.attempted_backends = attempted_backends;
1874            Ok(outcome)
1875        }
1876        _ => execute_websearch_backend_once(backend, query, num_results).await,
1877    }
1878}
1879
1880async fn execute_websearch_backend_once(
1881    backend: &SearchBackend,
1882    query: &str,
1883    num_results: u64,
1884) -> anyhow::Result<SearchExecutionOutcome> {
1885    match backend {
1886        SearchBackend::Disabled { reason } => Ok(SearchExecutionOutcome {
1887            results: Vec::new(),
1888            partial: false,
1889            unavailable_message: Some(format!(
1890                "Search backend is unavailable for `websearch`: {reason}"
1891            )),
1892            unavailable_kind: Some("backend_unavailable"),
1893            backend_used: Some("disabled".to_string()),
1894            attempted_backends: vec!["disabled".to_string()],
1895        }),
1896        SearchBackend::Tandem {
1897            base_url,
1898            timeout_ms,
1899        } => execute_tandem_search(base_url, *timeout_ms, query, num_results).await,
1900        SearchBackend::Searxng {
1901            base_url,
1902            engines,
1903            timeout_ms,
1904        } => {
1905            execute_searxng_search(
1906                base_url,
1907                engines.as_deref(),
1908                *timeout_ms,
1909                query,
1910                num_results,
1911            )
1912            .await
1913        }
1914        SearchBackend::Exa {
1915            api_key,
1916            timeout_ms,
1917        } => execute_exa_search(api_key, *timeout_ms, query, num_results).await,
1918        SearchBackend::Brave {
1919            api_key,
1920            timeout_ms,
1921        } => execute_brave_search(api_key, *timeout_ms, query, num_results).await,
1922        SearchBackend::Auto { .. } => unreachable!("auto backend should be handled by the wrapper"),
1923    }
1924}
1925
1926fn search_backend_unavailable_outcome() -> SearchExecutionOutcome {
1927    SearchExecutionOutcome {
1928        results: Vec::new(),
1929        partial: false,
1930        unavailable_message: Some(
1931            "Web search is currently unavailable for `websearch`.\nContinue with local file reads and note that external research could not be completed in this run."
1932                .to_string(),
1933        ),
1934        unavailable_kind: Some("backend_unavailable"),
1935        backend_used: None,
1936        attempted_backends: Vec::new(),
1937    }
1938}
1939
1940fn search_backend_authorization_required_outcome() -> SearchExecutionOutcome {
1941    SearchExecutionOutcome {
1942        results: Vec::new(),
1943        partial: false,
1944        unavailable_message: Some(
1945            "Authorization required for `websearch`.\nThis integration requires authorization before this action can run."
1946                .to_string(),
1947        ),
1948        unavailable_kind: Some("authorization_required"),
1949        backend_used: None,
1950        attempted_backends: Vec::new(),
1951    }
1952}
1953
1954fn search_backend_rate_limited_outcome(
1955    backend_name: &str,
1956    retry_after_secs: Option<u64>,
1957) -> SearchExecutionOutcome {
1958    let retry_hint = retry_after_secs
1959        .map(|value| format!("\nRetry after about {value} second(s)."))
1960        .unwrap_or_default();
1961    SearchExecutionOutcome {
1962        results: Vec::new(),
1963        partial: false,
1964        unavailable_message: Some(format!(
1965            "Web search is currently rate limited for `websearch` on the {backend_name} backend.\nContinue with local file reads and note that external research could not be completed in this run.{retry_hint}"
1966        )),
1967        unavailable_kind: Some("rate_limited"),
1968        backend_used: Some(backend_name.to_string()),
1969        attempted_backends: vec![backend_name.to_string()],
1970    }
1971}
1972
/// Ranks failure kinds so `Auto` mode can surface the most actionable one.
/// Higher is more actionable; unknown kinds (and `None`) rank lowest.
fn search_unavailability_priority(kind: Option<&'static str>) -> u8 {
    kind.map_or(0, |k| match k {
        "authorization_required" => 3,
        "rate_limited" => 2,
        "backend_unavailable" => 1,
        _ => 0,
    })
}
1981
1982async fn execute_tandem_search(
1983    base_url: &str,
1984    timeout_ms: u64,
1985    query: &str,
1986    num_results: u64,
1987) -> anyhow::Result<SearchExecutionOutcome> {
1988    let client = reqwest::Client::builder()
1989        .timeout(Duration::from_millis(timeout_ms))
1990        .build()?;
1991    let endpoint = format!("{}/search", base_url.trim_end_matches('/'));
1992    let response = match client
1993        .post(&endpoint)
1994        .header("Content-Type", "application/json")
1995        .header("Accept", "application/json")
1996        .json(&json!({
1997            "query": query,
1998            "limit": num_results,
1999        }))
2000        .send()
2001        .await
2002    {
2003        Ok(response) => response,
2004        Err(_) => {
2005            let mut outcome = search_backend_unavailable_outcome();
2006            outcome.backend_used = Some("tandem".to_string());
2007            outcome.attempted_backends = vec!["tandem".to_string()];
2008            return Ok(outcome);
2009        }
2010    };
2011    let status = response.status();
2012    if matches!(
2013        status,
2014        reqwest::StatusCode::UNAUTHORIZED | reqwest::StatusCode::FORBIDDEN
2015    ) {
2016        let mut outcome = search_backend_authorization_required_outcome();
2017        outcome.backend_used = Some("tandem".to_string());
2018        outcome.attempted_backends = vec!["tandem".to_string()];
2019        return Ok(outcome);
2020    }
2021    if !status.is_success() {
2022        let mut outcome = search_backend_unavailable_outcome();
2023        outcome.backend_used = Some("tandem".to_string());
2024        outcome.attempted_backends = vec!["tandem".to_string()];
2025        return Ok(outcome);
2026    }
2027    let body: Value = match response.json().await {
2028        Ok(value) => value,
2029        Err(_) => {
2030            let mut outcome = search_backend_unavailable_outcome();
2031            outcome.backend_used = Some("tandem".to_string());
2032            outcome.attempted_backends = vec!["tandem".to_string()];
2033            return Ok(outcome);
2034        }
2035    };
2036    let raw_results = body
2037        .get("results")
2038        .and_then(Value::as_array)
2039        .cloned()
2040        .unwrap_or_default();
2041    let results = normalize_tandem_results(&raw_results, num_results as usize);
2042    let partial = body
2043        .get("partial")
2044        .and_then(Value::as_bool)
2045        .unwrap_or_else(|| raw_results.len() > results.len());
2046    Ok(SearchExecutionOutcome {
2047        results,
2048        partial,
2049        unavailable_message: None,
2050        unavailable_kind: None,
2051        backend_used: Some("tandem".to_string()),
2052        attempted_backends: vec!["tandem".to_string()],
2053    })
2054}
2055
2056async fn execute_searxng_search(
2057    base_url: &str,
2058    engines: Option<&str>,
2059    timeout_ms: u64,
2060    query: &str,
2061    num_results: u64,
2062) -> anyhow::Result<SearchExecutionOutcome> {
2063    let client = reqwest::Client::builder()
2064        .timeout(Duration::from_millis(timeout_ms))
2065        .build()?;
2066    let endpoint = format!("{}/search", base_url.trim_end_matches('/'));
2067    let mut params: Vec<(&str, String)> = vec![
2068        ("q", query.to_string()),
2069        ("format", "json".to_string()),
2070        ("pageno", "1".to_string()),
2071    ];
2072    if let Some(engines) = engines {
2073        params.push(("engines", engines.to_string()));
2074    }
2075    let response = client.get(&endpoint).query(&params).send().await?;
2076
2077    let status = response.status();
2078    if status == reqwest::StatusCode::FORBIDDEN {
2079        let mut outcome = search_backend_authorization_required_outcome();
2080        outcome.backend_used = Some("searxng".to_string());
2081        outcome.attempted_backends = vec!["searxng".to_string()];
2082        return Ok(outcome);
2083    }
2084    if !status.is_success() {
2085        let mut outcome = search_backend_unavailable_outcome();
2086        outcome.backend_used = Some("searxng".to_string());
2087        outcome.attempted_backends = vec!["searxng".to_string()];
2088        return Ok(outcome);
2089    }
2090    let status_for_error = status;
2091    let body: Value = match response.json().await {
2092        Ok(value) => value,
2093        Err(_) => {
2094            let mut outcome = search_backend_unavailable_outcome();
2095            outcome.backend_used = Some("searxng".to_string());
2096            outcome.attempted_backends = vec!["searxng".to_string()];
2097            return Ok(outcome);
2098        }
2099    };
2100    let raw_results = body
2101        .get("results")
2102        .and_then(Value::as_array)
2103        .cloned()
2104        .unwrap_or_default();
2105    let results = normalize_searxng_results(&raw_results, num_results as usize);
2106    let partial = raw_results.len() > results.len()
2107        || status_for_error == reqwest::StatusCode::PARTIAL_CONTENT;
2108    Ok(SearchExecutionOutcome {
2109        results,
2110        partial,
2111        unavailable_message: None,
2112        unavailable_kind: None,
2113        backend_used: Some("searxng".to_string()),
2114        attempted_backends: vec!["searxng".to_string()],
2115    })
2116}
2117
2118async fn execute_exa_search(
2119    api_key: &str,
2120    timeout_ms: u64,
2121    query: &str,
2122    num_results: u64,
2123) -> anyhow::Result<SearchExecutionOutcome> {
2124    let client = reqwest::Client::builder()
2125        .timeout(Duration::from_millis(timeout_ms))
2126        .build()?;
2127    let response = match client
2128        .post("https://api.exa.ai/search")
2129        .header("Content-Type", "application/json")
2130        .header("Accept", "application/json")
2131        .header("x-api-key", api_key)
2132        .json(&json!({
2133            "query": query,
2134            "numResults": num_results,
2135        }))
2136        .send()
2137        .await
2138    {
2139        Ok(response) => response,
2140        Err(_) => {
2141            let mut outcome = search_backend_unavailable_outcome();
2142            outcome.backend_used = Some("exa".to_string());
2143            outcome.attempted_backends = vec!["exa".to_string()];
2144            return Ok(outcome);
2145        }
2146    };
2147    let status = response.status();
2148    if matches!(
2149        status,
2150        reqwest::StatusCode::UNAUTHORIZED
2151            | reqwest::StatusCode::FORBIDDEN
2152            | reqwest::StatusCode::PAYMENT_REQUIRED
2153    ) {
2154        let mut outcome = search_backend_authorization_required_outcome();
2155        outcome.backend_used = Some("exa".to_string());
2156        outcome.attempted_backends = vec!["exa".to_string()];
2157        return Ok(outcome);
2158    }
2159    if status == reqwest::StatusCode::TOO_MANY_REQUESTS {
2160        let retry_after_secs = response
2161            .headers()
2162            .get("retry-after")
2163            .and_then(|value| value.to_str().ok())
2164            .and_then(|value| value.trim().parse::<u64>().ok());
2165        return Ok(search_backend_rate_limited_outcome("exa", retry_after_secs));
2166    }
2167    if !status.is_success() {
2168        let mut outcome = search_backend_unavailable_outcome();
2169        outcome.backend_used = Some("exa".to_string());
2170        outcome.attempted_backends = vec!["exa".to_string()];
2171        return Ok(outcome);
2172    }
2173    let body: Value = match response.json().await {
2174        Ok(value) => value,
2175        Err(_) => {
2176            let mut outcome = search_backend_unavailable_outcome();
2177            outcome.backend_used = Some("exa".to_string());
2178            outcome.attempted_backends = vec!["exa".to_string()];
2179            return Ok(outcome);
2180        }
2181    };
2182    let raw_results = body
2183        .get("results")
2184        .and_then(Value::as_array)
2185        .cloned()
2186        .unwrap_or_default();
2187    let results = normalize_exa_results(&raw_results, num_results as usize);
2188    Ok(SearchExecutionOutcome {
2189        partial: raw_results.len() > results.len(),
2190        results,
2191        unavailable_message: None,
2192        unavailable_kind: None,
2193        backend_used: Some("exa".to_string()),
2194        attempted_backends: vec!["exa".to_string()],
2195    })
2196}
2197
2198async fn execute_brave_search(
2199    api_key: &str,
2200    timeout_ms: u64,
2201    query: &str,
2202    num_results: u64,
2203) -> anyhow::Result<SearchExecutionOutcome> {
2204    let client = reqwest::Client::builder()
2205        .timeout(Duration::from_millis(timeout_ms))
2206        .build()?;
2207    let count = num_results.to_string();
2208    let response = match client
2209        .get("https://api.search.brave.com/res/v1/web/search")
2210        .header("Accept", "application/json")
2211        .header("X-Subscription-Token", api_key)
2212        .query(&[("q", query), ("count", count.as_str())])
2213        .send()
2214        .await
2215    {
2216        Ok(response) => response,
2217        Err(error) => {
2218            tracing::warn!("brave websearch request failed: {}", error);
2219            let mut outcome = search_backend_unavailable_outcome();
2220            outcome.backend_used = Some("brave".to_string());
2221            outcome.attempted_backends = vec!["brave".to_string()];
2222            return Ok(outcome);
2223        }
2224    };
2225    let status = response.status();
2226    if matches!(
2227        status,
2228        reqwest::StatusCode::UNAUTHORIZED
2229            | reqwest::StatusCode::FORBIDDEN
2230            | reqwest::StatusCode::PAYMENT_REQUIRED
2231    ) {
2232        let mut outcome = search_backend_authorization_required_outcome();
2233        outcome.backend_used = Some("brave".to_string());
2234        outcome.attempted_backends = vec!["brave".to_string()];
2235        return Ok(outcome);
2236    }
2237    if status == reqwest::StatusCode::TOO_MANY_REQUESTS {
2238        let retry_after_secs = response
2239            .headers()
2240            .get("retry-after")
2241            .and_then(|value| value.to_str().ok())
2242            .and_then(|value| value.trim().parse::<u64>().ok());
2243        return Ok(search_backend_rate_limited_outcome(
2244            "brave",
2245            retry_after_secs,
2246        ));
2247    }
2248    if !status.is_success() {
2249        tracing::warn!("brave websearch returned non-success status: {}", status);
2250        let mut outcome = search_backend_unavailable_outcome();
2251        outcome.backend_used = Some("brave".to_string());
2252        outcome.attempted_backends = vec!["brave".to_string()];
2253        return Ok(outcome);
2254    }
2255    let body_text = match response.text().await {
2256        Ok(value) => value,
2257        Err(error) => {
2258            tracing::warn!("brave websearch body read failed: {}", error);
2259            let mut outcome = search_backend_unavailable_outcome();
2260            outcome.backend_used = Some("brave".to_string());
2261            outcome.attempted_backends = vec!["brave".to_string()];
2262            return Ok(outcome);
2263        }
2264    };
2265    let body: Value = match serde_json::from_str(&body_text) {
2266        Ok(value) => value,
2267        Err(error) => {
2268            let snippet = body_text.chars().take(200).collect::<String>();
2269            tracing::warn!(
2270                "brave websearch JSON parse failed: {} body_prefix={:?}",
2271                error,
2272                snippet
2273            );
2274            let mut outcome = search_backend_unavailable_outcome();
2275            outcome.backend_used = Some("brave".to_string());
2276            outcome.attempted_backends = vec!["brave".to_string()];
2277            return Ok(outcome);
2278        }
2279    };
2280    let raw_results = body
2281        .get("web")
2282        .and_then(|value| value.get("results"))
2283        .and_then(Value::as_array)
2284        .cloned()
2285        .unwrap_or_default();
2286    let results = normalize_brave_results(&raw_results, num_results as usize);
2287    Ok(SearchExecutionOutcome {
2288        partial: raw_results.len() > results.len(),
2289        results,
2290        unavailable_message: None,
2291        unavailable_kind: None,
2292        backend_used: Some("brave".to_string()),
2293        attempted_backends: vec!["brave".to_string()],
2294    })
2295}
2296
2297fn normalize_tandem_results(raw_results: &[Value], limit: usize) -> Vec<SearchResultEntry> {
2298    raw_results
2299        .iter()
2300        .filter_map(|entry| {
2301            let title = entry
2302                .get("title")
2303                .or_else(|| entry.get("name"))
2304                .and_then(Value::as_str)?
2305                .trim()
2306                .to_string();
2307            let url = entry.get("url").and_then(Value::as_str)?.trim().to_string();
2308            if title.is_empty() || url.is_empty() {
2309                return None;
2310            }
2311            let snippet = entry
2312                .get("snippet")
2313                .or_else(|| entry.get("content"))
2314                .or_else(|| entry.get("description"))
2315                .and_then(Value::as_str)
2316                .map(str::trim)
2317                .unwrap_or_default()
2318                .to_string();
2319            let source = entry
2320                .get("source")
2321                .or_else(|| entry.get("provider"))
2322                .and_then(Value::as_str)
2323                .map(str::trim)
2324                .filter(|value| !value.is_empty())
2325                .unwrap_or("tandem")
2326                .to_string();
2327            Some(SearchResultEntry {
2328                title,
2329                url,
2330                snippet,
2331                source,
2332            })
2333        })
2334        .take(limit)
2335        .collect()
2336}
2337
2338fn normalize_searxng_results(raw_results: &[Value], limit: usize) -> Vec<SearchResultEntry> {
2339    raw_results
2340        .iter()
2341        .filter_map(|entry| {
2342            let title = entry
2343                .get("title")
2344                .and_then(Value::as_str)?
2345                .trim()
2346                .to_string();
2347            let url = entry.get("url").and_then(Value::as_str)?.trim().to_string();
2348            if title.is_empty() || url.is_empty() {
2349                return None;
2350            }
2351            let snippet = entry
2352                .get("content")
2353                .and_then(Value::as_str)
2354                .or_else(|| entry.get("snippet").and_then(Value::as_str))
2355                .unwrap_or("")
2356                .trim()
2357                .to_string();
2358            let source = entry
2359                .get("engine")
2360                .and_then(Value::as_str)
2361                .map(|engine| format!("searxng:{engine}"))
2362                .unwrap_or_else(|| "searxng".to_string());
2363            Some(SearchResultEntry {
2364                title,
2365                url,
2366                snippet,
2367                source,
2368            })
2369        })
2370        .take(limit)
2371        .collect()
2372}
2373
2374fn normalize_exa_results(raw_results: &[Value], limit: usize) -> Vec<SearchResultEntry> {
2375    raw_results
2376        .iter()
2377        .filter_map(|entry| {
2378            let title = entry
2379                .get("title")
2380                .and_then(Value::as_str)?
2381                .trim()
2382                .to_string();
2383            let url = entry.get("url").and_then(Value::as_str)?.trim().to_string();
2384            if title.is_empty() || url.is_empty() {
2385                return None;
2386            }
2387            let snippet = entry
2388                .get("text")
2389                .and_then(Value::as_str)
2390                .or_else(|| {
2391                    entry
2392                        .get("highlights")
2393                        .and_then(Value::as_array)
2394                        .and_then(|items| items.iter().find_map(Value::as_str))
2395                })
2396                .unwrap_or("")
2397                .chars()
2398                .take(400)
2399                .collect::<String>()
2400                .trim()
2401                .to_string();
2402            Some(SearchResultEntry {
2403                title,
2404                url,
2405                snippet,
2406                source: "exa".to_string(),
2407            })
2408        })
2409        .take(limit)
2410        .collect()
2411}
2412
2413fn normalize_brave_results(raw_results: &[Value], limit: usize) -> Vec<SearchResultEntry> {
2414    raw_results
2415        .iter()
2416        .filter_map(|entry| {
2417            let title = entry
2418                .get("title")
2419                .and_then(Value::as_str)?
2420                .trim()
2421                .to_string();
2422            let url = entry.get("url").and_then(Value::as_str)?.trim().to_string();
2423            if title.is_empty() || url.is_empty() {
2424                return None;
2425            }
2426            let snippet = entry
2427                .get("description")
2428                .and_then(Value::as_str)
2429                .or_else(|| entry.get("snippet").and_then(Value::as_str))
2430                .unwrap_or("")
2431                .trim()
2432                .to_string();
2433            let source = entry
2434                .get("profile")
2435                .and_then(|value| value.get("long_name"))
2436                .and_then(Value::as_str)
2437                .map(|value| format!("brave:{value}"))
2438                .unwrap_or_else(|| "brave".to_string());
2439            Some(SearchResultEntry {
2440                title,
2441                url,
2442                snippet,
2443                source,
2444            })
2445        })
2446        .take(limit)
2447        .collect()
2448}
2449
/// Hashes `input` into a fixed-width, 16-hex-digit token.
///
/// Stable within a single build (same hasher, same process), which is all
/// the callers need for correlating repeated queries in logs. NOTE(review):
/// `DefaultHasher` output is not guaranteed stable across Rust releases.
fn stable_hash(input: &str) -> String {
    let mut state = DefaultHasher::new();
    input.hash(&mut state);
    let digest = state.finish();
    format!("{digest:016x}")
}
2455
2456fn extract_websearch_query(args: &Value) -> Option<String> {
2457    // Direct keys first.
2458    const QUERY_KEYS: [&str; 5] = ["query", "q", "search_query", "searchQuery", "keywords"];
2459    for key in QUERY_KEYS {
2460        if let Some(query) = args.get(key).and_then(|v| v.as_str()) {
2461            if let Some(cleaned) = sanitize_websearch_query_candidate(query) {
2462                return Some(cleaned);
2463            }
2464        }
2465    }
2466
2467    // Some tool-call envelopes nest args.
2468    for container in ["arguments", "args", "input", "params"] {
2469        if let Some(obj) = args.get(container) {
2470            for key in QUERY_KEYS {
2471                if let Some(query) = obj.get(key).and_then(|v| v.as_str()) {
2472                    if let Some(cleaned) = sanitize_websearch_query_candidate(query) {
2473                        return Some(cleaned);
2474                    }
2475                }
2476            }
2477        }
2478    }
2479
2480    // Last resort: plain string args.
2481    args.as_str().and_then(sanitize_websearch_query_candidate)
2482}
2483
/// Cleans up a raw query candidate that may carry tool-call markup.
///
/// Prefers the payload of an `<arg_value>…</arg_value>` pair when present;
/// otherwise strips leftover wrapper tags, collapses whitespace, and drops
/// a leading "websearch query " / "query " noise prefix (case-insensitive).
/// Returns `None` when nothing usable remains.
fn sanitize_websearch_query_candidate(raw: &str) -> Option<String> {
    let candidate = raw.trim();
    if candidate.is_empty() {
        return None;
    }

    // Explicit <arg_value> payload wins when it is non-empty.
    // (ASCII lowercasing preserves byte offsets, so indices transfer.)
    let lowered = candidate.to_ascii_lowercase();
    if let Some(open) = lowered.find("<arg_value>") {
        let body = &candidate[open + "<arg_value>".len()..];
        let inner = match body.to_ascii_lowercase().find("</arg_value>") {
            Some(close) => &body[..close],
            None => body,
        };
        let inner = inner.trim();
        if !inner.is_empty() {
            return Some(inner.to_string());
        }
    }

    // Strip any remaining wrapper tags and collapse whitespace runs.
    let stripped = candidate
        .replace("<arg_key>", " ")
        .replace("</arg_key>", " ")
        .replace("<arg_value>", " ")
        .replace("</arg_value>", " ");
    let collapsed = stripped.split_whitespace().collect::<Vec<_>>().join(" ");
    if collapsed.is_empty() {
        return None;
    }

    // Drop a leading noise prefix, preserving the original casing of the rest.
    let collapsed_lower = collapsed.to_ascii_lowercase();
    for prefix in ["websearch query ", "query "] {
        if let Some(rest) = collapsed_lower.strip_prefix(prefix) {
            let tail = collapsed[collapsed.len() - rest.len()..].trim();
            if !tail.is_empty() {
                return Some(tail.to_string());
            }
        }
    }

    Some(collapsed)
}
2536
2537fn extract_websearch_limit(args: &Value) -> Option<u64> {
2538    let mut read_limit = |value: &Value| value.as_u64().map(|v| v.clamp(1, 10));
2539
2540    if let Some(limit) = args
2541        .get("limit")
2542        .and_then(&mut read_limit)
2543        .or_else(|| args.get("numResults").and_then(&mut read_limit))
2544        .or_else(|| args.get("num_results").and_then(&mut read_limit))
2545    {
2546        return Some(limit);
2547    }
2548
2549    for container in ["arguments", "args", "input", "params"] {
2550        if let Some(obj) = args.get(container) {
2551            if let Some(limit) = obj
2552                .get("limit")
2553                .and_then(&mut read_limit)
2554                .or_else(|| obj.get("numResults").and_then(&mut read_limit))
2555                .or_else(|| obj.get("num_results").and_then(&mut read_limit))
2556            {
2557                return Some(limit);
2558            }
2559        }
2560    }
2561    None
2562}
2563
/// Tool that does a case-insensitive substring search over workspace source files.
struct CodeSearchTool;
2565#[async_trait]
2566impl Tool for CodeSearchTool {
2567    fn schema(&self) -> ToolSchema {
2568        tool_schema_with_capabilities(
2569            "codesearch",
2570            "Search code in workspace files",
2571            json!({"type":"object","properties":{"query":{"type":"string"},"path":{"type":"string"},"limit":{"type":"integer"}}}),
2572            workspace_search_capabilities(),
2573        )
2574    }
2575    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
2576        let query = args["query"].as_str().unwrap_or("").trim();
2577        if query.is_empty() {
2578            return Ok(ToolResult {
2579                output: "missing query".to_string(),
2580                metadata: json!({"count": 0}),
2581            });
2582        }
2583        let root = args["path"].as_str().unwrap_or(".");
2584        let Some(root_path) = resolve_walk_root(root, &args) else {
2585            return Ok(sandbox_path_denied_result(root, &args));
2586        };
2587        let limit = args["limit"]
2588            .as_u64()
2589            .map(|v| v.clamp(1, 200) as usize)
2590            .unwrap_or(50);
2591        let mut hits = Vec::new();
2592        let lower = query.to_lowercase();
2593        for entry in WalkBuilder::new(&root_path).build().flatten() {
2594            if !entry.file_type().map(|t| t.is_file()).unwrap_or(false) {
2595                continue;
2596            }
2597            let path = entry.path();
2598            let ext = path.extension().and_then(|v| v.to_str()).unwrap_or("");
2599            if !matches!(
2600                ext,
2601                "rs" | "ts" | "tsx" | "js" | "jsx" | "py" | "md" | "toml" | "json"
2602            ) {
2603                continue;
2604            }
2605            if let Ok(content) = fs::read_to_string(path).await {
2606                for (idx, line) in content.lines().enumerate() {
2607                    if line.to_lowercase().contains(&lower) {
2608                        hits.push(format!("{}:{}:{}", path.display(), idx + 1, line.trim()));
2609                        if hits.len() >= limit {
2610                            break;
2611                        }
2612                    }
2613                }
2614            }
2615            if hits.len() >= limit {
2616                break;
2617            }
2618        }
2619        Ok(ToolResult {
2620            output: hits.join("\n"),
2621            metadata: json!({"count": hits.len(), "query": query, "path": root_path.to_string_lossy()}),
2622        })
2623    }
2624}
2625
2626struct TodoWriteTool;
2627#[async_trait]
2628impl Tool for TodoWriteTool {
2629    fn schema(&self) -> ToolSchema {
2630        tool_schema(
2631            "todo_write",
2632            "Update todo list",
2633            json!({
2634                "type":"object",
2635                "properties":{
2636                    "todos":{
2637                        "type":"array",
2638                        "items":{
2639                            "type":"object",
2640                            "properties":{
2641                                "id":{"type":"string"},
2642                                "content":{"type":"string"},
2643                                "text":{"type":"string"},
2644                                "status":{"type":"string"}
2645                            }
2646                        }
2647                    }
2648                }
2649            }),
2650        )
2651    }
2652    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
2653        let todos = normalize_todos(args["todos"].as_array().cloned().unwrap_or_default());
2654        Ok(ToolResult {
2655            output: format!("todo list updated: {} items", todos.len()),
2656            metadata: json!({"todos": todos}),
2657        })
2658    }
2659}
2660
struct TaskTool;
#[async_trait]
impl Tool for TaskTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "task",
            "Create a subtask summary or spawn a teammate when team_name is provided.",
            task_schema(),
        )
    }
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        // Deserialize into the shared TaskInput shape; malformed args fail fast.
        let input = serde_json::from_value::<TaskInput>(args.clone())
            .map_err(|err| anyhow!("invalid Task args: {}", err))?;
        let description = input.description;
        // Teammate mode: a team_name means "queue this prompt for a team
        // member", materializing the team's on-disk layout as needed.
        if let Some(team_name_raw) = input.team_name {
            let team_name = sanitize_team_name(&team_name_raw)?;
            let paths = resolve_agent_team_paths(&args)?;
            // create_dir_all is idempotent, so re-running against an existing
            // team is harmless.
            fs::create_dir_all(paths.team_dir(&team_name)).await?;
            fs::create_dir_all(paths.tasks_dir(&team_name)).await?;
            fs::create_dir_all(paths.mailboxes_dir(&team_name)).await?;
            fs::create_dir_all(paths.requests_dir(&team_name)).await?;
            upsert_team_index(&paths, &team_name).await?;

            // Use the caller-provided member name when given; otherwise derive
            // a default name for this team (see next_default_member_name).
            let member_name = if let Some(requested_name) = input.name {
                sanitize_member_name(&requested_name)?
            } else {
                next_default_member_name(&paths, &team_name).await?
            };
            // `inserted` signals whether the member was newly added (vs. an
            // update of an existing roster entry).
            let inserted = upsert_team_member(
                &paths,
                &team_name,
                &member_name,
                Some(input.subagent_type.clone()),
                input.model.clone(),
            )
            .await?;
            // Deliver the prompt to the member's mailbox. Sender identity is
            // read from the raw args and defaults to "team-lead".
            append_mailbox_message(
                &paths,
                &team_name,
                &member_name,
                json!({
                    "id": format!("msg_{}", uuid_like(now_ms_u64())),
                    "type": "task_prompt",
                    "from": args.get("sender").and_then(|v| v.as_str()).unwrap_or("team-lead"),
                    "to": member_name,
                    "content": input.prompt,
                    "summary": description,
                    "timestampMs": now_ms_u64(),
                    "read": false
                }),
            )
            .await?;
            // Surface domain events in metadata; `member.spawned` only fires
            // for newly-inserted members.
            let mut events = Vec::new();
            if inserted {
                events.push(json!({
                    "type": "agent_team.member.spawned",
                    "properties": {
                        "teamName": team_name,
                        "memberName": member_name,
                        "agentType": input.subagent_type,
                        "model": input.model,
                    }
                }));
            }
            events.push(json!({
                "type": "agent_team.mailbox.enqueued",
                "properties": {
                    "teamName": team_name,
                    "recipient": member_name,
                    "messageType": "task_prompt",
                }
            }));
            return Ok(ToolResult {
                output: format!("Teammate task queued for {}", member_name),
                metadata: json!({
                    "ok": true,
                    "team_name": team_name,
                    "teammate_name": member_name,
                    "events": events
                }),
            });
        }
        // Plain mode: no team involved; just echo the planned subtask.
        Ok(ToolResult {
            output: format!("Subtask planned: {description}"),
            metadata: json!({"description": description, "prompt": input.prompt}),
        })
    }
}
2749
2750struct QuestionTool;
2751#[async_trait]
2752impl Tool for QuestionTool {
2753    fn schema(&self) -> ToolSchema {
2754        tool_schema(
2755            "question",
2756            "Emit a question request for the user",
2757            json!({
2758                "type":"object",
2759                "properties":{
2760                    "questions":{
2761                        "type":"array",
2762                        "items":{
2763                            "type":"object",
2764                            "properties":{
2765                                "question":{"type":"string"},
2766                                "choices":{"type":"array","items":{"type":"string"}}
2767                            }
2768                        }
2769                    }
2770                }
2771            }),
2772        )
2773    }
2774    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
2775        let questions = normalize_question_payload(&args);
2776        if questions.is_empty() {
2777            return Err(anyhow!(
2778                "QUESTION_INVALID_ARGS: expected non-empty `questions` with at least one non-empty `question` string"
2779            ));
2780        }
2781        Ok(ToolResult {
2782            output: "Question requested. Use /question endpoints to respond.".to_string(),
2783            metadata: json!({"questions": questions}),
2784        })
2785    }
2786}
2787
2788fn normalize_question_payload(args: &Value) -> Vec<Value> {
2789    let parsed_args;
2790    let args = if let Some(raw) = args.as_str() {
2791        if let Ok(decoded) = serde_json::from_str::<Value>(raw) {
2792            parsed_args = decoded;
2793            &parsed_args
2794        } else {
2795            args
2796        }
2797    } else {
2798        args
2799    };
2800
2801    let Some(obj) = args.as_object() else {
2802        return Vec::new();
2803    };
2804
2805    if let Some(items) = obj.get("questions").and_then(|v| v.as_array()) {
2806        let normalized = items
2807            .iter()
2808            .filter_map(normalize_question_entry)
2809            .collect::<Vec<_>>();
2810        if !normalized.is_empty() {
2811            return normalized;
2812        }
2813    }
2814
2815    let question = obj
2816        .get("question")
2817        .or_else(|| obj.get("prompt"))
2818        .or_else(|| obj.get("query"))
2819        .or_else(|| obj.get("text"))
2820        .and_then(|v| v.as_str())
2821        .map(str::trim)
2822        .filter(|s| !s.is_empty());
2823    let Some(question) = question else {
2824        return Vec::new();
2825    };
2826    let options = obj
2827        .get("options")
2828        .or_else(|| obj.get("choices"))
2829        .and_then(|v| v.as_array())
2830        .map(|arr| {
2831            arr.iter()
2832                .filter_map(normalize_question_choice)
2833                .collect::<Vec<_>>()
2834        })
2835        .unwrap_or_default();
2836    let multiple = obj
2837        .get("multiple")
2838        .or_else(|| obj.get("multi_select"))
2839        .or_else(|| obj.get("multiSelect"))
2840        .and_then(|v| v.as_bool())
2841        .unwrap_or(false);
2842    let custom = obj
2843        .get("custom")
2844        .and_then(|v| v.as_bool())
2845        .unwrap_or(options.is_empty());
2846    vec![json!({
2847        "header": obj.get("header").and_then(|v| v.as_str()).unwrap_or("Question"),
2848        "question": question,
2849        "options": options,
2850        "multiple": multiple,
2851        "custom": custom
2852    })]
2853}
2854
2855fn normalize_question_entry(entry: &Value) -> Option<Value> {
2856    if let Some(raw) = entry.as_str() {
2857        let question = raw.trim();
2858        if question.is_empty() {
2859            return None;
2860        }
2861        return Some(json!({
2862            "header": "Question",
2863            "question": question,
2864            "options": [],
2865            "multiple": false,
2866            "custom": true
2867        }));
2868    }
2869    let obj = entry.as_object()?;
2870    let question = obj
2871        .get("question")
2872        .or_else(|| obj.get("prompt"))
2873        .or_else(|| obj.get("query"))
2874        .or_else(|| obj.get("text"))
2875        .and_then(|v| v.as_str())
2876        .map(str::trim)
2877        .filter(|s| !s.is_empty())?;
2878    let options = obj
2879        .get("options")
2880        .or_else(|| obj.get("choices"))
2881        .and_then(|v| v.as_array())
2882        .map(|arr| {
2883            arr.iter()
2884                .filter_map(normalize_question_choice)
2885                .collect::<Vec<_>>()
2886        })
2887        .unwrap_or_default();
2888    let multiple = obj
2889        .get("multiple")
2890        .or_else(|| obj.get("multi_select"))
2891        .or_else(|| obj.get("multiSelect"))
2892        .and_then(|v| v.as_bool())
2893        .unwrap_or(false);
2894    let custom = obj
2895        .get("custom")
2896        .and_then(|v| v.as_bool())
2897        .unwrap_or(options.is_empty());
2898    Some(json!({
2899        "header": obj.get("header").and_then(|v| v.as_str()).unwrap_or("Question"),
2900        "question": question,
2901        "options": options,
2902        "multiple": multiple,
2903        "custom": custom
2904    }))
2905}
2906
2907fn normalize_question_choice(choice: &Value) -> Option<Value> {
2908    if let Some(label) = choice.as_str().map(str::trim).filter(|s| !s.is_empty()) {
2909        return Some(json!({
2910            "label": label,
2911            "description": ""
2912        }));
2913    }
2914    let obj = choice.as_object()?;
2915    let label = obj
2916        .get("label")
2917        .or_else(|| obj.get("title"))
2918        .or_else(|| obj.get("name"))
2919        .or_else(|| obj.get("value"))
2920        .or_else(|| obj.get("text"))
2921        .and_then(|v| {
2922            if let Some(s) = v.as_str() {
2923                Some(s.trim().to_string())
2924            } else {
2925                v.as_i64()
2926                    .map(|n| n.to_string())
2927                    .or_else(|| v.as_u64().map(|n| n.to_string()))
2928            }
2929        })
2930        .filter(|s| !s.is_empty())?;
2931    let description = obj
2932        .get("description")
2933        .or_else(|| obj.get("hint"))
2934        .or_else(|| obj.get("subtitle"))
2935        .and_then(|v| v.as_str())
2936        .unwrap_or("")
2937        .to_string();
2938    Some(json!({
2939        "label": label,
2940        "description": description
2941    }))
2942}
2943
2944struct SpawnAgentTool;
2945#[async_trait]
2946impl Tool for SpawnAgentTool {
2947    fn schema(&self) -> ToolSchema {
2948        tool_schema(
2949            "spawn_agent",
2950            "Spawn an agent-team instance through server policy enforcement.",
2951            json!({
2952                "type":"object",
2953                "properties":{
2954                    "missionID":{"type":"string"},
2955                    "parentInstanceID":{"type":"string"},
2956                    "templateID":{"type":"string"},
2957                    "role":{"type":"string","enum":["orchestrator","delegator","worker","watcher","reviewer","tester","committer"]},
2958                    "source":{"type":"string","enum":["tool_call"]},
2959                    "justification":{"type":"string"},
2960                    "budgetOverride":{"type":"object"}
2961                },
2962                "required":["role","justification"]
2963            }),
2964        )
2965    }
2966
2967    async fn execute(&self, _args: Value) -> anyhow::Result<ToolResult> {
2968        Ok(ToolResult {
2969            output: "spawn_agent must be executed through the engine runtime.".to_string(),
2970            metadata: json!({
2971                "ok": false,
2972                "code": "SPAWN_HOOK_UNAVAILABLE"
2973            }),
2974        })
2975    }
2976}
2977
struct TeamCreateTool;
#[async_trait]
impl Tool for TeamCreateTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "TeamCreate",
            "Create a coordinated team and shared task context.",
            team_create_schema(),
        )
    }

    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let input = serde_json::from_value::<TeamCreateInput>(args.clone())
            .map_err(|err| anyhow!("invalid TeamCreate args: {}", err))?;
        let now_ms = now_ms_u64();
        let paths = resolve_agent_team_paths(&args)?;
        let team_name = sanitize_team_name(&input.team_name)?;
        let team_dir = paths.team_dir(&team_name);
        // NOTE(review): team_dir itself is never passed to create_dir_all —
        // presumably the subdirectories below live inside it, so it is created
        // implicitly; confirm against AgentTeamPaths.
        fs::create_dir_all(paths.tasks_dir(&team_name)).await?;
        fs::create_dir_all(paths.mailboxes_dir(&team_name)).await?;
        fs::create_dir_all(paths.requests_dir(&team_name)).await?;

        // Persist the team's configuration record.
        let config = json!({
            "teamName": team_name,
            "description": input.description,
            "agentType": input.agent_type,
            "createdAtMs": now_ms
        });
        write_json_file(paths.config_file(&team_name), &config).await?;

        // Seed the roster with a single lead member; the name defaults to
        // "A1" unless the caller supplies a non-blank `lead_name`.
        let lead_name = args
            .get("lead_name")
            .and_then(|v| v.as_str())
            .filter(|s| !s.trim().is_empty())
            .unwrap_or("A1");
        let members = json!([{
            "name": lead_name,
            "agentType": input.agent_type.clone().unwrap_or_else(|| "lead".to_string()),
            "createdAtMs": now_ms
        }]);
        write_json_file(paths.members_file(&team_name), &members).await?;

        upsert_team_index(&paths, &team_name).await?;
        // Record the creating session's team so team-scoped tools can find it
        // later — presumably consumed by resolve_team_name; confirm.
        if let Some(session_id) = args.get("__session_id").and_then(|v| v.as_str()) {
            write_team_session_context(&paths, session_id, &team_name).await?;
        }

        Ok(ToolResult {
            output: format!("Team created: {}", team_name),
            metadata: json!({
                "ok": true,
                "team_name": team_name,
                "path": team_dir.to_string_lossy(),
                "events": [{
                    "type": "agent_team.team.created",
                    "properties": {
                        "teamName": team_name,
                        "path": team_dir.to_string_lossy(),
                    }
                }]
            }),
        })
    }
}
3042
struct TaskCreateCompatTool;
#[async_trait]
impl Tool for TaskCreateCompatTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "TaskCreate",
            "Create a task in the shared team task list.",
            task_create_schema(),
        )
    }

    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let input = serde_json::from_value::<TaskCreateInput>(args.clone())
            .map_err(|err| anyhow!("invalid TaskCreate args: {}", err))?;
        let paths = resolve_agent_team_paths(&args)?;
        // Team is resolved from args/session context rather than passed in.
        let team_name = resolve_team_name(&paths, &args).await?;
        let tasks_dir = paths.tasks_dir(&team_name);
        fs::create_dir_all(&tasks_dir).await?;
        // Next numeric id is derived from the tasks directory contents (see
        // next_task_id). NOTE(review): no locking here — concurrent creators
        // could race on the same id; confirm callers serialize creation.
        let next_id = next_task_id(&tasks_dir).await?;
        let now_ms = now_ms_u64();
        // New tasks always start "pending", unowned, and with no dependency
        // edges; caller metadata defaults to an empty object.
        let task = json!({
            "id": next_id.to_string(),
            "subject": input.subject,
            "description": input.description,
            "activeForm": input.active_form,
            "status": "pending",
            "owner": Value::Null,
            "blocks": [],
            "blockedBy": [],
            "metadata": input.metadata.unwrap_or_else(|| json!({})),
            "createdAtMs": now_ms,
            "updatedAtMs": now_ms
        });
        write_json_file(paths.task_file(&team_name, &next_id.to_string()), &task).await?;
        Ok(ToolResult {
            output: format!("Task created: {}", next_id),
            metadata: json!({
                "ok": true,
                "team_name": team_name,
                "task": task,
                "events": [{
                    "type": "agent_team.task.created",
                    "properties": {
                        "teamName": team_name,
                        "taskId": next_id.to_string(),
                    }
                }]
            }),
        })
    }
}
3094
struct TaskUpdateCompatTool;
#[async_trait]
impl Tool for TaskUpdateCompatTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "TaskUpdate",
            "Update ownership/state/dependencies of a shared task.",
            task_update_schema(),
        )
    }

    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let input = serde_json::from_value::<TaskUpdateInput>(args.clone())
            .map_err(|err| anyhow!("invalid TaskUpdate args: {}", err))?;
        let paths = resolve_agent_team_paths(&args)?;
        let team_name = resolve_team_name(&paths, &args).await?;
        let task_path = paths.task_file(&team_name, &input.task_id);
        // A missing task is a soft failure (ok=false), not an Err.
        if !task_path.exists() {
            return Ok(ToolResult {
                output: format!("Task not found: {}", input.task_id),
                metadata: json!({"ok": false, "code": "TASK_NOT_FOUND"}),
            });
        }
        let raw = fs::read_to_string(&task_path).await?;
        let mut task = serde_json::from_str::<Value>(&raw)
            .map_err(|err| anyhow!("failed parsing task {}: {}", input.task_id, err))?;
        let Some(obj) = task.as_object_mut() else {
            return Err(anyhow!("task payload is not an object"));
        };

        // Partial-update semantics: only fields present in the input change.
        if let Some(subject) = input.subject {
            obj.insert("subject".to_string(), Value::String(subject));
        }
        if let Some(description) = input.description {
            obj.insert("description".to_string(), Value::String(description));
        }
        if let Some(active) = input.active_form {
            obj.insert("activeForm".to_string(), Value::String(active));
        }
        if let Some(status) = input.status {
            // "deleted" is a pseudo-status: remove the task file and return
            // early; removal failure is deliberately ignored (best effort).
            if status == "deleted" {
                let _ = fs::remove_file(&task_path).await;
                return Ok(ToolResult {
                    output: format!("Task deleted: {}", input.task_id),
                    metadata: json!({
                        "ok": true,
                        "deleted": true,
                        "taskId": input.task_id,
                        "events": [{
                            "type": "agent_team.task.deleted",
                            "properties": {
                                "teamName": team_name,
                                "taskId": input.task_id
                            }
                        }]
                    }),
                });
            }
            obj.insert("status".to_string(), Value::String(status));
        }
        if let Some(owner) = input.owner {
            obj.insert("owner".to_string(), Value::String(owner));
        }
        // Dependency edges are additive only; merge_unique_strings combines
        // the existing list with the additions.
        if let Some(add_blocks) = input.add_blocks {
            let current = obj
                .get("blocks")
                .and_then(|v| v.as_array())
                .cloned()
                .unwrap_or_default();
            obj.insert(
                "blocks".to_string(),
                Value::Array(merge_unique_strings(current, add_blocks)),
            );
        }
        if let Some(add_blocked_by) = input.add_blocked_by {
            let current = obj
                .get("blockedBy")
                .and_then(|v| v.as_array())
                .cloned()
                .unwrap_or_default();
            obj.insert(
                "blockedBy".to_string(),
                Value::Array(merge_unique_strings(current, add_blocked_by)),
            );
        }
        // Metadata is shallow-merged: null values delete keys; when the
        // existing metadata field is not an object, it is replaced wholesale.
        if let Some(metadata) = input.metadata {
            if let Some(current) = obj.get_mut("metadata").and_then(|v| v.as_object_mut()) {
                if let Some(incoming) = metadata.as_object() {
                    for (k, v) in incoming {
                        if v.is_null() {
                            current.remove(k);
                        } else {
                            current.insert(k.clone(), v.clone());
                        }
                    }
                }
            } else {
                obj.insert("metadata".to_string(), metadata);
            }
        }
        obj.insert("updatedAtMs".to_string(), json!(now_ms_u64()));
        write_json_file(task_path, &task).await?;
        Ok(ToolResult {
            output: format!("Task updated: {}", input.task_id),
            metadata: json!({
                "ok": true,
                "team_name": team_name,
                "task": task,
                "events": [{
                    "type": "agent_team.task.updated",
                    "properties": {
                        "teamName": team_name,
                        "taskId": input.task_id
                    }
                }]
            }),
        })
    }
}
3214
3215struct TaskListCompatTool;
3216#[async_trait]
3217impl Tool for TaskListCompatTool {
3218    fn schema(&self) -> ToolSchema {
3219        tool_schema(
3220            "TaskList",
3221            "List tasks from the shared task list.",
3222            task_list_schema(),
3223        )
3224    }
3225
3226    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
3227        let _ = serde_json::from_value::<TaskListInput>(args.clone())
3228            .map_err(|err| anyhow!("invalid TaskList args: {}", err))?;
3229        let paths = resolve_agent_team_paths(&args)?;
3230        let team_name = resolve_team_name(&paths, &args).await?;
3231        let tasks = read_tasks(&paths.tasks_dir(&team_name)).await?;
3232        let mut lines = Vec::new();
3233        for task in &tasks {
3234            let id = task
3235                .get("id")
3236                .and_then(|v| v.as_str())
3237                .unwrap_or("?")
3238                .to_string();
3239            let subject = task
3240                .get("subject")
3241                .and_then(|v| v.as_str())
3242                .unwrap_or("(untitled)")
3243                .to_string();
3244            let status = task
3245                .get("status")
3246                .and_then(|v| v.as_str())
3247                .unwrap_or("pending")
3248                .to_string();
3249            let owner = task
3250                .get("owner")
3251                .and_then(|v| v.as_str())
3252                .unwrap_or("")
3253                .to_string();
3254            lines.push(format!(
3255                "{} [{}] {}{}",
3256                id,
3257                status,
3258                subject,
3259                if owner.is_empty() {
3260                    "".to_string()
3261                } else {
3262                    format!(" (owner: {})", owner)
3263                }
3264            ));
3265        }
3266        Ok(ToolResult {
3267            output: if lines.is_empty() {
3268                "No tasks.".to_string()
3269            } else {
3270                lines.join("\n")
3271            },
3272            metadata: json!({
3273                "ok": true,
3274                "team_name": team_name,
3275                "tasks": tasks
3276            }),
3277        })
3278    }
3279}
3280
struct SendMessageCompatTool;
#[async_trait]
impl Tool for SendMessageCompatTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "SendMessage",
            "Send teammate messages and coordination protocol responses.",
            send_message_schema(),
        )
    }

    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let input = serde_json::from_value::<SendMessageInput>(args.clone())
            .map_err(|err| anyhow!("invalid SendMessage args: {}", err))?;
        let paths = resolve_agent_team_paths(&args)?;
        let team_name = resolve_team_name(&paths, &args).await?;
        fs::create_dir_all(paths.mailboxes_dir(&team_name)).await?;
        // Sender identity is read from the raw args (not SendMessageInput)
        // and falls back to "team-lead" when absent or blank.
        let sender = args
            .get("sender")
            .and_then(|v| v.as_str())
            .filter(|s| !s.trim().is_empty())
            .unwrap_or("team-lead")
            .to_string();
        let now_ms = now_ms_u64();

        match input.message_type {
            // Direct message or shutdown request: delivered to exactly one
            // recipient's mailbox; both recipient and content are required.
            SendMessageType::Message | SendMessageType::ShutdownRequest => {
                let recipient = required_non_empty(input.recipient, "recipient")?;
                let content = required_non_empty(input.content, "content")?;
                append_mailbox_message(
                    &paths,
                    &team_name,
                    &recipient,
                    json!({
                        "id": format!("msg_{}", uuid_like(now_ms)),
                        "type": message_type_name(&input.message_type),
                        "from": sender,
                        "to": recipient,
                        "content": content,
                        "summary": input.summary,
                        "timestampMs": now_ms,
                        "read": false
                    }),
                )
                .await?;
                Ok(ToolResult {
                    output: "Message queued.".to_string(),
                    metadata: json!({
                        "ok": true,
                        "team_name": team_name,
                        "events": [{
                            "type": "agent_team.mailbox.enqueued",
                            "properties": {
                                "teamName": team_name,
                                "recipient": recipient,
                                "messageType": message_type_name(&input.message_type),
                            }
                        }]
                    }),
                })
            }
            // Broadcast: fan the same content out to every team member's
            // mailbox; per-recipient ids disambiguate the copies.
            SendMessageType::Broadcast => {
                let content = required_non_empty(input.content, "content")?;
                let members = read_team_member_names(&paths, &team_name).await?;
                for recipient in members {
                    append_mailbox_message(
                        &paths,
                        &team_name,
                        &recipient,
                        json!({
                            "id": format!("msg_{}_{}", uuid_like(now_ms), recipient),
                            "type": "broadcast",
                            "from": sender,
                            "to": recipient,
                            "content": content,
                            "summary": input.summary,
                            "timestampMs": now_ms,
                            "read": false
                        }),
                    )
                    .await?;
                }
                Ok(ToolResult {
                    output: "Broadcast queued.".to_string(),
                    metadata: json!({
                        "ok": true,
                        "team_name": team_name,
                        "events": [{
                            "type": "agent_team.mailbox.enqueued",
                            "properties": {
                                "teamName": team_name,
                                "recipient": "*",
                                "messageType": "broadcast",
                            }
                        }]
                    }),
                })
            }
            // Protocol responses: persisted as a request file keyed by
            // request_id rather than delivered to a mailbox.
            SendMessageType::ShutdownResponse | SendMessageType::PlanApprovalResponse => {
                let request_id = required_non_empty(input.request_id, "request_id")?;
                let request = json!({
                    "requestId": request_id,
                    "type": message_type_name(&input.message_type),
                    "from": sender,
                    "recipient": input.recipient,
                    "approve": input.approve,
                    "content": input.content,
                    "updatedAtMs": now_ms
                });
                write_json_file(paths.request_file(&team_name, &request_id), &request).await?;
                Ok(ToolResult {
                    output: "Response recorded.".to_string(),
                    metadata: json!({
                        "ok": true,
                        "team_name": team_name,
                        "request": request,
                        "events": [{
                            "type": "agent_team.request.resolved",
                            "properties": {
                                "teamName": team_name,
                                "requestId": request_id,
                                "requestType": message_type_name(&input.message_type),
                                "approve": input.approve
                            }
                        }]
                    }),
                })
            }
        }
    }
}
3412
/// Maps a `SendMessageType` variant to its stable string name.
///
/// These names are written into mailbox/request JSON and emitted in event
/// properties, so they form part of the on-disk/wire format — do not rename
/// them without a migration.
fn message_type_name(ty: &SendMessageType) -> &'static str {
    match ty {
        SendMessageType::Message => "message",
        SendMessageType::Broadcast => "broadcast",
        SendMessageType::ShutdownRequest => "shutdown_request",
        SendMessageType::ShutdownResponse => "shutdown_response",
        SendMessageType::PlanApprovalResponse => "plan_approval_response",
    }
}
3422
3423fn required_non_empty(value: Option<String>, field: &str) -> anyhow::Result<String> {
3424    let Some(v) = value
3425        .map(|s| s.trim().to_string())
3426        .filter(|s| !s.is_empty())
3427    else {
3428        return Err(anyhow!("{} is required", field));
3429    };
3430    Ok(v)
3431}
3432
3433fn resolve_agent_team_paths(args: &Value) -> anyhow::Result<AgentTeamPaths> {
3434    let workspace_root = args
3435        .get("__workspace_root")
3436        .and_then(|v| v.as_str())
3437        .map(PathBuf::from)
3438        .or_else(|| std::env::current_dir().ok())
3439        .ok_or_else(|| anyhow!("workspace root unavailable"))?;
3440    Ok(AgentTeamPaths::new(workspace_root.join(".tandem")))
3441}
3442
3443async fn resolve_team_name(paths: &AgentTeamPaths, args: &Value) -> anyhow::Result<String> {
3444    if let Some(name) = args
3445        .get("team_name")
3446        .and_then(|v| v.as_str())
3447        .map(str::trim)
3448        .filter(|s| !s.is_empty())
3449    {
3450        return sanitize_team_name(name);
3451    }
3452    if let Some(session_id) = args.get("__session_id").and_then(|v| v.as_str()) {
3453        let context_path = paths
3454            .root()
3455            .join("session-context")
3456            .join(format!("{}.json", session_id));
3457        if context_path.exists() {
3458            let raw = fs::read_to_string(context_path).await?;
3459            let parsed = serde_json::from_str::<Value>(&raw)?;
3460            if let Some(name) = parsed
3461                .get("team_name")
3462                .and_then(|v| v.as_str())
3463                .map(str::trim)
3464                .filter(|s| !s.is_empty())
3465            {
3466                return sanitize_team_name(name);
3467            }
3468        }
3469    }
3470    Err(anyhow!(
3471        "team_name is required (no active team context for this session)"
3472    ))
3473}
3474
3475fn sanitize_team_name(input: &str) -> anyhow::Result<String> {
3476    let trimmed = input.trim();
3477    if trimmed.is_empty() {
3478        return Err(anyhow!("team_name cannot be empty"));
3479    }
3480    let sanitized = trimmed
3481        .chars()
3482        .map(|c| {
3483            if c.is_ascii_alphanumeric() || c == '-' || c == '_' {
3484                c
3485            } else {
3486                '-'
3487            }
3488        })
3489        .collect::<String>();
3490    Ok(sanitized)
3491}
3492
3493fn sanitize_member_name(input: &str) -> anyhow::Result<String> {
3494    let trimmed = input.trim();
3495    if trimmed.is_empty() {
3496        return Err(anyhow!("member name cannot be empty"));
3497    }
3498    if let Some(rest) = trimmed
3499        .strip_prefix('A')
3500        .or_else(|| trimmed.strip_prefix('a'))
3501    {
3502        if let Ok(n) = rest.parse::<u32>() {
3503            if n > 0 {
3504                return Ok(format!("A{}", n));
3505            }
3506        }
3507    }
3508    let sanitized = trimmed
3509        .chars()
3510        .map(|c| {
3511            if c.is_ascii_alphanumeric() || c == '-' || c == '_' {
3512                c
3513            } else {
3514                '-'
3515            }
3516        })
3517        .collect::<String>();
3518    if sanitized.is_empty() {
3519        return Err(anyhow!("member name cannot be empty"));
3520    }
3521    Ok(sanitized)
3522}
3523
3524async fn next_default_member_name(
3525    paths: &AgentTeamPaths,
3526    team_name: &str,
3527) -> anyhow::Result<String> {
3528    let names = read_team_member_names(paths, team_name).await?;
3529    let mut max_index = 1u32;
3530    for name in names {
3531        let trimmed = name.trim();
3532        let Some(rest) = trimmed
3533            .strip_prefix('A')
3534            .or_else(|| trimmed.strip_prefix('a'))
3535        else {
3536            continue;
3537        };
3538        let Ok(index) = rest.parse::<u32>() else {
3539            continue;
3540        };
3541        if index > max_index {
3542            max_index = index;
3543        }
3544    }
3545    Ok(format!("A{}", max_index.saturating_add(1)))
3546}
3547
3548async fn write_json_file(path: PathBuf, value: &Value) -> anyhow::Result<()> {
3549    if let Some(parent) = path.parent() {
3550        fs::create_dir_all(parent).await?;
3551    }
3552    fs::write(path, serde_json::to_vec_pretty(value)?).await?;
3553    Ok(())
3554}
3555
3556async fn upsert_team_index(paths: &AgentTeamPaths, team_name: &str) -> anyhow::Result<()> {
3557    let index_path = paths.index_file();
3558    let mut teams = if index_path.exists() {
3559        let raw = fs::read_to_string(&index_path).await?;
3560        serde_json::from_str::<Vec<String>>(&raw).unwrap_or_default()
3561    } else {
3562        Vec::new()
3563    };
3564    if !teams.iter().any(|team| team == team_name) {
3565        teams.push(team_name.to_string());
3566        teams.sort();
3567    }
3568    write_json_file(index_path, &json!(teams)).await
3569}
3570
3571async fn write_team_session_context(
3572    paths: &AgentTeamPaths,
3573    session_id: &str,
3574    team_name: &str,
3575) -> anyhow::Result<()> {
3576    let context_path = paths
3577        .root()
3578        .join("session-context")
3579        .join(format!("{}.json", session_id));
3580    write_json_file(context_path, &json!({ "team_name": team_name })).await
3581}
3582
3583async fn next_task_id(tasks_dir: &Path) -> anyhow::Result<u64> {
3584    let mut max_id = 0u64;
3585    let mut entries = match fs::read_dir(tasks_dir).await {
3586        Ok(entries) => entries,
3587        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(1),
3588        Err(err) => return Err(err.into()),
3589    };
3590    while let Some(entry) = entries.next_entry().await? {
3591        let path = entry.path();
3592        if !path.is_file() {
3593            continue;
3594        }
3595        let Some(stem) = path.file_stem().and_then(|s| s.to_str()) else {
3596            continue;
3597        };
3598        if let Ok(id) = stem.parse::<u64>() {
3599            max_id = max_id.max(id);
3600        }
3601    }
3602    Ok(max_id + 1)
3603}
3604
3605fn merge_unique_strings(current: Vec<Value>, incoming: Vec<String>) -> Vec<Value> {
3606    let mut seen = HashSet::new();
3607    let mut out = Vec::new();
3608    for value in current {
3609        if let Some(text) = value.as_str() {
3610            let text = text.to_string();
3611            if seen.insert(text.clone()) {
3612                out.push(Value::String(text));
3613            }
3614        }
3615    }
3616    for value in incoming {
3617        if seen.insert(value.clone()) {
3618            out.push(Value::String(value));
3619        }
3620    }
3621    out
3622}
3623
3624async fn read_tasks(tasks_dir: &Path) -> anyhow::Result<Vec<Value>> {
3625    let mut tasks = Vec::new();
3626    let mut entries = match fs::read_dir(tasks_dir).await {
3627        Ok(entries) => entries,
3628        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(tasks),
3629        Err(err) => return Err(err.into()),
3630    };
3631    while let Some(entry) = entries.next_entry().await? {
3632        let path = entry.path();
3633        if !path.is_file() {
3634            continue;
3635        }
3636        let raw = fs::read_to_string(path).await?;
3637        let task = serde_json::from_str::<Value>(&raw)?;
3638        tasks.push(task);
3639    }
3640    tasks.sort_by_key(|task| {
3641        task.get("id")
3642            .and_then(|v| v.as_str())
3643            .and_then(|s| s.parse::<u64>().ok())
3644            .unwrap_or(0)
3645    });
3646    Ok(tasks)
3647}
3648
3649async fn append_mailbox_message(
3650    paths: &AgentTeamPaths,
3651    team_name: &str,
3652    recipient: &str,
3653    message: Value,
3654) -> anyhow::Result<()> {
3655    let mailbox_path = paths.mailbox_file(team_name, recipient);
3656    if let Some(parent) = mailbox_path.parent() {
3657        fs::create_dir_all(parent).await?;
3658    }
3659    let line = format!("{}\n", serde_json::to_string(&message)?);
3660    if mailbox_path.exists() {
3661        use tokio::io::AsyncWriteExt;
3662        let mut file = tokio::fs::OpenOptions::new()
3663            .create(true)
3664            .append(true)
3665            .open(mailbox_path)
3666            .await?;
3667        file.write_all(line.as_bytes()).await?;
3668        file.flush().await?;
3669    } else {
3670        fs::write(mailbox_path, line).await?;
3671    }
3672    Ok(())
3673}
3674
3675async fn read_team_member_names(
3676    paths: &AgentTeamPaths,
3677    team_name: &str,
3678) -> anyhow::Result<Vec<String>> {
3679    let members_path = paths.members_file(team_name);
3680    if !members_path.exists() {
3681        return Ok(Vec::new());
3682    }
3683    let raw = fs::read_to_string(members_path).await?;
3684    let parsed = serde_json::from_str::<Value>(&raw)?;
3685    let Some(items) = parsed.as_array() else {
3686        return Ok(Vec::new());
3687    };
3688    let mut out = Vec::new();
3689    for item in items {
3690        if let Some(name) = item
3691            .get("name")
3692            .and_then(|v| v.as_str())
3693            .map(str::trim)
3694            .filter(|s| !s.is_empty())
3695        {
3696            out.push(name.to_string());
3697        }
3698    }
3699    Ok(out)
3700}
3701
3702async fn upsert_team_member(
3703    paths: &AgentTeamPaths,
3704    team_name: &str,
3705    member_name: &str,
3706    agent_type: Option<String>,
3707    model: Option<String>,
3708) -> anyhow::Result<bool> {
3709    let members_path = paths.members_file(team_name);
3710    let mut members = if members_path.exists() {
3711        let raw = fs::read_to_string(&members_path).await?;
3712        serde_json::from_str::<Value>(&raw)?
3713            .as_array()
3714            .cloned()
3715            .unwrap_or_default()
3716    } else {
3717        Vec::new()
3718    };
3719    let already_present = members.iter().any(|item| {
3720        item.get("name")
3721            .and_then(|v| v.as_str())
3722            .map(|s| s == member_name)
3723            .unwrap_or(false)
3724    });
3725    if already_present {
3726        return Ok(false);
3727    }
3728    members.push(json!({
3729        "name": member_name,
3730        "agentType": agent_type,
3731        "model": model,
3732        "createdAtMs": now_ms_u64()
3733    }));
3734    write_json_file(members_path, &Value::Array(members)).await?;
3735    Ok(true)
3736}
3737
/// Current wall-clock time as milliseconds since the Unix epoch.
///
/// Returns 0 if the system clock reads as earlier than the epoch (the
/// `duration_since` error case) rather than panicking.
fn now_ms_u64() -> u64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_or(0, |elapsed| elapsed.as_millis() as u64)
}
3744
/// Renders `seed` as a lowercase hex token. Despite the name this is NOT a
/// UUID: uniqueness is only as good as the seed (callers pass a millisecond
/// timestamp), so collisions are possible within the same millisecond.
fn uuid_like(seed: u64) -> String {
    use std::fmt::Write;
    let mut token = String::new();
    write!(token, "{seed:x}").expect("writing to a String cannot fail");
    token
}
3748
3749struct MemorySearchTool;
3750#[async_trait]
3751impl Tool for MemorySearchTool {
3752    fn schema(&self) -> ToolSchema {
3753        tool_schema(
3754            "memory_search",
3755            "Search tandem memory across session/project/global tiers. If scope fields are omitted, the tool defaults to the current session/project context and may include global memory when policy allows it.",
3756            json!({
3757                "type":"object",
3758                "properties":{
3759                    "query":{"type":"string"},
3760                    "session_id":{"type":"string"},
3761                    "project_id":{"type":"string"},
3762                    "tier":{"type":"string","enum":["session","project","global"]},
3763                    "limit":{"type":"integer","minimum":1,"maximum":20},
3764                    "allow_global":{"type":"boolean"}
3765                },
3766                "required":["query"]
3767            }),
3768        )
3769    }
3770
3771    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
3772        let query = args
3773            .get("query")
3774            .or_else(|| args.get("q"))
3775            .and_then(|v| v.as_str())
3776            .map(str::trim)
3777            .unwrap_or("");
3778        if query.is_empty() {
3779            return Ok(ToolResult {
3780                output: "memory_search requires a non-empty query".to_string(),
3781                metadata: json!({"ok": false, "reason": "missing_query"}),
3782            });
3783        }
3784
3785        let session_id = memory_session_id(&args);
3786        let project_id = memory_project_id(&args);
3787        let allow_global = global_memory_enabled(&args);
3788        if session_id.is_none() && project_id.is_none() && !allow_global {
3789            return Ok(ToolResult {
3790                output: "memory_search requires a current session/project context or global memory enabled by policy"
3791                    .to_string(),
3792                metadata: json!({"ok": false, "reason": "missing_scope"}),
3793            });
3794        }
3795
3796        let tier = match args
3797            .get("tier")
3798            .and_then(|v| v.as_str())
3799            .map(|s| s.trim().to_ascii_lowercase())
3800        {
3801            Some(t) if t == "session" => Some(MemoryTier::Session),
3802            Some(t) if t == "project" => Some(MemoryTier::Project),
3803            Some(t) if t == "global" => Some(MemoryTier::Global),
3804            Some(_) => {
3805                return Ok(ToolResult {
3806                    output: "memory_search tier must be one of: session, project, global"
3807                        .to_string(),
3808                    metadata: json!({"ok": false, "reason": "invalid_tier"}),
3809                });
3810            }
3811            None => None,
3812        };
3813        if matches!(tier, Some(MemoryTier::Session)) && session_id.is_none() {
3814            return Ok(ToolResult {
3815                output: "tier=session requires session_id".to_string(),
3816                metadata: json!({"ok": false, "reason": "missing_session_scope"}),
3817            });
3818        }
3819        if matches!(tier, Some(MemoryTier::Project)) && project_id.is_none() {
3820            return Ok(ToolResult {
3821                output: "tier=project requires project_id".to_string(),
3822                metadata: json!({"ok": false, "reason": "missing_project_scope"}),
3823            });
3824        }
3825        if matches!(tier, Some(MemoryTier::Global)) && !allow_global {
3826            return Ok(ToolResult {
3827                output: "tier=global requires allow_global=true".to_string(),
3828                metadata: json!({"ok": false, "reason": "global_scope_disabled"}),
3829            });
3830        }
3831
3832        let limit = args
3833            .get("limit")
3834            .and_then(|v| v.as_i64())
3835            .unwrap_or(5)
3836            .clamp(1, 20);
3837
3838        let db_path = resolve_memory_db_path(&args);
3839        let db_exists = db_path.exists();
3840        if !db_exists {
3841            return Ok(ToolResult {
3842                output: "memory database not found".to_string(),
3843                metadata: json!({
3844                    "ok": false,
3845                    "reason": "memory_db_missing",
3846                    "db_path": db_path,
3847                }),
3848            });
3849        }
3850
3851        let manager = MemoryManager::new(&db_path).await?;
3852        let health = manager.embedding_health().await;
3853        if health.status != "ok" {
3854            return Ok(ToolResult {
3855                output: "memory embeddings unavailable; semantic search is disabled".to_string(),
3856                metadata: json!({
3857                    "ok": false,
3858                    "reason": "embeddings_unavailable",
3859                    "embedding_status": health.status,
3860                    "embedding_reason": health.reason,
3861                }),
3862            });
3863        }
3864
3865        let mut results: Vec<MemorySearchResult> = Vec::new();
3866        match tier {
3867            Some(MemoryTier::Session) => {
3868                results.extend(
3869                    manager
3870                        .search(
3871                            query,
3872                            Some(MemoryTier::Session),
3873                            project_id.as_deref(),
3874                            session_id.as_deref(),
3875                            Some(limit),
3876                        )
3877                        .await?,
3878                );
3879            }
3880            Some(MemoryTier::Project) => {
3881                results.extend(
3882                    manager
3883                        .search(
3884                            query,
3885                            Some(MemoryTier::Project),
3886                            project_id.as_deref(),
3887                            session_id.as_deref(),
3888                            Some(limit),
3889                        )
3890                        .await?,
3891                );
3892            }
3893            Some(MemoryTier::Global) => {
3894                results.extend(
3895                    manager
3896                        .search(query, Some(MemoryTier::Global), None, None, Some(limit))
3897                        .await?,
3898                );
3899            }
3900            _ => {
3901                if session_id.is_some() {
3902                    results.extend(
3903                        manager
3904                            .search(
3905                                query,
3906                                Some(MemoryTier::Session),
3907                                project_id.as_deref(),
3908                                session_id.as_deref(),
3909                                Some(limit),
3910                            )
3911                            .await?,
3912                    );
3913                }
3914                if project_id.is_some() {
3915                    results.extend(
3916                        manager
3917                            .search(
3918                                query,
3919                                Some(MemoryTier::Project),
3920                                project_id.as_deref(),
3921                                session_id.as_deref(),
3922                                Some(limit),
3923                            )
3924                            .await?,
3925                    );
3926                }
3927                if allow_global {
3928                    results.extend(
3929                        manager
3930                            .search(query, Some(MemoryTier::Global), None, None, Some(limit))
3931                            .await?,
3932                    );
3933                }
3934            }
3935        }
3936
3937        let mut dedup: HashMap<String, MemorySearchResult> = HashMap::new();
3938        for result in results {
3939            match dedup.get(&result.chunk.id) {
3940                Some(existing) if existing.similarity >= result.similarity => {}
3941                _ => {
3942                    dedup.insert(result.chunk.id.clone(), result);
3943                }
3944            }
3945        }
3946        let mut merged = dedup.into_values().collect::<Vec<_>>();
3947        merged.sort_by(|a, b| b.similarity.total_cmp(&a.similarity));
3948        merged.truncate(limit as usize);
3949
3950        let output_rows = merged
3951            .iter()
3952            .map(|item| {
3953                json!({
3954                    "chunk_id": item.chunk.id,
3955                    "tier": item.chunk.tier.to_string(),
3956                    "session_id": item.chunk.session_id,
3957                    "project_id": item.chunk.project_id,
3958                    "source": item.chunk.source,
3959                    "similarity": item.similarity,
3960                    "content": item.chunk.content,
3961                    "created_at": item.chunk.created_at,
3962                })
3963            })
3964            .collect::<Vec<_>>();
3965
3966        Ok(ToolResult {
3967            output: serde_json::to_string_pretty(&output_rows).unwrap_or_default(),
3968            metadata: json!({
3969                "ok": true,
3970                "count": output_rows.len(),
3971                "limit": limit,
3972                "query": query,
3973                "session_id": session_id,
3974                "project_id": project_id,
3975                "allow_global": allow_global,
3976                "embedding_status": health.status,
3977                "embedding_reason": health.reason,
3978                "strict_scope": !allow_global,
3979            }),
3980        })
3981    }
3982}
3983
/// Tool that persists a chunk of text into tandem's tiered memory store.
struct MemoryStoreTool;
#[async_trait]
impl Tool for MemoryStoreTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "memory_store",
            "Store memory chunks in session/project/global tiers. If scope is omitted, the tool defaults to the current project, then session, and only uses global memory when policy allows it.",
            json!({
                "type":"object",
                "properties":{
                    "content":{"type":"string"},
                    "tier":{"type":"string","enum":["session","project","global"]},
                    "session_id":{"type":"string"},
                    "project_id":{"type":"string"},
                    "source":{"type":"string"},
                    "metadata":{"type":"object"},
                    "allow_global":{"type":"boolean"}
                },
                "required":["content"]
            }),
        )
    }

    /// Validates the request, selects a memory tier, and stores the content.
    ///
    /// Tier default when omitted: project first, then session, then global
    /// (only when policy allows). All validation failures are returned as
    /// `ok: false` ToolResults with a structured `reason` code, not as `Err`.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let content = args
            .get("content")
            .and_then(|v| v.as_str())
            .map(str::trim)
            .unwrap_or("");
        if content.is_empty() {
            return Ok(ToolResult {
                output: "memory_store requires non-empty content".to_string(),
                metadata: json!({"ok": false, "reason": "missing_content"}),
            });
        }

        // Scope ids and global-memory policy come from helpers that inspect
        // the args (presumably host-injected fields — see their definitions).
        let session_id = memory_session_id(&args);
        let project_id = memory_project_id(&args);
        let allow_global = global_memory_enabled(&args);

        // Explicit tier takes precedence; an unknown string is rejected.
        let tier = match args
            .get("tier")
            .and_then(|v| v.as_str())
            .map(|s| s.trim().to_ascii_lowercase())
        {
            Some(t) if t == "session" => MemoryTier::Session,
            Some(t) if t == "project" => MemoryTier::Project,
            Some(t) if t == "global" => MemoryTier::Global,
            Some(_) => {
                return Ok(ToolResult {
                    output: "memory_store tier must be one of: session, project, global"
                        .to_string(),
                    metadata: json!({"ok": false, "reason": "invalid_tier"}),
                });
            }
            // No tier given: widest available scope wins — project over
            // session, global only when policy permits.
            None => {
                if project_id.is_some() {
                    MemoryTier::Project
                } else if session_id.is_some() {
                    MemoryTier::Session
                } else if allow_global {
                    MemoryTier::Global
                } else {
                    return Ok(ToolResult {
                        output: "memory_store requires a current session/project context or global memory enabled by policy"
                            .to_string(),
                        metadata: json!({"ok": false, "reason": "missing_scope"}),
                    });
                }
            }
        };

        // An explicitly requested tier must be backed by the matching scope
        // or policy.
        if matches!(tier, MemoryTier::Session) && session_id.is_none() {
            return Ok(ToolResult {
                output: "tier=session requires session_id".to_string(),
                metadata: json!({"ok": false, "reason": "missing_session_scope"}),
            });
        }
        if matches!(tier, MemoryTier::Project) && project_id.is_none() {
            return Ok(ToolResult {
                output: "tier=project requires project_id".to_string(),
                metadata: json!({"ok": false, "reason": "missing_project_scope"}),
            });
        }
        if matches!(tier, MemoryTier::Global) && !allow_global {
            return Ok(ToolResult {
                output: "tier=global requires allow_global=true".to_string(),
                metadata: json!({"ok": false, "reason": "global_scope_disabled"}),
            });
        }

        // Storing requires a healthy embedding backend; without it the chunk
        // could not be vectorized for later semantic search.
        let db_path = resolve_memory_db_path(&args);
        let manager = MemoryManager::new(&db_path).await?;
        let health = manager.embedding_health().await;
        if health.status != "ok" {
            return Ok(ToolResult {
                output: "memory embeddings unavailable; semantic memory store is disabled"
                    .to_string(),
                metadata: json!({
                    "ok": false,
                    "reason": "embeddings_unavailable",
                    "embedding_status": health.status,
                    "embedding_reason": health.reason,
                }),
            });
        }

        // Default provenance label when the caller does not supply one.
        let source = args
            .get("source")
            .and_then(|v| v.as_str())
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .unwrap_or("agent_note")
            .to_string();
        let metadata = args.get("metadata").cloned();

        let request = tandem_memory::types::StoreMessageRequest {
            content: content.to_string(),
            tier,
            session_id: session_id.clone(),
            project_id: project_id.clone(),
            source,
            // File-provenance fields are unused for ad-hoc notes.
            source_path: None,
            source_mtime: None,
            source_size: None,
            source_hash: None,
            metadata,
        };
        // store_message may split the content into multiple chunks; it
        // returns one id per stored chunk.
        let chunk_ids = manager.store_message(request).await?;

        Ok(ToolResult {
            output: format!("stored {} chunk(s) in {} memory", chunk_ids.len(), tier),
            metadata: json!({
                "ok": true,
                "chunk_ids": chunk_ids,
                "count": chunk_ids.len(),
                "tier": tier.to_string(),
                "session_id": session_id,
                "project_id": project_id,
                "allow_global": allow_global,
                "embedding_status": health.status,
                "embedding_reason": health.reason,
                "db_path": db_path,
            }),
        })
    }
}
4131
4132struct MemoryListTool;
4133#[async_trait]
4134impl Tool for MemoryListTool {
4135    fn schema(&self) -> ToolSchema {
4136        tool_schema(
4137            "memory_list",
4138            "List stored memory chunks for auditing and knowledge-base browsing.",
4139            json!({
4140                "type":"object",
4141                "properties":{
4142                    "tier":{"type":"string","enum":["session","project","global","all"]},
4143                    "session_id":{"type":"string"},
4144                    "project_id":{"type":"string"},
4145                    "limit":{"type":"integer","minimum":1,"maximum":200},
4146                    "allow_global":{"type":"boolean"}
4147                }
4148            }),
4149        )
4150    }
4151
    /// List stored memory chunks visible in the current scope.
    ///
    /// Scope comes from `session_id`/`project_id` in `args` (or their
    /// injected `__`-prefixed variants) plus the global-memory policy. The
    /// `tier` argument selects which store(s) to read: "session", "project",
    /// "global", or "all" (default). Results are returned newest-first,
    /// capped by `limit` (default 50, clamped to 1..=200).
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let session_id = memory_session_id(&args);
        let project_id = memory_project_id(&args);
        let allow_global = global_memory_enabled(&args);
        // Clamp the caller-provided limit into a sane range before casting.
        let limit = args
            .get("limit")
            .and_then(|v| v.as_i64())
            .unwrap_or(50)
            .clamp(1, 200) as usize;

        let tier = args
            .get("tier")
            .and_then(|v| v.as_str())
            .map(|s| s.trim().to_ascii_lowercase())
            .unwrap_or_else(|| "all".to_string());
        // Explicit global reads are refused when policy disables global memory.
        if tier == "global" && !allow_global {
            return Ok(ToolResult {
                output: "tier=global requires allow_global=true".to_string(),
                metadata: json!({"ok": false, "reason": "global_scope_disabled"}),
            });
        }
        // With no session/project context the only possible source would be
        // global memory, which must be permitted by policy.
        if session_id.is_none() && project_id.is_none() && tier != "global" && !allow_global {
            return Ok(ToolResult {
                output: "memory_list requires a current session/project context or global memory enabled by policy".to_string(),
                metadata: json!({"ok": false, "reason": "missing_scope"}),
            });
        }

        let db_path = resolve_memory_db_path(&args);
        let manager = MemoryManager::new(&db_path).await?;

        // Collect chunks from each tier the request selects and is scoped for.
        let mut chunks: Vec<tandem_memory::types::MemoryChunk> = Vec::new();
        match tier.as_str() {
            "session" => {
                let Some(sid) = session_id.as_deref() else {
                    return Ok(ToolResult {
                        output: "tier=session requires session_id".to_string(),
                        metadata: json!({"ok": false, "reason": "missing_session_scope"}),
                    });
                };
                chunks.extend(manager.db().get_session_chunks(sid).await?);
            }
            "project" => {
                let Some(pid) = project_id.as_deref() else {
                    return Ok(ToolResult {
                        output: "tier=project requires project_id".to_string(),
                        metadata: json!({"ok": false, "reason": "missing_project_scope"}),
                    });
                };
                chunks.extend(manager.db().get_project_chunks(pid).await?);
            }
            "global" => {
                chunks.extend(manager.db().get_global_chunks(limit as i64).await?);
            }
            "all" => {
                // Best-effort union: include whichever scopes are available.
                if let Some(sid) = session_id.as_deref() {
                    chunks.extend(manager.db().get_session_chunks(sid).await?);
                }
                if let Some(pid) = project_id.as_deref() {
                    chunks.extend(manager.db().get_project_chunks(pid).await?);
                }
                if allow_global {
                    chunks.extend(manager.db().get_global_chunks(limit as i64).await?);
                }
            }
            _ => {
                return Ok(ToolResult {
                    output: "memory_list tier must be one of: session, project, global, all"
                        .to_string(),
                    metadata: json!({"ok": false, "reason": "invalid_tier"}),
                });
            }
        }

        // Newest first, then trim the combined result set to the limit.
        chunks.sort_by(|a, b| b.created_at.cmp(&a.created_at));
        chunks.truncate(limit);
        let rows = chunks
            .iter()
            .map(|chunk| {
                json!({
                    "chunk_id": chunk.id,
                    "tier": chunk.tier.to_string(),
                    "session_id": chunk.session_id,
                    "project_id": chunk.project_id,
                    "source": chunk.source,
                    "content": chunk.content,
                    "created_at": chunk.created_at,
                    "metadata": chunk.metadata,
                })
            })
            .collect::<Vec<_>>();

        Ok(ToolResult {
            output: serde_json::to_string_pretty(&rows).unwrap_or_default(),
            metadata: json!({
                "ok": true,
                "count": rows.len(),
                "limit": limit,
                "tier": tier,
                "session_id": session_id,
                "project_id": project_id,
                "allow_global": allow_global,
                "db_path": db_path,
            }),
        })
    }
4258}
4259
/// Tool that deletes a single stored memory chunk, enforcing scope policy.
struct MemoryDeleteTool;
#[async_trait]
impl Tool for MemoryDeleteTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "memory_delete",
            "Delete a stored memory chunk from session/project/global memory within the current allowed scope.",
            json!({
                "type":"object",
                "properties":{
                    "chunk_id":{"type":"string"},
                    "id":{"type":"string"},
                    "tier":{"type":"string","enum":["session","project","global"]},
                    "session_id":{"type":"string"},
                    "project_id":{"type":"string"},
                    "allow_global":{"type":"boolean"}
                },
                "required":["chunk_id"]
            }),
        )
    }

    /// Delete the chunk named by `chunk_id` (or its alias `id`).
    ///
    /// When `tier` is omitted it is inferred from the available scope:
    /// project first, then session, then global (only if permitted by
    /// policy). The chosen tier is then validated against the scope it
    /// requires before the delete is attempted.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        // `id` is accepted as an alias for `chunk_id`.
        let chunk_id = args
            .get("chunk_id")
            .or_else(|| args.get("id"))
            .and_then(|v| v.as_str())
            .map(str::trim)
            .unwrap_or("");
        if chunk_id.is_empty() {
            return Ok(ToolResult {
                output: "memory_delete requires chunk_id".to_string(),
                metadata: json!({"ok": false, "reason": "missing_chunk_id"}),
            });
        }

        let session_id = memory_session_id(&args);
        let project_id = memory_project_id(&args);
        let allow_global = global_memory_enabled(&args);

        let tier = match args
            .get("tier")
            .and_then(|v| v.as_str())
            .map(|s| s.trim().to_ascii_lowercase())
        {
            Some(t) if t == "session" => MemoryTier::Session,
            Some(t) if t == "project" => MemoryTier::Project,
            Some(t) if t == "global" => MemoryTier::Global,
            Some(_) => {
                return Ok(ToolResult {
                    output: "memory_delete tier must be one of: session, project, global"
                        .to_string(),
                    metadata: json!({"ok": false, "reason": "invalid_tier"}),
                });
            }
            None => {
                // No explicit tier: infer from available scope, preferring
                // project over session, and global only when allowed.
                if project_id.is_some() {
                    MemoryTier::Project
                } else if session_id.is_some() {
                    MemoryTier::Session
                } else if allow_global {
                    MemoryTier::Global
                } else {
                    return Ok(ToolResult {
                        output: "memory_delete requires a current session/project context or global memory enabled by policy".to_string(),
                        metadata: json!({"ok": false, "reason": "missing_scope"}),
                    });
                }
            }
        };

        // An explicitly requested tier must still have the scope it needs.
        if matches!(tier, MemoryTier::Session) && session_id.is_none() {
            return Ok(ToolResult {
                output: "tier=session requires session_id".to_string(),
                metadata: json!({"ok": false, "reason": "missing_session_scope"}),
            });
        }
        if matches!(tier, MemoryTier::Project) && project_id.is_none() {
            return Ok(ToolResult {
                output: "tier=project requires project_id".to_string(),
                metadata: json!({"ok": false, "reason": "missing_project_scope"}),
            });
        }
        if matches!(tier, MemoryTier::Global) && !allow_global {
            return Ok(ToolResult {
                output: "tier=global requires allow_global=true".to_string(),
                metadata: json!({"ok": false, "reason": "global_scope_disabled"}),
            });
        }

        let db_path = resolve_memory_db_path(&args);
        let manager = MemoryManager::new(&db_path).await?;
        // `deleted` is the number of rows removed; 0 means not found.
        let deleted = manager
            .db()
            .delete_chunk(tier, chunk_id, project_id.as_deref(), session_id.as_deref())
            .await?;

        if deleted == 0 {
            return Ok(ToolResult {
                output: format!("memory chunk `{chunk_id}` not found in {tier} memory"),
                metadata: json!({
                    "ok": false,
                    "reason": "not_found",
                    "chunk_id": chunk_id,
                    "tier": tier.to_string(),
                    "session_id": session_id,
                    "project_id": project_id,
                    "allow_global": allow_global,
                    "db_path": db_path,
                }),
            });
        }

        Ok(ToolResult {
            output: format!("deleted memory chunk `{chunk_id}` from {tier} memory"),
            metadata: json!({
                "ok": true,
                "deleted": true,
                "chunk_id": chunk_id,
                "count": deleted,
                "tier": tier.to_string(),
                "session_id": session_id,
                "project_id": project_id,
                "allow_global": allow_global,
                "db_path": db_path,
            }),
        })
    }
}
4389
4390fn resolve_memory_db_path(args: &Value) -> PathBuf {
4391    if let Some(path) = args
4392        .get("__memory_db_path")
4393        .and_then(|v| v.as_str())
4394        .map(str::trim)
4395        .filter(|s| !s.is_empty())
4396    {
4397        return PathBuf::from(path);
4398    }
4399    if let Ok(path) = std::env::var("TANDEM_MEMORY_DB_PATH") {
4400        let trimmed = path.trim();
4401        if !trimmed.is_empty() {
4402            return PathBuf::from(trimmed);
4403        }
4404    }
4405    if let Ok(state_dir) = std::env::var("TANDEM_STATE_DIR") {
4406        let trimmed = state_dir.trim();
4407        if !trimmed.is_empty() {
4408            return PathBuf::from(trimmed).join("memory.sqlite");
4409        }
4410    }
4411    if let Some(data_dir) = dirs::data_dir() {
4412        return data_dir.join("tandem").join("memory.sqlite");
4413    }
4414    PathBuf::from("memory.sqlite")
4415}
4416
/// Ceiling on which memory tier a caller is allowed to see.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum MemoryVisibleScope {
    Session,
    Project,
    Global,
}

/// Parse a scope name, tolerating surrounding whitespace and any casing.
/// "workspace" is accepted as an alias for the project scope; anything
/// unrecognized yields `None`.
fn parse_memory_visible_scope(raw: &str) -> Option<MemoryVisibleScope> {
    let normalized = raw.trim().to_ascii_lowercase();
    if normalized == "session" {
        Some(MemoryVisibleScope::Session)
    } else if normalized == "project" || normalized == "workspace" {
        Some(MemoryVisibleScope::Project)
    } else if normalized == "global" {
        Some(MemoryVisibleScope::Global)
    } else {
        None
    }
}
4432
4433fn memory_visible_scope(args: &Value) -> MemoryVisibleScope {
4434    if let Some(scope) = args
4435        .get("__memory_max_visible_scope")
4436        .and_then(|v| v.as_str())
4437        .and_then(parse_memory_visible_scope)
4438    {
4439        return scope;
4440    }
4441    if let Ok(raw) = std::env::var("TANDEM_MEMORY_MAX_VISIBLE_SCOPE") {
4442        if let Some(scope) = parse_memory_visible_scope(&raw) {
4443            return scope;
4444        }
4445    }
4446    MemoryVisibleScope::Global
4447}
4448
4449fn memory_session_id(args: &Value) -> Option<String> {
4450    args.get("session_id")
4451        .or_else(|| args.get("__session_id"))
4452        .and_then(|v| v.as_str())
4453        .map(str::trim)
4454        .filter(|s| !s.is_empty())
4455        .map(ToString::to_string)
4456}
4457
4458fn memory_project_id(args: &Value) -> Option<String> {
4459    args.get("project_id")
4460        .or_else(|| args.get("__project_id"))
4461        .and_then(|v| v.as_str())
4462        .map(str::trim)
4463        .filter(|s| !s.is_empty())
4464        .map(ToString::to_string)
4465}
4466
4467fn global_memory_enabled(args: &Value) -> bool {
4468    if memory_visible_scope(args) != MemoryVisibleScope::Global {
4469        return false;
4470    }
4471    if let Some(explicit) = args.get("allow_global").and_then(|v| v.as_bool()) {
4472        return explicit;
4473    }
4474    match std::env::var("TANDEM_ENABLE_GLOBAL_MEMORY") {
4475        Ok(raw) => !matches!(
4476            raw.trim().to_ascii_lowercase().as_str(),
4477            "0" | "false" | "no" | "off"
4478        ),
4479        Err(_) => true,
4480    }
4481}
4482
/// Tool that lists installed skills or loads one skill's full content.
struct SkillTool;
#[async_trait]
impl Tool for SkillTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "skill",
            "List or load installed Tandem skills. Call without name to list available skills; provide name to load full SKILL.md content.",
            json!({"type":"object","properties":{"name":{"type":"string"}}}),
        )
    }
    /// With no `name`: list skills (filtered by any `allowed_skills`
    /// allow-list in args) as an XML-ish summary. With a `name`: enforce the
    /// allow-list, then load and return the skill's full content plus its
    /// file listing.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let workspace_root = std::env::current_dir().ok();
        let service = SkillService::for_workspace(workspace_root);
        let requested = args["name"].as_str().map(str::trim).unwrap_or("");
        let allowed_skills = parse_allowed_skills(&args);

        // Listing mode: no specific skill was requested.
        if requested.is_empty() {
            let mut skills = service.list_skills().unwrap_or_default();
            if let Some(allowed) = &allowed_skills {
                skills.retain(|s| allowed.contains(&s.name));
            }
            if skills.is_empty() {
                return Ok(ToolResult {
                    output: "No skills available.".to_string(),
                    metadata: json!({"count": 0, "skills": []}),
                });
            }
            let mut lines = vec![
                "Available Tandem skills:".to_string(),
                "<available_skills>".to_string(),
            ];
            for skill in &skills {
                lines.push("  <skill>".to_string());
                lines.push(format!("    <name>{}</name>", skill.name));
                // Descriptions are free text, so escape XML special chars.
                lines.push(format!(
                    "    <description>{}</description>",
                    escape_xml_text(&skill.description)
                ));
                lines.push(format!("    <location>{}</location>", skill.path));
                lines.push("  </skill>".to_string());
            }
            lines.push("</available_skills>".to_string());
            return Ok(ToolResult {
                output: lines.join("\n"),
                metadata: json!({"count": skills.len(), "skills": skills}),
            });
        }

        // The allow-list check runs before any skill content is loaded.
        if let Some(allowed) = &allowed_skills {
            if !allowed.contains(requested) {
                let mut allowed_list = allowed.iter().cloned().collect::<Vec<_>>();
                allowed_list.sort();
                return Ok(ToolResult {
                    output: format!(
                        "Skill \"{}\" is not enabled for this agent. Enabled skills: {}",
                        requested,
                        allowed_list.join(", ")
                    ),
                    metadata: json!({"name": requested, "enabled": allowed_list}),
                });
            }
        }

        let loaded = service.load_skill(requested).map_err(anyhow::Error::msg)?;
        let Some(skill) = loaded else {
            // Unknown skill: report what is actually installed.
            let available = service
                .list_skills()
                .unwrap_or_default()
                .into_iter()
                .map(|s| s.name)
                .collect::<Vec<_>>();
            return Ok(ToolResult {
                output: format!(
                    "Skill \"{}\" not found. Available skills: {}",
                    requested,
                    if available.is_empty() {
                        "none".to_string()
                    } else {
                        available.join(", ")
                    }
                ),
                metadata: json!({"name": requested, "matches": [], "available": available}),
            });
        };

        // Render the loaded skill: content body plus a sampled file list.
        let files = skill
            .files
            .iter()
            .map(|f| format!("<file>{}</file>", f))
            .collect::<Vec<_>>()
            .join("\n");
        let output = [
            format!("<skill_content name=\"{}\">", skill.info.name),
            format!("# Skill: {}", skill.info.name),
            String::new(),
            skill.content.trim().to_string(),
            String::new(),
            format!("Base directory for this skill: {}", skill.base_dir),
            "Relative paths in this skill are resolved from this base directory.".to_string(),
            "Note: file list is sampled.".to_string(),
            String::new(),
            "<skill_files>".to_string(),
            files,
            "</skill_files>".to_string(),
            "</skill_content>".to_string(),
        ]
        .join("\n");
        Ok(ToolResult {
            output,
            metadata: json!({
                "name": skill.info.name,
                "dir": skill.base_dir,
                "path": skill.info.path
            }),
        })
    }
}
4600
/// Escape the XML text-node special characters `&`, `<`, and `>`.
///
/// A single pass is equivalent to replacing `&` first and then `<`/`>`,
/// so entities already present in the input (e.g. "&lt;") have their
/// ampersand escaped too.
fn escape_xml_text(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            other => escaped.push(other),
        }
    }
    escaped
}
4607
4608fn parse_allowed_skills(args: &Value) -> Option<HashSet<String>> {
4609    let values = args
4610        .get("allowed_skills")
4611        .or_else(|| args.get("allowedSkills"))
4612        .and_then(|v| v.as_array())?;
4613    let out = values
4614        .iter()
4615        .filter_map(|v| v.as_str())
4616        .map(str::trim)
4617        .filter(|s| !s.is_empty())
4618        .map(ToString::to_string)
4619        .collect::<HashSet<_>>();
4620    Some(out)
4621}
4622
/// Tool that applies a Codex-style patch via `git apply`, or only validates
/// the patch text when no git workspace is available.
struct ApplyPatchTool;
#[async_trait]
impl Tool for ApplyPatchTool {
    fn schema(&self) -> ToolSchema {
        tool_schema_with_capabilities(
            "apply_patch",
            "Apply a Codex-style patch in a git workspace, or validate patch text when git patching is unavailable",
            json!({"type":"object","properties":{"patchText":{"type":"string"}}}),
            apply_patch_capabilities(),
        )
    }
    /// Validate `patchText` structurally, enforce that every touched path
    /// stays inside the workspace root, then apply via `git apply --3way`.
    /// Without a detectable git root, only the validation result is
    /// returned.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let patch = args["patchText"].as_str().unwrap_or("");
        // Structural validation: Begin/End markers plus >= 1 file operation.
        let has_begin = patch.contains("*** Begin Patch");
        let has_end = patch.contains("*** End Patch");
        let patch_paths = extract_apply_patch_paths(patch);
        let file_ops = patch_paths.len();
        let valid = has_begin && has_end && file_ops > 0;
        if !valid {
            return Ok(ToolResult {
                output: "Invalid patch format. Expected Begin/End markers and at least one file operation."
                    .to_string(),
                metadata: json!({"valid": false, "fileOps": file_ops}),
            });
        }
        let workspace_root =
            workspace_root_from_args(&args).unwrap_or_else(|| effective_cwd_from_args(&args));
        let git_root = resolve_git_root_for_dir(&workspace_root).await;
        if let Some(git_root) = git_root {
            // Workspace policy: patch paths resolve relative to the git
            // root but must not escape the workspace root.
            let denied_paths = patch_paths
                .iter()
                .filter_map(|rel| {
                    let resolved = git_root.join(rel);
                    if is_within_workspace_root(&resolved, &workspace_root) {
                        None
                    } else {
                        Some(rel.clone())
                    }
                })
                .collect::<Vec<_>>();
            if !denied_paths.is_empty() {
                return Ok(ToolResult {
                    output: format!(
                        "patch denied by workspace policy for paths: {}",
                        denied_paths.join(", ")
                    ),
                    metadata: json!({
                        "valid": true,
                        "applied": false,
                        "reason": "path_outside_workspace",
                        "paths": patch_paths
                    }),
                });
            }
            // Write the patch to a uniquely-named temp file (pid + millis)
            // so concurrent invocations don't collide.
            let tmp_name = format!(
                "tandem-apply-patch-{}-{}.patch",
                std::process::id(),
                now_millis()
            );
            let patch_path = std::env::temp_dir().join(tmp_name);
            fs::write(&patch_path, patch).await?;
            let output = Command::new("git")
                .current_dir(&git_root)
                .arg("apply")
                .arg("--3way")
                .arg("--recount")
                .arg("--whitespace=nowarn")
                .arg(&patch_path)
                .output()
                .await?;
            // Best-effort cleanup of the temp file; failure is ignored.
            let _ = fs::remove_file(&patch_path).await;
            let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
            let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
            let ok = output.status.success();
            return Ok(ToolResult {
                output: if ok {
                    if stdout.is_empty() {
                        "ok".to_string()
                    } else {
                        stdout.clone()
                    }
                } else if stderr.is_empty() {
                    "git apply failed".to_string()
                } else {
                    stderr.clone()
                },
                metadata: json!({
                    "valid": true,
                    "applied": ok,
                    "paths": patch_paths,
                    "git_root": git_root.to_string_lossy(),
                    "stdout": stdout,
                    "stderr": stderr
                }),
            });
        }
        // No git root detected: report validation only, nothing applied.
        Ok(ToolResult {
            output: "Patch format validated, but no git workspace was detected. Use `edit` for existing files or `write` for new files in this workspace."
                .to_string(),
            metadata: json!({
                "valid": true,
                "applied": false,
                "reason": "git_workspace_unavailable",
                "paths": patch_paths
            }),
        })
    }
}
4731
/// Collect the file paths named by `*** Add/Update/Delete File:` markers in
/// a Codex-style patch, preserving first-seen order and dropping duplicates
/// and empty path values.
fn extract_apply_patch_paths(patch: &str) -> Vec<String> {
    const MARKERS: [&str; 3] = [
        "*** Add File: ",
        "*** Update File: ",
        "*** Delete File: ",
    ];
    let mut paths: Vec<String> = Vec::new();
    for raw_line in patch.lines() {
        let line = raw_line.trim();
        let candidate = MARKERS
            .iter()
            .find_map(|marker| line.strip_prefix(marker))
            .map(str::trim)
            .filter(|value| !value.is_empty());
        if let Some(path) = candidate {
            // Linear dedup keeps first-seen ordering; patch file counts
            // are small enough that this is fine.
            if !paths.iter().any(|existing| existing == path) {
                paths.push(path.to_string());
            }
        }
    }
    paths
}
4752
4753async fn resolve_git_root_for_dir(dir: &Path) -> Option<PathBuf> {
4754    let output = Command::new("git")
4755        .current_dir(dir)
4756        .arg("rev-parse")
4757        .arg("--show-toplevel")
4758        .stdout(Stdio::piped())
4759        .stderr(Stdio::null())
4760        .output()
4761        .await
4762        .ok()?;
4763    if !output.status.success() {
4764        return None;
4765    }
4766    let root = String::from_utf8_lossy(&output.stdout).trim().to_string();
4767    if root.is_empty() {
4768        None
4769    } else {
4770        Some(PathBuf::from(root))
4771    }
4772}
4773
/// Milliseconds since the Unix epoch, or 0 if the system clock reports a
/// time before the epoch.
fn now_millis() -> u128 {
    match std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => 0,
    }
}
4780
/// Tool that runs up to 20 nested tool calls one after another.
struct BatchTool;
#[async_trait]
impl Tool for BatchTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "batch",
            "Execute multiple tool calls sequentially",
            json!({
                "type":"object",
                "properties":{
                    "tool_calls":{
                        "type":"array",
                        "items":{
                            "type":"object",
                            "properties":{
                                "tool":{"type":"string"},
                                "name":{"type":"string"},
                                "args":{"type":"object"}
                            }
                        }
                    }
                }
            }),
        )
    }
    /// Execute each entry of `tool_calls` (max 20) against a fresh
    /// `ToolRegistry`, collecting per-call output and metadata into one
    /// JSON array. A call whose resolved name is unknown is retried once
    /// under its `name` field. Note any tool returning an `Err` aborts the
    /// whole batch via `?`.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let calls = args["tool_calls"].as_array().cloned().unwrap_or_default();
        // A fresh registry per batch invocation.
        let registry = ToolRegistry::new();
        let mut outputs = Vec::new();
        for call in calls.iter().take(20) {
            let Some(tool) = resolve_batch_call_tool_name(call) else {
                continue;
            };
            // Never allow recursive batch calls.
            if tool.is_empty() || tool == "batch" {
                continue;
            }
            let call_args = call.get("args").cloned().unwrap_or_else(|| json!({}));
            let mut result = registry.execute(&tool, call_args.clone()).await?;
            // Fallback: if the resolved name was unknown, retry once with
            // the call's `name` field (when it differs and is non-empty).
            if result.output.starts_with("Unknown tool:") {
                if let Some(fallback_name) = call
                    .get("name")
                    .and_then(|v| v.as_str())
                    .map(str::trim)
                    .filter(|s| !s.is_empty() && *s != tool)
                {
                    result = registry.execute(fallback_name, call_args).await?;
                }
            }
            outputs.push(json!({
                "tool": tool,
                "output": result.output,
                "metadata": result.metadata
            }));
        }
        let count = outputs.len();
        Ok(ToolResult {
            output: serde_json::to_string_pretty(&outputs).unwrap_or_default(),
            metadata: json!({"count": count}),
        })
    }
}
4842
/// Tool offering heuristic, LSP-like operations (diagnostics, definition,
/// references, symbol listing) over the workspace.
struct LspTool;
#[async_trait]
impl Tool for LspTool {
    fn schema(&self) -> ToolSchema {
        tool_schema(
            "lsp",
            "LSP-like workspace diagnostics and symbol operations",
            json!({"type":"object","properties":{"operation":{"type":"string"},"filePath":{"type":"string"},"symbol":{"type":"string"},"query":{"type":"string"}}}),
        )
    }
    /// Dispatch on `operation`: "diagnostics" (per-file), "definition" and
    /// "references" (by `symbol`), or the default symbol listing (by
    /// `query`, falling back to `symbol`).
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
        let operation = args["operation"].as_str().unwrap_or("symbols");
        let workspace_root =
            workspace_root_from_args(&args).unwrap_or_else(|| effective_cwd_from_args(&args));
        let output = match operation {
            "diagnostics" => {
                let path = args["filePath"].as_str().unwrap_or("");
                // Path must resolve to a safe location before being read.
                match resolve_tool_path(path, &args) {
                    Some(resolved_path) => {
                        diagnostics_for_path(&resolved_path.to_string_lossy()).await
                    }
                    None => "missing or unsafe filePath".to_string(),
                }
            }
            "definition" => {
                let symbol = args["symbol"].as_str().unwrap_or("");
                find_symbol_definition(symbol, &workspace_root).await
            }
            "references" => {
                let symbol = args["symbol"].as_str().unwrap_or("");
                find_symbol_references(symbol, &workspace_root).await
            }
            _ => {
                // Any other operation falls through to a symbol listing.
                let query = args["query"]
                    .as_str()
                    .or_else(|| args["symbol"].as_str())
                    .unwrap_or("");
                list_symbols(query, &workspace_root).await
            }
        };
        Ok(ToolResult {
            output,
            metadata: json!({"operation": operation, "workspace_root": workspace_root.to_string_lossy()}),
        })
    }
}
4889
/// Convert a string path into a `PathBuf`. Performs no validation or
/// sanitization despite the name; kept around but currently unused.
#[allow(dead_code)]
fn _safe_path(path: &str) -> PathBuf {
    let buf: PathBuf = path.into();
    buf
}
4894
4895static TODO_SEQ: AtomicU64 = AtomicU64::new(1);
4896
4897fn normalize_todos(items: Vec<Value>) -> Vec<Value> {
4898    items
4899        .into_iter()
4900        .filter_map(|item| {
4901            let obj = item.as_object()?;
4902            let content = obj
4903                .get("content")
4904                .and_then(|v| v.as_str())
4905                .or_else(|| obj.get("text").and_then(|v| v.as_str()))
4906                .unwrap_or("")
4907                .trim()
4908                .to_string();
4909            if content.is_empty() {
4910                return None;
4911            }
4912            let id = obj
4913                .get("id")
4914                .and_then(|v| v.as_str())
4915                .filter(|s| !s.trim().is_empty())
4916                .map(ToString::to_string)
4917                .unwrap_or_else(|| format!("todo-{}", TODO_SEQ.fetch_add(1, Ordering::Relaxed)));
4918            let status = obj
4919                .get("status")
4920                .and_then(|v| v.as_str())
4921                .filter(|s| !s.trim().is_empty())
4922                .map(ToString::to_string)
4923                .unwrap_or_else(|| "pending".to_string());
4924            Some(json!({"id": id, "content": content, "status": status}))
4925        })
4926        .collect()
4927}
4928
4929async fn diagnostics_for_path(path: &str) -> String {
4930    let Ok(content) = fs::read_to_string(path).await else {
4931        return "File not found".to_string();
4932    };
4933    let mut issues = Vec::new();
4934    let mut balance = 0i64;
4935    for (idx, line) in content.lines().enumerate() {
4936        for ch in line.chars() {
4937            if ch == '{' {
4938                balance += 1;
4939            } else if ch == '}' {
4940                balance -= 1;
4941            }
4942        }
4943        if line.contains("TODO") {
4944            issues.push(format!("{path}:{}: TODO marker", idx + 1));
4945        }
4946    }
4947    if balance != 0 {
4948        issues.push(format!("{path}:1: Unbalanced braces"));
4949    }
4950    if issues.is_empty() {
4951        "No diagnostics.".to_string()
4952    } else {
4953        issues.join("\n")
4954    }
4955}
4956
4957async fn list_symbols(query: &str, root: &Path) -> String {
4958    let query = query.to_lowercase();
4959    let rust_fn = Regex::new(r"^\s*(pub\s+)?(async\s+)?fn\s+([A-Za-z_][A-Za-z0-9_]*)")
4960        .unwrap_or_else(|_| Regex::new("$^").expect("regex"));
4961    let mut out = Vec::new();
4962    for entry in WalkBuilder::new(root).build().flatten() {
4963        if !entry.file_type().map(|t| t.is_file()).unwrap_or(false) {
4964            continue;
4965        }
4966        let path = entry.path();
4967        let ext = path.extension().and_then(|v| v.to_str()).unwrap_or("");
4968        if !matches!(ext, "rs" | "ts" | "tsx" | "js" | "jsx" | "py") {
4969            continue;
4970        }
4971        if let Ok(content) = fs::read_to_string(path).await {
4972            for (idx, line) in content.lines().enumerate() {
4973                if let Some(captures) = rust_fn.captures(line) {
4974                    let name = captures
4975                        .get(3)
4976                        .map(|m| m.as_str().to_string())
4977                        .unwrap_or_default();
4978                    if query.is_empty() || name.to_lowercase().contains(&query) {
4979                        out.push(format!("{}:{}:fn {}", path.display(), idx + 1, name));
4980                        if out.len() >= 100 {
4981                            return out.join("\n");
4982                        }
4983                    }
4984                }
4985            }
4986        }
4987    }
4988    out.join("\n")
4989}
4990
4991async fn find_symbol_definition(symbol: &str, root: &Path) -> String {
4992    if symbol.trim().is_empty() {
4993        return "missing symbol".to_string();
4994    }
4995    let listed = list_symbols(symbol, root).await;
4996    listed
4997        .lines()
4998        .find(|line| line.ends_with(&format!("fn {symbol}")))
4999        .map(ToString::to_string)
5000        .unwrap_or_else(|| "symbol not found".to_string())
5001}
5002
5003#[cfg(test)]
5004mod tests {
5005    use super::*;
5006    use std::collections::HashSet;
5007    use std::path::PathBuf;
5008    use std::sync::{Mutex, OnceLock};
5009    use tokio::fs;
5010    use tokio_util::sync::CancellationToken;
5011
    /// Minimal stub [`Tool`] used to register placeholder tools in tests;
    /// it reports back whatever schema it was constructed with.
    struct TestTool {
        schema: ToolSchema,
    }
5015
5016    #[async_trait]
5017    impl Tool for TestTool {
5018        fn schema(&self) -> ToolSchema {
5019            self.schema.clone()
5020        }
5021
5022        async fn execute(&self, _args: Value) -> anyhow::Result<ToolResult> {
5023            Ok(ToolResult {
5024                output: "ok".to_string(),
5025                metadata: json!({}),
5026            })
5027        }
5028
5029        async fn execute_with_cancel(
5030            &self,
5031            args: Value,
5032            _cancel: CancellationToken,
5033        ) -> anyhow::Result<ToolResult> {
5034            self.execute(args).await
5035        }
5036    }
5037
5038    fn search_env_lock() -> &'static Mutex<()> {
5039        static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
5040        LOCK.get_or_init(|| Mutex::new(()))
5041    }
5042
5043    fn clear_search_env() {
5044        std::env::remove_var("TANDEM_SEARCH_BACKEND");
5045        std::env::remove_var("TANDEM_SEARCH_URL");
5046        std::env::remove_var("TANDEM_SEARXNG_URL");
5047        std::env::remove_var("TANDEM_SEARXNG_ENGINES");
5048        std::env::remove_var("TANDEM_SEARCH_TIMEOUT_MS");
5049        std::env::remove_var("TANDEM_EXA_API_KEY");
5050        std::env::remove_var("TANDEM_EXA_SEARCH_API_KEY");
5051        std::env::remove_var("EXA_API_KEY");
5052        std::env::remove_var("TANDEM_BRAVE_SEARCH_API_KEY");
5053        std::env::remove_var("BRAVE_SEARCH_API_KEY");
5054    }
5055
5056    #[test]
5057    fn validator_rejects_array_without_items() {
5058        let schemas = vec![ToolSchema::new(
5059            "bad",
5060            "bad schema",
5061            json!({
5062                "type":"object",
5063                "properties":{"todos":{"type":"array"}}
5064            }),
5065        )];
5066        let err = validate_tool_schemas(&schemas).expect_err("expected schema validation failure");
5067        assert_eq!(err.tool_name, "bad");
5068        assert!(err.path.contains("properties.todos"));
5069    }
5070
5071    #[tokio::test]
5072    async fn registry_schemas_are_unique_and_valid() {
5073        let registry = ToolRegistry::new();
5074        let schemas = registry.list().await;
5075        validate_tool_schemas(&schemas).expect("registry tool schemas should validate");
5076        let unique = schemas
5077            .iter()
5078            .map(|schema| schema.name.as_str())
5079            .collect::<HashSet<_>>();
5080        assert_eq!(
5081            unique.len(),
5082            schemas.len(),
5083            "tool schemas must be unique by name"
5084        );
5085    }
5086
5087    #[tokio::test]
5088    async fn core_tool_schemas_include_expected_capabilities() {
5089        let registry = ToolRegistry::new();
5090        let schemas = registry.list().await;
5091        let schema_by_name = schemas
5092            .iter()
5093            .map(|schema| (schema.name.as_str(), schema))
5094            .collect::<HashMap<_, _>>();
5095
5096        let read = schema_by_name.get("read").expect("read tool");
5097        assert!(read.capabilities.reads_workspace);
5098        assert!(read.capabilities.preferred_for_discovery);
5099        assert_eq!(
5100            read.capabilities.effects,
5101            vec![tandem_types::ToolEffect::Read]
5102        );
5103
5104        let write = schema_by_name.get("write").expect("write tool");
5105        assert!(write.capabilities.writes_workspace);
5106        assert!(write.capabilities.requires_verification);
5107        assert_eq!(
5108            write.capabilities.effects,
5109            vec![tandem_types::ToolEffect::Write]
5110        );
5111
5112        let grep = schema_by_name.get("grep").expect("grep tool");
5113        assert!(grep.capabilities.reads_workspace);
5114        assert!(grep.capabilities.preferred_for_discovery);
5115        assert_eq!(
5116            grep.capabilities.effects,
5117            vec![tandem_types::ToolEffect::Search]
5118        );
5119
5120        let bash = schema_by_name.get("bash").expect("bash tool");
5121        assert!(bash.capabilities.destructive);
5122        assert!(bash.capabilities.network_access);
5123        assert_eq!(
5124            bash.capabilities.effects,
5125            vec![tandem_types::ToolEffect::Execute]
5126        );
5127
5128        let webfetch = schema_by_name.get("webfetch").expect("webfetch tool");
5129        assert!(webfetch.capabilities.network_access);
5130        assert!(webfetch.capabilities.preferred_for_discovery);
5131        assert_eq!(
5132            webfetch.capabilities.effects,
5133            vec![tandem_types::ToolEffect::Fetch]
5134        );
5135
5136        let apply_patch = schema_by_name.get("apply_patch").expect("apply_patch tool");
5137        assert!(apply_patch.capabilities.reads_workspace);
5138        assert!(apply_patch.capabilities.writes_workspace);
5139        assert!(apply_patch.capabilities.requires_verification);
5140        assert_eq!(
5141            apply_patch.capabilities.effects,
5142            vec![tandem_types::ToolEffect::Patch]
5143        );
5144    }
5145
5146    #[tokio::test]
5147    async fn mcp_server_names_returns_unique_sorted_names() {
5148        let registry = ToolRegistry::new();
5149        registry
5150            .register_tool(
5151                "mcp.notion.search_pages".to_string(),
5152                Arc::new(TestTool {
5153                    schema: ToolSchema::new("mcp.notion.search_pages", "search", json!({})),
5154                }),
5155            )
5156            .await;
5157        registry
5158            .register_tool(
5159                "mcp.github.list_prs".to_string(),
5160                Arc::new(TestTool {
5161                    schema: ToolSchema::new("mcp.github.list_prs", "list", json!({})),
5162                }),
5163            )
5164            .await;
5165        registry
5166            .register_tool(
5167                "mcp.github.get_pr".to_string(),
5168                Arc::new(TestTool {
5169                    schema: ToolSchema::new("mcp.github.get_pr", "get", json!({})),
5170                }),
5171            )
5172            .await;
5173
5174        let servers = registry.mcp_server_names().await;
5175        assert_eq!(servers, vec!["github".to_string(), "notion".to_string()]);
5176    }
5177
5178    #[tokio::test]
5179    async fn unregister_by_prefix_removes_index_vectors_for_removed_tools() {
5180        let registry = ToolRegistry::new();
5181        registry
5182            .register_tool(
5183                "mcp.test.search".to_string(),
5184                Arc::new(TestTool {
5185                    schema: ToolSchema::new("mcp.test.search", "search", json!({})),
5186                }),
5187            )
5188            .await;
5189        registry
5190            .register_tool(
5191                "mcp.test.get".to_string(),
5192                Arc::new(TestTool {
5193                    schema: ToolSchema::new("mcp.test.get", "get", json!({})),
5194                }),
5195            )
5196            .await;
5197
5198        registry
5199            .tool_vectors
5200            .write()
5201            .await
5202            .insert("mcp.test.search".to_string(), vec![1.0, 0.0, 0.0]);
5203        registry
5204            .tool_vectors
5205            .write()
5206            .await
5207            .insert("mcp.test.get".to_string(), vec![0.0, 1.0, 0.0]);
5208
5209        let removed = registry.unregister_by_prefix("mcp.test.").await;
5210        assert_eq!(removed, 2);
5211        let vectors = registry.tool_vectors.read().await;
5212        assert!(!vectors.contains_key("mcp.test.search"));
5213        assert!(!vectors.contains_key("mcp.test.get"));
5214    }
5215
5216    #[test]
5217    fn websearch_query_extraction_accepts_aliases_and_nested_shapes() {
5218        let direct = json!({"query":"meaning of life"});
5219        assert_eq!(
5220            extract_websearch_query(&direct).as_deref(),
5221            Some("meaning of life")
5222        );
5223
5224        let alias = json!({"q":"hello"});
5225        assert_eq!(extract_websearch_query(&alias).as_deref(), Some("hello"));
5226
5227        let nested = json!({"arguments":{"search_query":"rust tokio"}});
5228        assert_eq!(
5229            extract_websearch_query(&nested).as_deref(),
5230            Some("rust tokio")
5231        );
5232
5233        let as_string = json!("find docs");
5234        assert_eq!(
5235            extract_websearch_query(&as_string).as_deref(),
5236            Some("find docs")
5237        );
5238
5239        let malformed = json!({"query":"websearch query</arg_key><arg_value>taj card what is it benefits how to apply</arg_value>"});
5240        assert_eq!(
5241            extract_websearch_query(&malformed).as_deref(),
5242            Some("taj card what is it benefits how to apply")
5243        );
5244    }
5245
5246    #[test]
5247    fn websearch_limit_extraction_clamps_and_reads_nested_fields() {
5248        assert_eq!(extract_websearch_limit(&json!({"limit": 100})), Some(10));
5249        assert_eq!(
5250            extract_websearch_limit(&json!({"arguments":{"numResults": 0}})),
5251            Some(1)
5252        );
5253        assert_eq!(
5254            extract_websearch_limit(&json!({"input":{"num_results": 6}})),
5255            Some(6)
5256        );
5257    }
5258
5259    #[test]
5260    fn search_backend_defaults_to_searxng_when_configured() {
5261        let _guard = search_env_lock().lock().expect("env lock");
5262        clear_search_env();
5263        std::env::set_var("TANDEM_SEARXNG_URL", "http://localhost:8080");
5264
5265        let backend = SearchBackend::from_env();
5266
5267        match backend {
5268            SearchBackend::Searxng { base_url, .. } => {
5269                assert_eq!(base_url, "http://localhost:8080");
5270            }
5271            other => panic!("expected searxng backend, got {other:?}"),
5272        }
5273
5274        clear_search_env();
5275    }
5276
5277    #[test]
5278    fn search_backend_defaults_to_tandem_when_search_url_configured() {
5279        let _guard = search_env_lock().lock().expect("env lock");
5280        clear_search_env();
5281        std::env::set_var("TANDEM_SEARCH_URL", "https://search.tandem.ac");
5282
5283        let backend = SearchBackend::from_env();
5284
5285        match backend {
5286            SearchBackend::Tandem { base_url, .. } => {
5287                assert_eq!(base_url, "https://search.tandem.ac");
5288            }
5289            other => panic!("expected tandem backend, got {other:?}"),
5290        }
5291
5292        clear_search_env();
5293    }
5294
5295    #[test]
5296    fn search_backend_explicit_auto_is_supported() {
5297        let _guard = search_env_lock().lock().expect("env lock");
5298        clear_search_env();
5299        std::env::set_var("TANDEM_SEARCH_BACKEND", "auto");
5300        std::env::set_var("TANDEM_BRAVE_SEARCH_API_KEY", "brave-test-key");
5301        std::env::set_var("TANDEM_EXA_API_KEY", "exa-test-key");
5302
5303        let backend = SearchBackend::from_env();
5304
5305        match backend {
5306            SearchBackend::Auto { backends } => {
5307                assert_eq!(backends.len(), 2);
5308                assert!(matches!(backends[0], SearchBackend::Brave { .. }));
5309                assert!(matches!(backends[1], SearchBackend::Exa { .. }));
5310            }
5311            other => panic!("expected auto backend, got {other:?}"),
5312        }
5313
5314        clear_search_env();
5315    }
5316
5317    #[test]
5318    fn search_backend_implicit_auto_failover_when_multiple_backends_are_configured() {
5319        let _guard = search_env_lock().lock().expect("env lock");
5320        clear_search_env();
5321        std::env::set_var("TANDEM_BRAVE_SEARCH_API_KEY", "brave-test-key");
5322        std::env::set_var("TANDEM_EXA_API_KEY", "exa-test-key");
5323
5324        let backend = SearchBackend::from_env();
5325
5326        match backend {
5327            SearchBackend::Auto { backends } => {
5328                assert_eq!(backends.len(), 2);
5329                assert!(matches!(backends[0], SearchBackend::Brave { .. }));
5330                assert!(matches!(backends[1], SearchBackend::Exa { .. }));
5331            }
5332            other => panic!("expected auto backend, got {other:?}"),
5333        }
5334
5335        clear_search_env();
5336    }
5337
5338    #[test]
5339    fn search_backend_supports_legacy_exa_env_key() {
5340        let _guard = search_env_lock().lock().expect("env lock");
5341        clear_search_env();
5342        std::env::set_var("TANDEM_SEARCH_BACKEND", "exa");
5343        std::env::set_var("TANDEM_EXA_SEARCH_API_KEY", "legacy-exa-test-key");
5344
5345        let backend = SearchBackend::from_env();
5346
5347        match backend {
5348            SearchBackend::Exa { api_key, .. } => {
5349                assert_eq!(api_key, "legacy-exa-test-key");
5350            }
5351            other => panic!("expected exa backend, got {other:?}"),
5352        }
5353
5354        clear_search_env();
5355    }
5356
5357    #[test]
5358    fn normalize_brave_results_accepts_standard_web_payload_rows() {
5359        let raw = vec![json!({
5360            "title": "Agentic workflows",
5361            "url": "https://example.com/agentic",
5362            "description": "A practical overview of agentic workflows.",
5363            "profile": {
5364                "long_name": "example.com"
5365            }
5366        })];
5367
5368        let results = normalize_brave_results(&raw, 5);
5369
5370        assert_eq!(results.len(), 1);
5371        assert_eq!(results[0].title, "Agentic workflows");
5372        assert_eq!(results[0].url, "https://example.com/agentic");
5373        assert_eq!(
5374            results[0].snippet,
5375            "A practical overview of agentic workflows."
5376        );
5377        assert_eq!(results[0].source, "brave:example.com");
5378    }
5379
5380    #[test]
5381    fn search_backend_explicit_none_disables_websearch() {
5382        let _guard = search_env_lock().lock().expect("env lock");
5383        clear_search_env();
5384        std::env::set_var("TANDEM_SEARCH_BACKEND", "none");
5385        std::env::set_var("TANDEM_SEARXNG_URL", "http://localhost:8080");
5386
5387        let backend = SearchBackend::from_env();
5388
5389        assert!(matches!(backend, SearchBackend::Disabled { .. }));
5390
5391        clear_search_env();
5392    }
5393
5394    #[tokio::test]
5395    async fn tool_registry_includes_websearch_by_default() {
5396        let _guard = search_env_lock().lock().expect("env lock");
5397        clear_search_env();
5398
5399        let registry = ToolRegistry::new();
5400        let names = registry
5401            .list()
5402            .await
5403            .into_iter()
5404            .map(|schema| schema.name)
5405            .collect::<Vec<_>>();
5406
5407        assert!(names.iter().any(|name| name == "websearch"));
5408
5409        clear_search_env();
5410    }
5411
5412    #[tokio::test]
5413    async fn tool_registry_omits_websearch_when_search_backend_explicitly_disabled() {
5414        let _guard = search_env_lock().lock().expect("env lock");
5415        clear_search_env();
5416        std::env::set_var("TANDEM_SEARCH_BACKEND", "none");
5417
5418        let registry = ToolRegistry::new();
5419        let names = registry
5420            .list()
5421            .await
5422            .into_iter()
5423            .map(|schema| schema.name)
5424            .collect::<Vec<_>>();
5425
5426        assert!(!names.iter().any(|name| name == "websearch"));
5427
5428        clear_search_env();
5429    }
5430
5431    #[test]
5432    fn normalize_searxng_results_preserves_title_url_and_engine() {
5433        let results = normalize_searxng_results(
5434            &[json!({
5435                "title": "Tandem Docs",
5436                "url": "https://docs.tandem.ac/",
5437                "content": "Official documentation for Tandem.",
5438                "engine": "duckduckgo"
5439            })],
5440            8,
5441        );
5442
5443        assert_eq!(results.len(), 1);
5444        assert_eq!(results[0].title, "Tandem Docs");
5445        assert_eq!(results[0].url, "https://docs.tandem.ac/");
5446        assert_eq!(results[0].snippet, "Official documentation for Tandem.");
5447        assert_eq!(results[0].source, "searxng:duckduckgo");
5448    }
5449
5450    #[test]
5451    fn test_html_stripping_and_markdown_reduction() {
5452        let html = r#"
5453            <!DOCTYPE html>
5454            <html>
5455            <head>
5456                <title>Test Page</title>
5457                <style>
5458                    body { color: red; }
5459                </style>
5460                <script>
5461                    console.log("noisy script");
5462                </script>
5463            </head>
5464            <body>
5465                <h1>Hello World</h1>
5466                <p>This is a <a href="https://example.com">link</a>.</p>
5467                <noscript>Enable JS</noscript>
5468            </body>
5469            </html>
5470        "#;
5471
5472        let cleaned = strip_html_noise(html);
5473        assert!(!cleaned.contains("noisy script"));
5474        assert!(!cleaned.contains("color: red"));
5475        assert!(!cleaned.contains("Enable JS"));
5476        assert!(cleaned.contains("Hello World"));
5477
5478        let markdown = html2md::parse_html(&cleaned);
5479        let text = markdown_to_text(&markdown);
5480
5481        // Raw length includes all the noise
5482        let raw_len = html.len();
5483        // Markdown length should be significantly smaller
5484        let md_len = markdown.len();
5485
5486        println!("Raw: {}, Markdown: {}", raw_len, md_len);
5487        assert!(
5488            md_len < raw_len / 2,
5489            "Markdown should be < 50% of raw HTML size"
5490        );
5491        assert!(text.contains("Hello World"));
5492        assert!(text.contains("link"));
5493    }
5494
5495    #[test]
5496    fn memory_scope_defaults_to_hidden_context() {
5497        let args = json!({
5498            "__session_id": "session-123",
5499            "__project_id": "workspace-abc"
5500        });
5501        assert_eq!(memory_session_id(&args).as_deref(), Some("session-123"));
5502        assert_eq!(memory_project_id(&args).as_deref(), Some("workspace-abc"));
5503        assert!(global_memory_enabled(&args));
5504    }
5505
5506    #[test]
5507    fn memory_scope_policy_can_disable_global_visibility() {
5508        let args = json!({
5509            "__session_id": "session-123",
5510            "__project_id": "workspace-abc",
5511            "__memory_max_visible_scope": "project"
5512        });
5513        assert_eq!(memory_visible_scope(&args), MemoryVisibleScope::Project);
5514        assert!(!global_memory_enabled(&args));
5515    }
5516
5517    #[test]
5518    fn memory_db_path_ignores_public_db_path_arg() {
5519        std::env::set_var("TANDEM_MEMORY_DB_PATH", "/tmp/global-memory.sqlite");
5520        let resolved = resolve_memory_db_path(&json!({
5521            "db_path": "/home/user123/tandem"
5522        }));
5523        assert_eq!(resolved, PathBuf::from("/tmp/global-memory.sqlite"));
5524        std::env::remove_var("TANDEM_MEMORY_DB_PATH");
5525    }
5526
5527    #[test]
5528    fn memory_db_path_accepts_hidden_override() {
5529        std::env::set_var("TANDEM_MEMORY_DB_PATH", "/tmp/global-memory.sqlite");
5530        let resolved = resolve_memory_db_path(&json!({
5531            "__memory_db_path": "/tmp/internal-memory.sqlite",
5532            "db_path": "/home/user123/tandem"
5533        }));
5534        assert_eq!(resolved, PathBuf::from("/tmp/internal-memory.sqlite"));
5535        std::env::remove_var("TANDEM_MEMORY_DB_PATH");
5536    }
5537
5538    #[tokio::test]
5539    async fn memory_search_uses_global_by_default() {
5540        let tool = MemorySearchTool;
5541        let result = tool
5542            .execute(json!({
5543                "query": "global pattern",
5544                "tier": "global"
5545            }))
5546            .await
5547            .expect("memory_search should return ToolResult");
5548        assert!(
5549            result.output.contains("memory database not found")
5550                || result.output.contains("memory embeddings unavailable")
5551        );
5552        assert_eq!(result.metadata["ok"], json!(false));
5553        let reason = result
5554            .metadata
5555            .get("reason")
5556            .and_then(|v| v.as_str())
5557            .unwrap_or_default();
5558        assert!(matches!(
5559            reason,
5560            "memory_db_missing" | "embeddings_unavailable"
5561        ));
5562    }
5563
5564    #[tokio::test]
5565    async fn memory_store_uses_hidden_project_scope_by_default() {
5566        let tool = MemoryStoreTool;
5567        let result = tool
5568            .execute(json!({
5569                "content": "remember this",
5570                "__session_id": "session-123",
5571                "__project_id": "workspace-abc"
5572            }))
5573            .await
5574            .expect("memory_store should return ToolResult");
5575        assert!(
5576            result.output.contains("memory embeddings unavailable")
5577                || result.output.contains("memory database not found")
5578        );
5579        let reason = result
5580            .metadata
5581            .get("reason")
5582            .and_then(|v| v.as_str())
5583            .unwrap_or_default();
5584        assert!(matches!(
5585            reason,
5586            "embeddings_unavailable" | "memory_db_missing"
5587        ));
5588    }
5589
5590    #[tokio::test]
5591    async fn memory_delete_uses_hidden_project_scope_by_default() {
5592        let tool = MemoryDeleteTool;
5593        let result = tool
5594            .execute(json!({
5595                "chunk_id": "chunk-123",
5596                "__session_id": "session-123",
5597                "__project_id": "workspace-abc",
5598                "__memory_db_path": "/tmp/tandem-memory-delete-test.sqlite"
5599            }))
5600            .await
5601            .expect("memory_delete should return ToolResult");
5602        assert_eq!(result.metadata["tier"], json!("project"));
5603        assert_eq!(result.metadata["project_id"], json!("workspace-abc"));
5604        assert!(matches!(
5605            result
5606                .metadata
5607                .get("reason")
5608                .and_then(|v| v.as_str())
5609                .unwrap_or_default(),
5610            "not_found"
5611        ));
5612    }
5613
5614    #[test]
5615    fn translate_windows_ls_with_all_flag() {
5616        let translated = translate_windows_shell_command("ls -la").expect("translation");
5617        assert!(translated.contains("Get-ChildItem"));
5618        assert!(translated.contains("-Force"));
5619    }
5620
5621    #[test]
5622    fn translate_windows_find_name_pattern() {
5623        let translated =
5624            translate_windows_shell_command("find . -type f -name \"*.rs\"").expect("translation");
5625        assert!(translated.contains("Get-ChildItem"));
5626        assert!(translated.contains("-Recurse"));
5627        assert!(translated.contains("-Filter"));
5628    }
5629
5630    #[test]
5631    fn windows_guardrail_blocks_untranslatable_unix_command() {
5632        assert_eq!(
5633            windows_guardrail_reason("sed -n '1,5p' README.md"),
5634            Some("unix_command_untranslatable")
5635        );
5636    }
5637
5638    #[test]
5639    fn path_policy_rejects_tool_markup_and_globs() {
5640        assert!(resolve_tool_path(
5641            "<tool_call><function=glob><parameter=pattern>**/*</parameter></function></tool_call>",
5642            &json!({})
5643        )
5644        .is_none());
5645        assert!(resolve_tool_path("**/*", &json!({})).is_none());
5646        assert!(resolve_tool_path("/", &json!({})).is_none());
5647        assert!(resolve_tool_path("C:\\", &json!({})).is_none());
5648    }
5649
5650    #[tokio::test]
5651    async fn glob_allows_tandem_artifact_paths() {
5652        let root =
5653            std::env::temp_dir().join(format!("tandem-glob-artifacts-{}", uuid_like(now_ms_u64())));
5654        let artifacts_dir = root.join(".tandem").join("artifacts");
5655        std::fs::create_dir_all(&artifacts_dir).expect("create artifacts dir");
5656        let artifact = artifacts_dir.join("report.json");
5657        std::fs::write(&artifact, "{\"ok\":true}").expect("write artifact");
5658
5659        let tool = GlobTool;
5660        let result = tool
5661            .execute(json!({
5662                "pattern": ".tandem/artifacts/*.json",
5663                "__workspace_root": root.to_string_lossy().to_string(),
5664                "__effective_cwd": root.to_string_lossy().to_string(),
5665            }))
5666            .await
5667            .expect("glob result");
5668
5669        assert!(
5670            result.output.contains(".tandem/artifacts/report.json"),
5671            "expected artifact path in glob output, got: {}",
5672            result.output
5673        );
5674    }
5675
5676    #[tokio::test]
5677    async fn glob_still_hides_non_artifact_tandem_paths() {
5678        let root =
5679            std::env::temp_dir().join(format!("tandem-glob-hidden-{}", uuid_like(now_ms_u64())));
5680        let tandem_dir = root.join(".tandem");
5681        let artifacts_dir = tandem_dir.join("artifacts");
5682        std::fs::create_dir_all(&artifacts_dir).expect("create tandem dirs");
5683        std::fs::write(tandem_dir.join("secrets.json"), "{\"hidden\":true}")
5684            .expect("write hidden file");
5685
5686        let tool = GlobTool;
5687        let result = tool
5688            .execute(json!({
5689                "pattern": ".tandem/*.json",
5690                "__workspace_root": root.to_string_lossy().to_string(),
5691                "__effective_cwd": root.to_string_lossy().to_string(),
5692            }))
5693            .await
5694            .expect("glob result");
5695
5696        assert!(
5697            result.output.trim().is_empty(),
5698            "expected non-artifact tandem paths to stay hidden, got: {}",
5699            result.output
5700        );
5701    }
5702
5703    #[test]
5704    fn normalize_recursive_wildcard_pattern_fixes_common_invalid_forms() {
5705        assert_eq!(
5706            normalize_recursive_wildcard_pattern("docs/**.md").as_deref(),
5707            Some("docs/**/*.md")
5708        );
5709        assert_eq!(
5710            normalize_recursive_wildcard_pattern("src/**README*").as_deref(),
5711            Some("src/**/README*")
5712        );
5713        assert_eq!(
5714            normalize_recursive_wildcard_pattern("**.{md,mdx,txt}").as_deref(),
5715            Some("**/*.{md,mdx,txt}")
5716        );
5717        assert_eq!(normalize_recursive_wildcard_pattern("docs/**/*.md"), None);
5718    }
5719
5720    #[tokio::test]
5721    async fn glob_recovers_from_invalid_recursive_wildcard_syntax() {
5722        let root =
5723            std::env::temp_dir().join(format!("tandem-glob-recover-{}", uuid_like(now_ms_u64())));
5724        let docs_dir = root.join("docs").join("guides");
5725        std::fs::create_dir_all(&docs_dir).expect("create docs dir");
5726        let guide = docs_dir.join("intro.md");
5727        std::fs::write(&guide, "# intro").expect("write guide");
5728
5729        let tool = GlobTool;
5730        let result = tool
5731            .execute(json!({
5732                "pattern": "docs/**.md",
5733                "__workspace_root": root.to_string_lossy().to_string(),
5734                "__effective_cwd": root.to_string_lossy().to_string(),
5735            }))
5736            .await
5737            .expect("glob result");
5738
5739        assert!(
5740            result.output.contains("docs/guides/intro.md"),
5741            "expected recovered glob output, got: {}",
5742            result.output
5743        );
5744        assert_eq!(
5745            result.metadata["effective_pattern"],
5746            json!(format!("{}/docs/**/*.md", root.to_string_lossy()))
5747        );
5748    }
5749
5750    #[cfg(windows)]
5751    #[test]
5752    fn path_policy_allows_windows_verbatim_paths_within_workspace() {
5753        let args = json!({
5754            "__workspace_root": r"C:\tandem-examples",
5755            "__effective_cwd": r"C:\tandem-examples\docs"
5756        });
5757        assert!(resolve_tool_path(r"\\?\C:\tandem-examples\docs\index.html", &args).is_some());
5758    }
5759
5760    #[cfg(not(windows))]
5761    #[test]
5762    fn path_policy_allows_absolute_linux_paths_within_workspace() {
5763        let args = json!({
5764            "__workspace_root": "/tmp/tandem-examples",
5765            "__effective_cwd": "/tmp/tandem-examples/docs"
5766        });
5767        assert!(resolve_tool_path("/tmp/tandem-examples/docs/index.html", &args).is_some());
5768        assert!(resolve_tool_path("/etc/passwd", &args).is_none());
5769    }
5770
5771    #[test]
5772    fn read_fallback_resolves_unique_suffix_filename() {
5773        let root =
5774            std::env::temp_dir().join(format!("tandem-read-fallback-{}", uuid_like(now_ms_u64())));
5775        std::fs::create_dir_all(&root).expect("create root");
5776        let target = root.join("T1011U kitöltési útmutató.pdf");
5777        std::fs::write(&target, b"stub").expect("write test file");
5778
5779        let args = json!({
5780            "__workspace_root": root.to_string_lossy().to_string(),
5781            "__effective_cwd": root.to_string_lossy().to_string()
5782        });
5783        let resolved = resolve_read_path_fallback("útmutató.pdf", &args)
5784            .expect("expected unique suffix match");
5785        assert_eq!(resolved, target);
5786
5787        let _ = std::fs::remove_dir_all(&root);
5788    }
5789
5790    #[tokio::test]
5791    async fn write_tool_rejects_empty_content_by_default() {
5792        let tool = WriteTool;
5793        let result = tool
5794            .execute(json!({
5795                "path":"target/write_guard_test.txt",
5796                "content":""
5797            }))
5798            .await
5799            .expect("write tool should return ToolResult");
5800        assert!(result.output.contains("non-empty `content`"));
5801        assert_eq!(result.metadata["reason"], json!("empty_content"));
5802        assert!(!Path::new("target/write_guard_test.txt").exists());
5803    }
5804
5805    #[tokio::test]
5806    async fn registry_resolves_default_api_namespaced_tool() {
5807        let registry = ToolRegistry::new();
5808        let result = registry
5809            .execute("default_api:read", json!({"path":"Cargo.toml"}))
5810            .await
5811            .expect("registry execute should return ToolResult");
5812        assert!(!result.output.starts_with("Unknown tool:"));
5813    }
5814
5815    #[tokio::test]
5816    async fn batch_resolves_default_api_namespaced_tool() {
5817        let tool = BatchTool;
5818        let result = tool
5819            .execute(json!({
5820                "tool_calls":[
5821                    {"tool":"default_api:read","args":{"path":"Cargo.toml"}}
5822                ]
5823            }))
5824            .await
5825            .expect("batch should return ToolResult");
5826        assert!(!result.output.contains("Unknown tool: default_api:read"));
5827    }
5828
5829    #[tokio::test]
5830    async fn batch_prefers_name_when_tool_is_default_api_wrapper() {
5831        let tool = BatchTool;
5832        let result = tool
5833            .execute(json!({
5834                "tool_calls":[
5835                    {"tool":"default_api","name":"read","args":{"path":"Cargo.toml"}}
5836                ]
5837            }))
5838            .await
5839            .expect("batch should return ToolResult");
5840        assert!(!result.output.contains("Unknown tool: default_api"));
5841    }
5842
5843    #[tokio::test]
5844    async fn batch_resolves_nested_function_name_for_wrapper_tool() {
5845        let tool = BatchTool;
5846        let result = tool
5847            .execute(json!({
5848                "tool_calls":[
5849                    {
5850                        "tool":"default_api",
5851                        "function":{"name":"read"},
5852                        "args":{"path":"Cargo.toml"}
5853                    }
5854                ]
5855            }))
5856            .await
5857            .expect("batch should return ToolResult");
5858        assert!(!result.output.contains("Unknown tool: default_api"));
5859    }
5860
5861    #[tokio::test]
5862    async fn batch_drops_wrapper_calls_without_resolvable_name() {
5863        let tool = BatchTool;
5864        let result = tool
5865            .execute(json!({
5866                "tool_calls":[
5867                    {"tool":"default_api","args":{"path":"Cargo.toml"}}
5868                ]
5869            }))
5870            .await
5871            .expect("batch should return ToolResult");
5872        assert_eq!(result.metadata["count"], json!(0));
5873    }
5874
5875    #[test]
5876    fn sanitize_member_name_normalizes_agent_aliases() {
5877        assert_eq!(sanitize_member_name("A2").expect("valid"), "A2");
5878        assert_eq!(sanitize_member_name("a7").expect("valid"), "A7");
5879        assert_eq!(
5880            sanitize_member_name("  qa reviewer ").expect("valid"),
5881            "qa-reviewer"
5882        );
5883        assert!(sanitize_member_name("   ").is_err());
5884    }
5885
5886    #[tokio::test]
5887    async fn next_default_member_name_skips_existing_indices() {
5888        let root = std::env::temp_dir().join(format!(
5889            "tandem-agent-team-test-{}",
5890            uuid_like(now_ms_u64())
5891        ));
5892        let paths = AgentTeamPaths::new(root.join(".tandem"));
5893        let team_name = "alpha";
5894        fs::create_dir_all(paths.team_dir(team_name))
5895            .await
5896            .expect("create team dir");
5897        write_json_file(
5898            paths.members_file(team_name),
5899            &json!([
5900                {"name":"A1"},
5901                {"name":"A2"},
5902                {"name":"agent-x"},
5903                {"name":"A5"}
5904            ]),
5905        )
5906        .await
5907        .expect("write members");
5908
5909        let next = next_default_member_name(&paths, team_name)
5910            .await
5911            .expect("next member");
5912        assert_eq!(next, "A6");
5913
5914        let _ =
5915            fs::remove_dir_all(PathBuf::from(paths.root().parent().unwrap_or(paths.root()))).await;
5916    }
5917}
5918
5919async fn find_symbol_references(symbol: &str, root: &Path) -> String {
5920    if symbol.trim().is_empty() {
5921        return "missing symbol".to_string();
5922    }
5923    let escaped = regex::escape(symbol);
5924    let re = Regex::new(&format!(r"\b{}\b", escaped));
5925    let Ok(re) = re else {
5926        return "invalid symbol".to_string();
5927    };
5928    let mut refs = Vec::new();
5929    for entry in WalkBuilder::new(root).build().flatten() {
5930        if !entry.file_type().map(|t| t.is_file()).unwrap_or(false) {
5931            continue;
5932        }
5933        let path = entry.path();
5934        if let Ok(content) = fs::read_to_string(path).await {
5935            for (idx, line) in content.lines().enumerate() {
5936                if re.is_match(line) {
5937                    refs.push(format!("{}:{}:{}", path.display(), idx + 1, line.trim()));
5938                    if refs.len() >= 200 {
5939                        return refs.join("\n");
5940                    }
5941                }
5942            }
5943        }
5944    }
5945    refs.join("\n")
5946}