// codetether_agent/session/helper/router.rs
use crate::provider::Message;
use crate::tool::ToolRegistry;
use serde_json::json;
use std::path::Path;

use super::text::{extract_candidate_file_paths, latest_user_text, truncate_with_ellipsis};
7
8pub async fn build_proactive_lsp_context_message(
9    _selected_provider: &str,
10    step: usize,
11    tool_registry: &ToolRegistry,
12    session_messages: &[Message],
13    workspace_dir: &Path,
14) -> Option<Message> {
15    if step != 1 {
16        return None;
17    }
18
19    let Some(lsp_tool) = tool_registry.get("lsp") else {
20        return None;
21    };
22
23    let Some(user_text) = latest_user_text(session_messages) else {
24        return None;
25    };
26
27    let max_files = std::env::var("CODETETHER_PROACTIVE_LSP_MAX_FILES")
28        .ok()
29        .and_then(|v| v.parse::<usize>().ok())
30        .filter(|v| *v > 0)
31        .unwrap_or(3);
32
33    let max_chars = std::env::var("CODETETHER_PROACTIVE_LSP_MAX_CHARS")
34        .ok()
35        .and_then(|v| v.parse::<usize>().ok())
36        .filter(|v| *v > 0)
37        .unwrap_or(1600);
38
39    let paths = extract_candidate_file_paths(&user_text, workspace_dir, max_files);
40    if paths.is_empty() {
41        return None;
42    }
43
44    let mut sections: Vec<String> = Vec::new();
45    for path in paths {
46        let diagnostics_args = json!({
47            "action": "diagnostics",
48            "file_path": path,
49        });
50
51        match lsp_tool.execute(diagnostics_args).await {
52            Ok(result) if result.success => {
53                let output = result.output.trim();
54                if !output.is_empty() && output != "No diagnostics found" {
55                    sections.push(format!(
56                        "File: {}\n{}",
57                        path,
58                        truncate_with_ellipsis(output, max_chars)
59                    ));
60                    continue;
61                }
62            }
63            Ok(result) => {
64                tracing::debug!(
65                    file = %path,
66                    output = %truncate_with_ellipsis(&result.output, 200),
67                    "Proactive LSP diagnostics skipped file due to unsuccessful result"
68                );
69            }
70            Err(e) => {
71                tracing::debug!(file = %path, error = %e, "Proactive LSP diagnostics prefetch failed");
72            }
73        }
74
75        let symbol_args = json!({
76            "action": "documentSymbol",
77            "file_path": path,
78        });
79        match lsp_tool.execute(symbol_args).await {
80            Ok(result) if result.success => {
81                sections.push(format!(
82                    "File: {}\n{}",
83                    path,
84                    truncate_with_ellipsis(&result.output, max_chars / 2)
85                ));
86            }
87            Ok(result) => {
88                tracing::debug!(
89                    file = %path,
90                    output = %truncate_with_ellipsis(&result.output, 200),
91                    "Proactive LSP symbol recovery skipped file due to unsuccessful result"
92                );
93            }
94            Err(e) => {
95                tracing::debug!(file = %path, error = %e, "Proactive LSP symbol recovery failed");
96            }
97        }
98    }
99
100    if sections.is_empty() {
101        return None;
102    }
103
104    Some(Message {
105        role: crate::provider::Role::System,
106        content: vec![crate::provider::ContentPart::Text {
107            text: format!(
108                "Mandatory proactive LSP context (prefetched before first reply). Prioritize these real LSP diagnostics and errors over speculation. Do not call the lsp tool just to rediscover the same issues unless you need deeper navigation detail.\n\n{}",
109                sections.join("\n\n---\n\n")
110            ),
111        }],
112    })
113}
114
/// Returns a curated, ordered list of fallback model identifiers for
/// `provider`, excluding any entry that matches the model that just failed.
///
/// A candidate is excluded when either its full `provider/model` id or its
/// trailing path segment equals `failed_model` (case-insensitive, trimmed).
/// Unknown providers yield an empty list.
pub fn known_good_router_candidates(provider: &str, failed_model: &str) -> Vec<String> {
    let failed = failed_model.trim();

    // Static per-provider fallback table.
    let base: Vec<String> = match provider {
        "openrouter" => [
            "openrouter/qwen/qwen3-coder:free",
            "openrouter/openai/gpt-oss-120b:free",
            "openrouter/google/gemma-3-27b-it:free",
            "openrouter/meta-llama/llama-3.3-70b-instruct:free",
        ]
        .iter()
        .map(|id| id.to_string())
        .collect(),
        "zai" => vec!["zai/glm-5".to_string()],
        "glm5" => vec!["glm5/glm-5".to_string()],
        // Both copilot flavors share the same model, prefixed with the
        // exact provider name so routing stays within the same account type.
        "github-copilot" | "github-copilot-enterprise" => {
            vec![format!("{provider}/gpt-5-mini")]
        }
        "openai-codex" => vec!["openai-codex/gpt-5-mini".to_string()],
        "gemini-web" => vec!["gemini-web/gemini-2.5-flash".to_string()],
        "local_cuda" => vec!["local_cuda/qwen3.5-9b".to_string()],
        "google" => vec!["google/gemini-2.5-flash".to_string()],
        "anthropic" => vec!["anthropic/claude-3-5-haiku-latest".to_string()],
        _ => Vec::new(),
    };

    // Drop candidates that would re-select the failed model.
    base.into_iter()
        .filter(|candidate| {
            let tail_differs = candidate
                .rsplit('/')
                .next()
                .map_or(true, |model| !model.eq_ignore_ascii_case(failed));
            !candidate.eq_ignore_ascii_case(failed) && tail_differs
        })
        .collect()
}
148pub fn choose_router_target(
149    registry: &crate::provider::ProviderRegistry,
150    selected_provider: &str,
151    current_model: &str,
152) -> Option<(String, String)> {
153    let current_provider = selected_provider.to_ascii_lowercase();
154
155    for candidate in known_good_router_candidates(selected_provider, current_model) {
156        let (provider_name, model_name) = crate::provider::parse_model_string(&candidate);
157        let provider_name = provider_name.unwrap_or(selected_provider);
158        if registry.get(provider_name).is_some() {
159            return Some((provider_name.to_string(), model_name.to_string()));
160        }
161    }
162
163    if current_provider != "zai" && registry.get("zai").is_some() {
164        return Some(("zai".to_string(), "glm-5".to_string()));
165    }
166    if current_provider != "glm5" && registry.get("glm5").is_some() {
167        return Some(("glm5".to_string(), "glm-5".to_string()));
168    }
169    if current_provider != "openrouter" && registry.get("openrouter").is_some() {
170        return Some((
171            "openrouter".to_string(),
172            "openai/gpt-oss-120b:free".to_string(),
173        ));
174    }
175
176    None
177}