Skip to main content

codetether_agent/session/helper/
router.rs

1use crate::provider::Message;
2use crate::tool::ToolRegistry;
3use serde_json::json;
4use std::path::Path;
5
6use super::text::{extract_candidate_file_paths, latest_user_text, truncate_with_ellipsis};
7
8pub async fn build_proactive_lsp_context_message(
9    _selected_provider: &str,
10    step: usize,
11    tool_registry: &ToolRegistry,
12    session_messages: &[Message],
13    workspace_dir: &Path,
14) -> Option<Message> {
15    if step != 1 {
16        return None;
17    }
18
19    let Some(lsp_tool) = tool_registry.get("lsp") else {
20        return None;
21    };
22
23    let Some(user_text) = latest_user_text(session_messages) else {
24        return None;
25    };
26
27    let max_files = std::env::var("CODETETHER_PROACTIVE_LSP_MAX_FILES")
28        .ok()
29        .and_then(|v| v.parse::<usize>().ok())
30        .filter(|v| *v > 0)
31        .unwrap_or(3);
32
33    let max_chars = std::env::var("CODETETHER_PROACTIVE_LSP_MAX_CHARS")
34        .ok()
35        .and_then(|v| v.parse::<usize>().ok())
36        .filter(|v| *v > 0)
37        .unwrap_or(1600);
38
39    let paths = extract_candidate_file_paths(&user_text, workspace_dir, max_files);
40    if paths.is_empty() {
41        return None;
42    }
43
44    let mut sections: Vec<String> = Vec::new();
45    for path in paths {
46        let diagnostics_args = json!({
47            "action": "diagnostics",
48            "file_path": path,
49        });
50
51        match lsp_tool.execute(diagnostics_args).await {
52            Ok(result) if result.success => {
53                let output = result.output.trim();
54                if !output.is_empty() && output != "No diagnostics found" {
55                    sections.push(format!(
56                        "File: {}\n{}",
57                        path,
58                        truncate_with_ellipsis(output, max_chars)
59                    ));
60                    continue;
61                }
62            }
63            Ok(result) => {
64                tracing::debug!(
65                    file = %path,
66                    output = %truncate_with_ellipsis(&result.output, 200),
67                    "Proactive LSP diagnostics skipped file due to unsuccessful result"
68                );
69            }
70            Err(e) => {
71                tracing::debug!(file = %path, error = %e, "Proactive LSP diagnostics prefetch failed");
72            }
73        }
74
75        let symbol_args = json!({
76            "action": "documentSymbol",
77            "file_path": path,
78        });
79        match lsp_tool.execute(symbol_args).await {
80            Ok(result) if result.success => {
81                sections.push(format!(
82                    "File: {}\n{}",
83                    path,
84                    truncate_with_ellipsis(&result.output, max_chars / 2)
85                ));
86            }
87            Ok(result) => {
88                tracing::debug!(
89                    file = %path,
90                    output = %truncate_with_ellipsis(&result.output, 200),
91                    "Proactive LSP symbol recovery skipped file due to unsuccessful result"
92                );
93            }
94            Err(e) => {
95                tracing::debug!(file = %path, error = %e, "Proactive LSP symbol recovery failed");
96            }
97        }
98    }
99
100    if sections.is_empty() {
101        return None;
102    }
103
104    Some(Message {
105        role: crate::provider::Role::System,
106        content: vec![crate::provider::ContentPart::Text {
107            text: format!(
108                "Mandatory proactive LSP context (prefetched before first reply). Prioritize these real LSP diagnostics and errors over speculation. Do not call the lsp tool just to rediscover the same issues unless you need deeper navigation detail.\n\n{}",
109                sections.join("\n\n---\n\n")
110            ),
111        }],
112    })
113}
114
/// Returns the rule-based fallback ladder of known-good models for
/// `provider`, best candidate first, excluding anything that matches the
/// model that just failed (compared case-insensitively against both the
/// full `provider/model` string and the trailing model segment alone).
pub fn known_good_router_candidates(provider: &str, failed_model: &str) -> Vec<String> {
    let failed = failed_model.trim();

    // Static per-provider ladder; unknown providers get no candidates.
    let ladder: Vec<String> = match provider {
        "openrouter" => vec![
            "openrouter/qwen/qwen3-coder:free",
            "openrouter/openai/gpt-oss-120b:free",
            "openrouter/google/gemma-3-27b-it:free",
            "openrouter/meta-llama/llama-3.3-70b-instruct:free",
        ]
        .into_iter()
        .map(str::to_string)
        .collect(),
        "github-copilot" | "github-copilot-enterprise" => {
            vec![format!("{provider}/gpt-5-mini")]
        }
        other => {
            let single = match other {
                "zai" => "zai/glm-5",
                "glm5" => "glm5/glm-5",
                "openai-codex" => "openai-codex/gpt-5-mini",
                "gemini-web" => "gemini-web/gemini-2.5-flash",
                "local_cuda" => "local_cuda/qwen3.5-9b",
                "google" => "google/gemini-2.5-flash",
                "anthropic" => "anthropic/claude-3-5-haiku-latest",
                _ => return Vec::new(),
            };
            vec![single.to_string()]
        }
    };

    // Never re-suggest the model that just failed.
    ladder
        .into_iter()
        .filter(|candidate| {
            let model_differs = candidate
                .split('/')
                .next_back()
                .map(|model| !model.eq_ignore_ascii_case(failed))
                .unwrap_or(true);
            !candidate.eq_ignore_ascii_case(failed) && model_differs
        })
        .collect()
}
147
148pub fn choose_router_target(
149    registry: &crate::provider::ProviderRegistry,
150    selected_provider: &str,
151    current_model: &str,
152) -> Option<(String, String)> {
153    let current_provider = selected_provider.to_ascii_lowercase();
154
155    for candidate in known_good_router_candidates(selected_provider, current_model) {
156        let (provider_name, model_name) = crate::provider::parse_model_string(&candidate);
157        let provider_name = provider_name.unwrap_or(selected_provider);
158        if registry.get(provider_name).is_some() {
159            return Some((provider_name.to_string(), model_name.to_string()));
160        }
161    }
162
163    if current_provider != "zai" && registry.get("zai").is_some() {
164        return Some(("zai".to_string(), "glm-5".to_string()));
165    }
166    if current_provider != "glm5" && registry.get("glm5").is_some() {
167        return Some(("glm5".to_string(), "glm-5".to_string()));
168    }
169    if current_provider != "openrouter" && registry.get("openrouter").is_some() {
170        return Some((
171            "openrouter".to_string(),
172            "openai/gpt-oss-120b:free".to_string(),
173        ));
174    }
175
176    None
177}
178
179/// CADMAS-CTX-aware variant of [`choose_router_target`] (Phase C step 17).
180///
181/// When `state.config.enabled` is `true`, the rule-based candidate list
182/// from [`known_good_router_candidates`] is re-ordered by the LCB score
183/// `μ − γ·√u` under the supplied `bucket`. Candidates with no
184/// observations yet keep their original rule order (cold-start
185/// conservatism). When `state.config.enabled` is `false`, this function
186/// is exactly [`choose_router_target`].
187///
188/// The actual outcome update — `state.update(provider, "model_call",
189/// bucket, success)` — lives in the prompt loop's retry handler and is
190/// wired in a follow-up commit. This function is the *selection* half
191/// of the bandit loop; the *update* half is the hook point.
192///
193/// # Arguments
194///
195/// * `registry` — Active provider registry (same as the non-bandit path).
196/// * `state` — CADMAS-CTX sidecar for this session. Read-only here.
197/// * `bucket` — Context bucket extracted from the current turn's
198///   [`RelevanceMeta`](crate::session::relevance::RelevanceMeta).
199/// * `selected_provider` / `current_model` — Same semantics as
200///   [`choose_router_target`].
201///
202/// # Examples
203///
204/// ```rust,no_run
205/// # tokio::runtime::Runtime::new().unwrap().block_on(async {
206/// use codetether_agent::provider::ProviderRegistry;
207/// use codetether_agent::session::delegation::{DelegationConfig, DelegationState};
208/// use codetether_agent::session::helper::router::choose_router_target_bandit;
209/// use codetether_agent::session::relevance::{Bucket, Dependency, Difficulty, ToolUse};
210///
211/// let registry = ProviderRegistry::from_vault().await.unwrap();
212/// let state = DelegationState::with_config(DelegationConfig::default());
213/// let bucket = Bucket {
214///     difficulty: Difficulty::Easy,
215///     dependency: Dependency::Isolated,
216///     tool_use: ToolUse::No,
217/// };
218/// let _ = choose_router_target_bandit(&registry, &state, bucket, "openai", "gpt-5");
219/// # });
220/// ```
221pub fn choose_router_target_bandit(
222    registry: &crate::provider::ProviderRegistry,
223    state: &crate::session::delegation::DelegationState,
224    bucket: crate::session::relevance::Bucket,
225    selected_provider: &str,
226    current_model: &str,
227) -> Option<(String, String)> {
228    if !state.config.enabled {
229        return choose_router_target(registry, selected_provider, current_model);
230    }
231
232    // Enumerate rule-based candidates, annotate each with an LCB score
233    // (or 0.0 when there is no posterior yet), sort stably by score
234    // descending, and return the first one the registry knows about.
235    let candidates = known_good_router_candidates(selected_provider, current_model);
236    let mut scored: Vec<(String, f64)> = candidates
237        .into_iter()
238        .map(|raw| {
239            let (provider_name, _model_name) = crate::provider::parse_model_string(&raw);
240            let provider_name = provider_name.unwrap_or(selected_provider);
241            let score = state
242                .score(
243                    provider_name,
244                    crate::session::delegation_skills::MODEL_CALL,
245                    bucket,
246                )
247                .unwrap_or(0.0);
248            (raw, score)
249        })
250        .collect();
251    scored.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
252
253    for (candidate, _score) in scored {
254        let (provider_name, model_name) = crate::provider::parse_model_string(&candidate);
255        let provider_name = provider_name.unwrap_or(selected_provider);
256        if registry.get(provider_name).is_some() {
257            return Some((provider_name.to_string(), model_name.to_string()));
258        }
259    }
260
261    // No re-ordered candidate was registered — fall back to the
262    // original rule-based ladder so we never regress against the
263    // non-bandit behaviour.
264    choose_router_target(registry, selected_provider, current_model)
265}