//! scud/config.rs — configuration types and TOML persistence for scud.

1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::fs;
4use std::path::Path;
5
/// Top-level application configuration, serialized to/from TOML.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// LLM provider/model settings (default, smart, and fast tiers).
    pub llm: LLMConfig,
}
10
11#[derive(Debug, Clone, Serialize, Deserialize)]
12pub struct LLMConfig {
13    pub provider: String,
14    /// Default model (used when no tier specified)
15    pub model: String,
16    /// Smart provider for validation/analysis tasks
17    #[serde(default = "default_smart_provider")]
18    pub smart_provider: String,
19    /// Smart model for validation/analysis tasks (large context)
20    #[serde(default = "default_smart_model")]
21    pub smart_model: String,
22    /// Fast provider for generation tasks
23    #[serde(default = "default_fast_provider")]
24    pub fast_provider: String,
25    /// Fast model for generation tasks
26    #[serde(default = "default_fast_model")]
27    pub fast_model: String,
28    #[serde(default)]
29    pub max_tokens: u32,
30}
31
/// Serde default for `LLMConfig::smart_provider`.
fn default_smart_provider() -> String {
    String::from("claude-cli")
}
35
/// Serde default for `LLMConfig::smart_model`.
fn default_smart_model() -> String {
    String::from("opus")
}
39
/// Serde default for `LLMConfig::fast_provider`.
fn default_fast_provider() -> String {
    String::from("xai")
}
43
/// Serde default for `LLMConfig::fast_model`.
fn default_fast_model() -> String {
    String::from("grok-code-fast-1")
}
47
48impl Default for Config {
49    fn default() -> Self {
50        Config {
51            llm: LLMConfig {
52                provider: "claude-cli".to_string(),
53                model: "sonnet".to_string(),
54                smart_provider: "claude-cli".to_string(),
55                smart_model: "opus".to_string(),
56                fast_provider: "xai".to_string(),
57                fast_model: "grok-code-fast-1".to_string(),
58                max_tokens: 16000,
59            },
60        }
61    }
62}
63
64impl Config {
65    pub fn load(path: &Path) -> Result<Self> {
66        let content = fs::read_to_string(path)
67            .with_context(|| format!("Failed to read config file: {}", path.display()))?;
68
69        toml::from_str(&content)
70            .with_context(|| format!("Failed to parse config file: {}", path.display()))
71    }
72
73    pub fn save(&self, path: &Path) -> Result<()> {
74        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;
75
76        if let Some(parent) = path.parent() {
77            fs::create_dir_all(parent).with_context(|| {
78                format!("Failed to create config directory: {}", parent.display())
79            })?;
80        }
81
82        fs::write(path, content)
83            .with_context(|| format!("Failed to write config file: {}", path.display()))
84    }
85
86    pub fn api_key_env_var(&self) -> &str {
87        Self::api_key_env_var_for_provider(&self.llm.provider)
88    }
89
90    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
91        match provider {
92            "anthropic" => "ANTHROPIC_API_KEY",
93            "xai" => "XAI_API_KEY",
94            "openai" => "OPENAI_API_KEY",
95            "openrouter" => "OPENROUTER_API_KEY",
96            "claude-cli" => "NONE", // Claude CLI doesn't need API key
97            "codex" => "NONE",      // Codex CLI doesn't need API key
98            _ => "API_KEY",
99        }
100    }
101
102    pub fn requires_api_key(&self) -> bool {
103        let providers = [
104            &self.llm.provider,
105            &self.llm.smart_provider,
106            &self.llm.fast_provider,
107        ];
108        providers
109            .iter()
110            .any(|p| !matches!(p.as_str(), "claude-cli" | "codex"))
111    }
112
113    pub fn api_endpoint(&self) -> &str {
114        match self.llm.provider.as_str() {
115            "anthropic" => "https://api.anthropic.com/v1/messages",
116            "xai" => "https://api.x.ai/v1/chat/completions",
117            "openai" => "https://api.openai.com/v1/chat/completions",
118            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
119            _ => "https://api.anthropic.com/v1/messages",
120        }
121    }
122
123    pub fn default_model_for_provider(provider: &str) -> &str {
124        match provider {
125            "xai" => "grok-code-fast-1",
126            "anthropic" => "claude-sonnet-4-5-20250929",
127            "openai" => "o3-mini",
128            "openrouter" => "anthropic/claude-sonnet-4.5",
129            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
130            "codex" => "gpt-5.1",     // Codex CLI default model
131            _ => "grok-code-fast-1",
132        }
133    }
134
135    /// Get suggested models for a provider (for display in init)
136    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
137        match provider {
138            "xai" => vec![
139                "grok-code-fast-1",
140                "grok-4-1-fast-reasoning",
141                "grok-4-1-fast",
142                "grok-3-fast",
143            ],
144            "anthropic" => vec![
145                "claude-sonnet-4-5-20250929",
146                "claude-opus-4-5-20251101",
147                "claude-haiku-4-5-20251001",
148                "claude-opus-4-1-20250805",
149            ],
150            "openai" => vec![
151                "gpt-5.2-high",
152                "gpt-5.1",
153                "gpt-5.1-mini",
154                "o3-mini",
155                "o3",
156                "o4-mini",
157                "gpt-4.1",
158            ],
159            "openrouter" => vec![
160                "anthropic/claude-sonnet-4.5",
161                "anthropic/claude-opus-4.5",
162                "openai/o3-mini",
163                "openai/gpt-4.1",
164                "xai/grok-4-1-fast-reasoning",
165            ],
166            "claude-cli" => vec![
167                "opus",   // Claude Opus 4.5 - smart/reasoning
168                "sonnet", // Claude Sonnet - fast/capable
169                "haiku",  // Claude Haiku - fastest
170            ],
171            "codex" => vec![
172                "gpt-5.2-high", // Smart/reasoning model
173                "gpt-5.1",      // Capable model
174                "gpt-5.1-mini", // Fast model
175                "o3",           // Reasoning model
176                "o3-mini",      // Fast reasoning
177            ],
178            _ => vec![],
179        }
180    }
181
182    /// Get the smart provider (for validation/analysis tasks with large context)
183    pub fn smart_provider(&self) -> &str {
184        &self.llm.smart_provider
185    }
186
187    /// Get the smart model (for validation/analysis tasks with large context)
188    pub fn smart_model(&self) -> &str {
189        &self.llm.smart_model
190    }
191
192    /// Get the fast provider (for generation tasks)
193    pub fn fast_provider(&self) -> &str {
194        &self.llm.fast_provider
195    }
196
197    /// Get the fast model (for generation tasks)
198    pub fn fast_model(&self) -> &str {
199        &self.llm.fast_model
200    }
201}
202
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// The built-in defaults match the documented tier setup.
    #[test]
    fn test_default_config() {
        let cfg = Config::default();
        assert_eq!(cfg.llm.provider, "claude-cli");
        assert_eq!(cfg.llm.model, "sonnet");
        assert_eq!(cfg.llm.smart_provider, "claude-cli");
        assert_eq!(cfg.llm.smart_model, "opus");
        assert_eq!(cfg.llm.fast_provider, "xai");
        assert_eq!(cfg.llm.fast_model, "grok-code-fast-1");
        assert_eq!(cfg.llm.max_tokens, 16000);
    }

    /// Tier accessors expose the underlying smart/fast fields.
    #[test]
    fn test_model_tiers() {
        let cfg = Config::default();
        let smart = (cfg.smart_provider(), cfg.smart_model());
        let fast = (cfg.fast_provider(), cfg.fast_model());
        assert_eq!(smart, ("claude-cli", "opus"));
        assert_eq!(fast, ("xai", "grok-code-fast-1"));
    }

    /// Each hosted provider maps to its env var; CLI providers need no key.
    #[test]
    fn test_api_key_env_vars() {
        let mut cfg = Config::default();

        let cases = [
            ("anthropic", "ANTHROPIC_API_KEY"),
            ("xai", "XAI_API_KEY"),
            ("openai", "OPENAI_API_KEY"),
        ];
        for &(provider, var) in cases.iter() {
            cfg.llm.provider = provider.to_string();
            assert_eq!(cfg.api_key_env_var(), var);
        }

        cfg.llm.provider = "claude-cli".to_string();
        cfg.llm.smart_provider = "claude-cli".to_string();
        cfg.llm.fast_provider = "claude-cli".to_string();
        assert!(!cfg.requires_api_key());
    }

    /// Hosted providers resolve to their documented API endpoints.
    #[test]
    fn test_api_endpoints() {
        let mut cfg = Config::default();

        let cases = [
            ("anthropic", "https://api.anthropic.com/v1/messages"),
            ("xai", "https://api.x.ai/v1/chat/completions"),
            ("openai", "https://api.openai.com/v1/chat/completions"),
        ];
        for &(provider, endpoint) in cases.iter() {
            cfg.llm.provider = provider.to_string();
            assert_eq!(cfg.api_endpoint(), endpoint);
        }
    }

    /// A saved config round-trips through TOML unchanged.
    #[test]
    fn test_save_and_load_config() {
        let dir = TempDir::new().unwrap();
        let file = dir.path().join("config.toml");

        let original = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
        };

        original.save(&file).unwrap();
        assert!(file.exists());

        let roundtrip = Config::load(&file).unwrap();
        assert_eq!(roundtrip.llm.provider, "claude-cli");
        assert_eq!(roundtrip.llm.model, "sonnet");
        assert_eq!(roundtrip.llm.smart_provider, "claude-cli");
        assert_eq!(roundtrip.llm.smart_model, "opus");
        assert_eq!(roundtrip.llm.fast_provider, "xai");
        assert_eq!(roundtrip.llm.fast_model, "haiku");
        assert_eq!(roundtrip.llm.max_tokens, 8192);
    }

    /// Spot-check per-provider default model names.
    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    /// Backward compatibility: a config written before the smart/fast tiers
    /// existed still loads, with serde defaults filling the missing fields.
    #[test]
    fn test_load_config_without_model_tiers() {
        let dir = TempDir::new().unwrap();
        let file = dir.path().join("config.toml");

        // A legacy config: no smart_*/fast_* keys at all.
        std::fs::write(
            &file,
            r#"[llm]
provider = "xai"
model = "grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&file).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "grok-code-fast-1");
        // Missing tier fields fall back to the serde defaults.
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "grok-code-fast-1");
    }
}
341}