scud/config.rs

use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;

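/// Top-level scud configuration, persisted as TOML via [`Config::load`]
/// and [`Config::save`]. The default config serializes to roughly the
/// following (illustrative; produced by `toml::to_string_pretty`):
///
/// ```toml
/// [llm]
/// provider = "claude-cli"
/// model = "sonnet"
/// smart_provider = "claude-cli"
/// smart_model = "opus"
/// fast_provider = "xai"
/// fast_model = "grok-code-fast-1"
/// max_tokens = 16000
/// ```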
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub llm: LLMConfig,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    pub provider: String,
    /// Default model (used when no tier specified)
    pub model: String,
    /// Smart provider for validation/analysis tasks
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    /// Smart model for validation/analysis tasks (large context)
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Fast provider for generation tasks
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    /// Fast model for generation tasks
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Maximum tokens per response; defaults to 16000 when absent so a
    /// config missing this field matches `Config::default` rather than 0
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}

fn default_smart_provider() -> String {
    "claude-cli".to_string()
}

fn default_smart_model() -> String {
    "opus".to_string()
}

fn default_fast_provider() -> String {
    "xai".to_string()
}

fn default_fast_model() -> String {
    "grok-code-fast-1".to_string()
}

fn default_max_tokens() -> u32 {
    16000
}

impl Default for Config {
    fn default() -> Self {
        Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "grok-code-fast-1".to_string(),
                max_tokens: 16000,
            },
        }
    }
}

impl Config {
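    /// Read and parse a TOML config file at `path`.
    ///
    /// Illustrative usage (paths and error handling are the caller's choice):
    ///
    /// ```ignore
    /// let config = Config::load(Path::new("config.toml"))?;
    /// println!("provider: {}", config.llm.provider);
    /// ```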
    pub fn load(path: &Path) -> Result<Self> {
        let content = fs::read_to_string(path)
            .with_context(|| format!("Failed to read config file: {}", path.display()))?;

        toml::from_str(&content)
            .with_context(|| format!("Failed to parse config file: {}", path.display()))
    }

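    /// Serialize this config to pretty-printed TOML and write it to `path`,
    /// creating any missing parent directories first.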
    pub fn save(&self, path: &Path) -> Result<()> {
        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;

        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create config directory: {}", parent.display())
            })?;
        }

        fs::write(path, content)
            .with_context(|| format!("Failed to write config file: {}", path.display()))
    }

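    /// Name of the environment variable holding the API key for the
    /// configured primary provider.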
    pub fn api_key_env_var(&self) -> &str {
        Self::api_key_env_var_for_provider(&self.llm.provider)
    }

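    /// Map a provider name to its API key environment variable. CLI-backed
    /// providers report "NONE"; unrecognized providers fall back to "API_KEY".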
    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
        match provider {
            "anthropic" => "ANTHROPIC_API_KEY",
            "xai" => "XAI_API_KEY",
            "openai" => "OPENAI_API_KEY",
            "openrouter" => "OPENROUTER_API_KEY",
            "claude-cli" => "NONE", // Claude CLI doesn't need an API key
            "codex" => "NONE",      // Codex CLI doesn't need an API key
            _ => "API_KEY",
        }
    }

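    /// True if any configured tier (primary, smart, or fast) uses a provider
    /// that authenticates with an API key; the CLI-backed providers do not.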
    pub fn requires_api_key(&self) -> bool {
        let providers = [&self.llm.provider, &self.llm.smart_provider, &self.llm.fast_provider];
        providers.iter().any(|p| !matches!(p.as_str(), "claude-cli" | "codex"))
    }

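    /// HTTP endpoint for the primary provider; unrecognized providers fall
    /// back to the Anthropic Messages endpoint.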
    pub fn api_endpoint(&self) -> &str {
        match self.llm.provider.as_str() {
            "anthropic" => "https://api.anthropic.com/v1/messages",
            "xai" => "https://api.x.ai/v1/chat/completions",
            "openai" => "https://api.openai.com/v1/chat/completions",
            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
            _ => "https://api.anthropic.com/v1/messages",
        }
    }

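    /// Built-in default model for each supported provider; unknown providers
    /// fall back to "grok-code-fast-1".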
    pub fn default_model_for_provider(provider: &str) -> &str {
        match provider {
            "xai" => "grok-code-fast-1",
            "anthropic" => "claude-sonnet-4-5-20250929",
            "openai" => "o3-mini",
            "openrouter" => "anthropic/claude-sonnet-4.5",
            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
            "codex" => "gpt-5.1",     // Codex CLI default model
            _ => "grok-code-fast-1",
        }
    }

    /// Get suggested models for a provider (for display in init)
    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
        match provider {
            "xai" => vec![
                "grok-code-fast-1",
                "grok-4-1-fast-reasoning",
                "grok-4-1-fast",
                "grok-3-fast",
            ],
            "anthropic" => vec![
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
                "claude-opus-4-1-20250805",
            ],
            "openai" => vec![
                "gpt-5.2-high",
                "gpt-5.1",
                "gpt-5.1-mini",
                "o3-mini",
                "o3",
                "o4-mini",
                "gpt-4.1",
            ],
            "openrouter" => vec![
                "anthropic/claude-sonnet-4.5",
                "anthropic/claude-opus-4.5",
                "openai/o3-mini",
                "openai/gpt-4.1",
                "xai/grok-4-1-fast-reasoning",
            ],
            "claude-cli" => vec![
                "opus",   // Claude Opus 4.5 - smart/reasoning
                "sonnet", // Claude Sonnet - fast/capable
                "haiku",  // Claude Haiku - fastest
            ],
            "codex" => vec![
                "gpt-5.2-high", // Smart/reasoning model
                "gpt-5.1",      // Capable model
                "gpt-5.1-mini", // Fast model
                "o3",           // Reasoning model
                "o3-mini",      // Fast reasoning
            ],
            _ => vec![],
        }
    }

    /// Get the smart provider (for validation/analysis tasks with large context)
    pub fn smart_provider(&self) -> &str {
        &self.llm.smart_provider
    }

    /// Get the smart model (for validation/analysis tasks with large context)
    pub fn smart_model(&self) -> &str {
        &self.llm.smart_model
    }

    /// Get the fast provider (for generation tasks)
    pub fn fast_provider(&self) -> &str {
        &self.llm.fast_provider
    }

    /// Get the fast model (for generation tasks)
    pub fn fast_model(&self) -> &str {
        &self.llm.fast_model
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_default_config() {
        let config = Config::default();
        assert_eq!(config.llm.provider, "claude-cli");
        assert_eq!(config.llm.model, "sonnet");
        assert_eq!(config.llm.smart_provider, "claude-cli");
        assert_eq!(config.llm.smart_model, "opus");
        assert_eq!(config.llm.fast_provider, "xai");
        assert_eq!(config.llm.fast_model, "grok-code-fast-1");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_provider(), "claude-cli");
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_provider(), "xai");
        assert_eq!(config.fast_model(), "grok-code-fast-1");
    }

    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");

        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");

        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");

        config.llm.provider = "claude-cli".to_string();
        config.llm.smart_provider = "claude-cli".to_string();
        config.llm.fast_provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

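    // Verifies the fallback arms of `api_key_env_var_for_provider`:
    // CLI-backed providers report "NONE", unknown providers "API_KEY".
    #[test]
    fn test_api_key_env_var_fallbacks() {
        assert_eq!(
            Config::api_key_env_var_for_provider("openrouter"),
            "OPENROUTER_API_KEY"
        );
        assert_eq!(Config::api_key_env_var_for_provider("claude-cli"), "NONE");
        assert_eq!(Config::api_key_env_var_for_provider("codex"), "NONE");
        assert_eq!(Config::api_key_env_var_for_provider("unknown"), "API_KEY");
    }
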
    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );

        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );

        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
        };

        config.save(&config_path).unwrap();
        assert!(config_path.exists());

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

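    // Spot-checks `suggested_models_for_provider`: known providers return
    // suggestions, unknown providers return an empty list.
    #[test]
    fn test_suggested_models() {
        let xai = Config::suggested_models_for_provider("xai");
        assert_eq!(xai.first(), Some(&"grok-code-fast-1"));
        assert!(xai.contains(&"grok-3-fast"));
        assert!(Config::suggested_models_for_provider("unknown").is_empty());
    }
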
    #[test]
    fn test_load_config_without_model_tiers() {
        // Test backward compatibility - loading a config without smart/fast models
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        // Write a config without smart_model and fast_model
        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "grok-code-fast-1");
        // Should use defaults for missing fields
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "grok-code-fast-1");
    }
}