// scud/config.rs

1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::fs;
4use std::path::Path;
5
/// Top-level scud configuration, persisted as TOML via `Config::load`/`Config::save`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// LLM provider/model settings (default, smart, and fast tiers).
    pub llm: LLMConfig,
}
10
/// LLM settings: a default provider/model pair plus two named tiers —
/// "smart" (validation/analysis) and "fast" (generation).
///
/// Every field has a serde default that first consults a `SCUD_*`
/// environment variable (see the `default_*` functions below), so partial
/// or older config files deserialize cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    /// Default provider
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Default model (used when no tier specified)
    #[serde(default = "default_model")]
    pub model: String,
    /// Smart provider for validation/analysis tasks
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    /// Smart model for validation/analysis tasks (large context)
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Fast provider for generation tasks
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    /// Fast model for generation tasks
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Max tokens for LLM requests
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}
35
/// Default provider: `SCUD_PROVIDER` env var if set, otherwise "xai".
/// NOTE(review): an env var set to the empty string is taken as-is — confirm intended.
fn default_provider() -> String {
    match std::env::var("SCUD_PROVIDER") {
        Ok(value) => value,
        Err(_) => String::from("xai"),
    }
}
39
/// Default model: `SCUD_MODEL` env var if set, otherwise "grok-code-fast-1".
fn default_model() -> String {
    std::env::var("SCUD_MODEL")
        .ok()
        .unwrap_or_else(|| String::from("grok-code-fast-1"))
}
43
/// Smart-tier provider: `SCUD_SMART_PROVIDER` env var if set, otherwise "claude-cli".
fn default_smart_provider() -> String {
    match std::env::var("SCUD_SMART_PROVIDER") {
        Ok(value) => value,
        Err(_) => String::from("claude-cli"),
    }
}
47
/// Smart-tier model: `SCUD_SMART_MODEL` env var if set, otherwise "opus".
fn default_smart_model() -> String {
    std::env::var("SCUD_SMART_MODEL")
        .ok()
        .unwrap_or_else(|| String::from("opus"))
}
51
/// Fast-tier provider: `SCUD_FAST_PROVIDER` env var if set, otherwise "xai".
fn default_fast_provider() -> String {
    match std::env::var("SCUD_FAST_PROVIDER") {
        Ok(value) => value,
        Err(_) => String::from("xai"),
    }
}
55
/// Fast-tier model: `SCUD_FAST_MODEL` env var if set, otherwise "grok-code-fast-1".
fn default_fast_model() -> String {
    std::env::var("SCUD_FAST_MODEL")
        .ok()
        .unwrap_or_else(|| String::from("grok-code-fast-1"))
}
59
/// Max tokens for LLM requests: `SCUD_MAX_TOKENS` env var when it parses
/// as a u32, otherwise 16000. An unset or unparsable value silently falls
/// back to the default (deliberate best-effort behavior).
fn default_max_tokens() -> u32 {
    match std::env::var("SCUD_MAX_TOKENS") {
        Ok(raw) => raw.parse().unwrap_or(16000),
        Err(_) => 16000,
    }
}
66
67impl Default for Config {
68    fn default() -> Self {
69        Config {
70            llm: LLMConfig {
71                provider: default_provider(),
72                model: default_model(),
73                smart_provider: default_smart_provider(),
74                smart_model: default_smart_model(),
75                fast_provider: default_fast_provider(),
76                fast_model: default_fast_model(),
77                max_tokens: default_max_tokens(),
78            },
79        }
80    }
81}
82
83impl Config {
84    pub fn load(path: &Path) -> Result<Self> {
85        let content = fs::read_to_string(path)
86            .with_context(|| format!("Failed to read config file: {}", path.display()))?;
87
88        toml::from_str(&content)
89            .with_context(|| format!("Failed to parse config file: {}", path.display()))
90    }
91
92    pub fn save(&self, path: &Path) -> Result<()> {
93        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;
94
95        if let Some(parent) = path.parent() {
96            fs::create_dir_all(parent).with_context(|| {
97                format!("Failed to create config directory: {}", parent.display())
98            })?;
99        }
100
101        fs::write(path, content)
102            .with_context(|| format!("Failed to write config file: {}", path.display()))
103    }
104
105    pub fn api_key_env_var(&self) -> &str {
106        Self::api_key_env_var_for_provider(&self.llm.provider)
107    }
108
109    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
110        match provider {
111            "anthropic" => "ANTHROPIC_API_KEY",
112            "xai" => "XAI_API_KEY",
113            "openai" => "OPENAI_API_KEY",
114            "openrouter" => "OPENROUTER_API_KEY",
115            "claude-cli" => "NONE", // Claude CLI doesn't need API key
116            "codex" => "NONE",      // Codex CLI doesn't need API key
117            _ => "API_KEY",
118        }
119    }
120
121    pub fn requires_api_key(&self) -> bool {
122        let providers = [
123            &self.llm.provider,
124            &self.llm.smart_provider,
125            &self.llm.fast_provider,
126        ];
127        providers
128            .iter()
129            .any(|p| !matches!(p.as_str(), "claude-cli" | "codex"))
130    }
131
132    pub fn api_endpoint(&self) -> &str {
133        match self.llm.provider.as_str() {
134            "anthropic" => "https://api.anthropic.com/v1/messages",
135            "xai" => "https://api.x.ai/v1/chat/completions",
136            "openai" => "https://api.openai.com/v1/chat/completions",
137            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
138            _ => "https://api.anthropic.com/v1/messages",
139        }
140    }
141
142    pub fn default_model_for_provider(provider: &str) -> &str {
143        match provider {
144            "xai" => "grok-code-fast-1",
145            "anthropic" => "claude-sonnet-4-5-20250929",
146            "openai" => "o3-mini",
147            "openrouter" => "anthropic/claude-sonnet-4.5",
148            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
149            "codex" => "gpt-5.1",     // Codex CLI default model
150            _ => "grok-code-fast-1",
151        }
152    }
153
154    /// Get suggested models for a provider (for display in init)
155    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
156        match provider {
157            "xai" => vec![
158                "grok-code-fast-1",
159                "grok-4-1-fast-reasoning",
160                "grok-4-1-fast",
161                "grok-3-fast",
162            ],
163            "anthropic" => vec![
164                "claude-sonnet-4-5-20250929",
165                "claude-opus-4-5-20251101",
166                "claude-haiku-4-5-20251001",
167                "claude-opus-4-1-20250805",
168            ],
169            "openai" => vec![
170                "gpt-5.2-high",
171                "gpt-5.1",
172                "gpt-5.1-mini",
173                "o3-mini",
174                "o3",
175                "o4-mini",
176                "gpt-4.1",
177            ],
178            "openrouter" => vec![
179                "anthropic/claude-sonnet-4.5",
180                "anthropic/claude-opus-4.5",
181                "openai/o3-mini",
182                "openai/gpt-4.1",
183                "xai/grok-4-1-fast-reasoning",
184            ],
185            "claude-cli" => vec![
186                "opus",   // Claude Opus 4.5 - smart/reasoning
187                "sonnet", // Claude Sonnet - fast/capable
188                "haiku",  // Claude Haiku - fastest
189            ],
190            "codex" => vec![
191                "gpt-5.2-high", // Smart/reasoning model
192                "gpt-5.1",      // Capable model
193                "gpt-5.1-mini", // Fast model
194                "o3",           // Reasoning model
195                "o3-mini",      // Fast reasoning
196            ],
197            _ => vec![],
198        }
199    }
200
201    /// Get the smart provider (for validation/analysis tasks with large context)
202    pub fn smart_provider(&self) -> &str {
203        &self.llm.smart_provider
204    }
205
206    /// Get the smart model (for validation/analysis tasks with large context)
207    pub fn smart_model(&self) -> &str {
208        &self.llm.smart_model
209    }
210
211    /// Get the fast provider (for generation tasks)
212    pub fn fast_provider(&self) -> &str {
213        &self.llm.fast_provider
214    }
215
216    /// Get the fast model (for generation tasks)
217    pub fn fast_model(&self) -> &str {
218        &self.llm.fast_model
219    }
220}
221
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_default_config() {
        let cfg = Config::default();
        // Primary tier: xai with grok-code-fast-1, chosen for speed.
        assert_eq!(cfg.llm.provider, "xai");
        assert_eq!(cfg.llm.model, "grok-code-fast-1");
        // Smart tier: claude-cli with opus.
        assert_eq!(cfg.llm.smart_provider, "claude-cli");
        assert_eq!(cfg.llm.smart_model, "opus");
        // Fast tier mirrors the primary tier.
        assert_eq!(cfg.llm.fast_provider, "xai");
        assert_eq!(cfg.llm.fast_model, "grok-code-fast-1");
        assert_eq!(cfg.llm.max_tokens, 16000);
    }

    #[test]
    fn test_model_tiers() {
        let cfg = Config::default();
        let expectations = [
            (cfg.smart_provider(), "claude-cli"),
            (cfg.smart_model(), "opus"),
            (cfg.fast_provider(), "xai"),
            (cfg.fast_model(), "grok-code-fast-1"),
        ];
        for (actual, expected) in expectations {
            assert_eq!(actual, expected);
        }
    }

    #[test]
    fn test_api_key_env_vars() {
        let mut cfg = Config::default();

        // Each API-backed provider maps to its own key variable.
        for (provider, env_var) in [
            ("anthropic", "ANTHROPIC_API_KEY"),
            ("xai", "XAI_API_KEY"),
            ("openai", "OPENAI_API_KEY"),
        ] {
            cfg.llm.provider = provider.to_string();
            assert_eq!(cfg.api_key_env_var(), env_var);
        }

        // With CLI providers in every tier, no API key is required.
        cfg.llm.provider = "claude-cli".to_string();
        cfg.llm.smart_provider = "claude-cli".to_string();
        cfg.llm.fast_provider = "claude-cli".to_string();
        assert!(!cfg.requires_api_key());
    }

    #[test]
    fn test_api_endpoints() {
        let mut cfg = Config::default();

        for (provider, endpoint) in [
            ("anthropic", "https://api.anthropic.com/v1/messages"),
            ("xai", "https://api.x.ai/v1/chat/completions"),
            ("openai", "https://api.openai.com/v1/chat/completions"),
        ] {
            cfg.llm.provider = provider.to_string();
            assert_eq!(cfg.api_endpoint(), endpoint);
        }
    }

    #[test]
    fn test_save_and_load_config() {
        let dir = TempDir::new().unwrap();
        let path = dir.path().join("config.toml");

        let original = Config {
            llm: LLMConfig {
                provider: "claude-cli".into(),
                model: "sonnet".into(),
                smart_provider: "claude-cli".into(),
                smart_model: "opus".into(),
                fast_provider: "xai".into(),
                fast_model: "haiku".into(),
                max_tokens: 8192,
            },
        };

        original.save(&path).unwrap();
        assert!(path.exists());

        // Everything written must come back unchanged.
        let roundtripped = Config::load(&path).unwrap();
        assert_eq!(roundtripped.llm.provider, "claude-cli");
        assert_eq!(roundtripped.llm.model, "sonnet");
        assert_eq!(roundtripped.llm.smart_provider, "claude-cli");
        assert_eq!(roundtripped.llm.smart_model, "opus");
        assert_eq!(roundtripped.llm.fast_provider, "xai");
        assert_eq!(roundtripped.llm.fast_model, "haiku");
        assert_eq!(roundtripped.llm.max_tokens, 8192);
    }

    #[test]
    fn test_default_models() {
        for (provider, model) in [
            ("xai", "grok-code-fast-1"),
            ("anthropic", "claude-sonnet-4-5-20250929"),
            ("openai", "o3-mini"),
            ("claude-cli", "sonnet"),
        ] {
            assert_eq!(Config::default_model_for_provider(provider), model);
        }
    }

    #[test]
    fn test_load_config_without_model_tiers() {
        // Backward compatibility: configs written before the smart/fast tier
        // fields existed must still load, with serde defaults filling gaps.
        let dir = TempDir::new().unwrap();
        let path = dir.path().join("config.toml");

        std::fs::write(
            &path,
            r#"[llm]
provider = "xai"
model = "grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let cfg = Config::load(&path).unwrap();
        assert_eq!(cfg.llm.provider, "xai");
        assert_eq!(cfg.llm.model, "grok-code-fast-1");
        // Missing tier fields fall back to defaults.
        assert_eq!(cfg.llm.smart_provider, "claude-cli");
        assert_eq!(cfg.llm.smart_model, "opus");
        assert_eq!(cfg.llm.fast_provider, "xai");
        assert_eq!(cfg.llm.fast_model, "grok-code-fast-1");
    }
}