scud/config.rs
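
//! Configuration for scud: which LLM provider to use and which model
//! tiers (default / smart / fast) to route different tasks to.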

use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;

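/// Top-level configuration, persisted as TOML.
///
/// With the defaults below, the serialized file looks like:
///
/// ```toml
/// [llm]
/// provider = "claude-cli"
/// model = "sonnet"
/// smart_model = "opus"
/// fast_model = "sonnet"
/// max_tokens = 16000
/// ```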
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub llm: LLMConfig,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    pub provider: String,
    /// Default model (used when no tier is specified)
    pub model: String,
    /// Smart model for validation/analysis tasks (large context)
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Fast model for generation tasks
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Maximum completion tokens per request
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}

fn default_smart_model() -> String {
    "opus".to_string()
}

fn default_fast_model() -> String {
    "sonnet".to_string()
}

// Keeps the serde fallback in sync with `Config::default()`; a bare
// `#[serde(default)]` would deserialize a missing `max_tokens` as 0.
fn default_max_tokens() -> u32 {
    16000
}

impl Default for Config {
    fn default() -> Self {
        Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_model: "opus".to_string(),
                fast_model: "sonnet".to_string(),
                max_tokens: 16000,
            },
        }
    }
}

impl Config {
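    /// Loads the configuration from `path`, parsing it as TOML.
    ///
    /// A minimal usage sketch; the import path assumes this module is
    /// exposed as `scud::config`, and the file path is illustrative:
    ///
    /// ```no_run
    /// # fn main() -> anyhow::Result<()> {
    /// use std::path::Path;
    /// use scud::config::Config;
    ///
    /// let config = Config::load(Path::new(".scud/config.toml"))?;
    /// println!("provider = {}", config.llm.provider);
    /// # Ok(())
    /// # }
    /// ```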
    pub fn load(path: &Path) -> Result<Self> {
        let content = fs::read_to_string(path)
            .with_context(|| format!("Failed to read config file: {}", path.display()))?;

        toml::from_str(&content)
            .with_context(|| format!("Failed to parse config file: {}", path.display()))
    }

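    /// Serializes the config as pretty TOML and writes it to `path`,
    /// creating any missing parent directories first.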
    pub fn save(&self, path: &Path) -> Result<()> {
        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;

        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create config directory: {}", parent.display())
            })?;
        }

        fs::write(path, content)
            .with_context(|| format!("Failed to write config file: {}", path.display()))
    }

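    /// Environment variable that should hold the API key for the configured
    /// provider; returns `"NONE"` for CLI-backed providers that need no key.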
    pub fn api_key_env_var(&self) -> &str {
        match self.llm.provider.as_str() {
            "anthropic" => "ANTHROPIC_API_KEY",
            "xai" => "XAI_API_KEY",
            "openai" => "OPENAI_API_KEY",
            "openrouter" => "OPENROUTER_API_KEY",
            "claude-cli" => "NONE", // Claude CLI doesn't need API key
            "codex" => "NONE",      // Codex CLI doesn't need API key
            _ => "API_KEY",
        }
    }

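    /// Whether the configured provider requires an API key (the `claude-cli`
    /// and `codex` providers shell out to local CLIs and need none).
    ///
    /// A sketch of how a caller might combine this with `api_key_env_var`
    /// (the error handling here is illustrative only):
    ///
    /// ```no_run
    /// # use scud::config::Config;
    /// let config = Config::default();
    /// if config.requires_api_key() {
    ///     let key = std::env::var(config.api_key_env_var())
    ///         .expect("provider API key not set");
    ///     // hand `key` to the HTTP client here
    /// }
    /// ```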
    pub fn requires_api_key(&self) -> bool {
        !matches!(self.llm.provider.as_str(), "claude-cli" | "codex")
    }

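    /// HTTP endpoint for the configured provider; unknown providers fall
    /// back to the Anthropic Messages endpoint.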
    pub fn api_endpoint(&self) -> &str {
        match self.llm.provider.as_str() {
            "anthropic" => "https://api.anthropic.com/v1/messages",
            "xai" => "https://api.x.ai/v1/chat/completions",
            "openai" => "https://api.openai.com/v1/chat/completions",
            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
            _ => "https://api.anthropic.com/v1/messages",
        }
    }

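    /// Default model name for a given provider.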
    pub fn default_model_for_provider(provider: &str) -> &str {
        match provider {
            "xai" => "grok-code-fast-1",
            "anthropic" => "claude-sonnet-4-5-20250929",
            "openai" => "o3-mini",
            "openrouter" => "anthropic/claude-sonnet-4.5",
            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
            "codex" => "gpt-5.1",     // Codex CLI default model
            _ => "grok-code-fast-1",
        }
    }

    /// Get suggested models for a provider (for display in init)
    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
        match provider {
            "xai" => vec![
                "grok-code-fast-1",
                "grok-4-1-fast-reasoning",
                "grok-4-1-fast",
                "grok-3-fast",
            ],
            "anthropic" => vec![
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
                "claude-opus-4-1-20250805",
            ],
            "openai" => vec![
                "gpt-5.2-high",
                "gpt-5.1",
                "gpt-5.1-mini",
                "o3-mini",
                "o3",
                "o4-mini",
                "gpt-4.1",
            ],
            "openrouter" => vec![
                "anthropic/claude-sonnet-4.5",
                "anthropic/claude-opus-4.5",
                "openai/o3-mini",
                "openai/gpt-4.1",
                "xai/grok-4-1-fast-reasoning",
            ],
            "claude-cli" => vec![
                "opus",   // Claude Opus 4.5 - smart/reasoning
                "sonnet", // Claude Sonnet - fast/capable
                "haiku",  // Claude Haiku - fastest
            ],
            "codex" => vec![
                "gpt-5.2-high", // Smart/reasoning model
                "gpt-5.1",      // Capable model
                "gpt-5.1-mini", // Fast model
                "o3",           // Reasoning model
                "o3-mini",      // Fast reasoning
            ],
            _ => vec![],
        }
    }

    /// Get the smart model (for validation/analysis tasks with large context)
    pub fn smart_model(&self) -> &str {
        &self.llm.smart_model
    }

    /// Get the fast model (for generation tasks)
    pub fn fast_model(&self) -> &str {
        &self.llm.fast_model
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_default_config() {
        let config = Config::default();
        assert_eq!(config.llm.provider, "claude-cli");
        assert_eq!(config.llm.model, "sonnet");
        assert_eq!(config.llm.smart_model, "opus");
        assert_eq!(config.llm.fast_model, "sonnet");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_model(), "sonnet");
    }

    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");

        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");

        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");

        config.llm.provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );

        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );

        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_model: "opus".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
        };

        config.save(&config_path).unwrap();
        assert!(config_path.exists());

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    #[test]
    fn test_load_config_without_model_tiers() {
        // Test backward compatibility - loading a config without smart/fast models
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        // Write a config without smart_model and fast_model
        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "grok-code-fast-1");
        // Should use defaults for missing fields
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_model, "sonnet");
    }
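
    // Companion to the tier test above: with the named `default_max_tokens`
    // fallback, a config that omits `max_tokens` should load as 16000 rather
    // than 0.
    #[test]
    fn test_load_config_without_max_tokens() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "grok-code-fast-1"
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.max_tokens, 16000);
    }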
}