// scud/config.rs — configuration loading, saving, and provider defaults.

1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::fs;
4use std::path::Path;
5
/// Top-level scud configuration, persisted as TOML via [`Config::load`]
/// and [`Config::save`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// LLM provider/model settings (the `[llm]` section).
    pub llm: LLMConfig,
    /// Swarm settings; `#[serde(default)]` keeps older config files
    /// without a `[swarm]` section loadable.
    #[serde(default)]
    pub swarm: SwarmConfig,
}
12
/// Swarm-mode settings. Every field has a serde default so partially
/// specified `[swarm]` sections still deserialize.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SwarmConfig {
    /// Harness used for swarm runs (default: "claude").
    #[serde(default = "default_swarm_harness")]
    pub harness: String,
    /// Swarm round size (default: 5) — presumably items processed per
    /// round; confirm against call sites.
    #[serde(default = "default_round_size")]
    pub round_size: usize,
    /// Optional tag applied by default to swarm work (default: none).
    #[serde(default = "default_default_tag")]
    pub default_tag: Option<String>,
}
22
/// Serde default for `SwarmConfig::harness`.
fn default_swarm_harness() -> String {
    String::from("claude")
}
26
/// Serde default for `SwarmConfig::round_size`.
fn default_round_size() -> usize {
    5
}
30
/// Serde default for `SwarmConfig::default_tag` (no tag).
fn default_default_tag() -> Option<String> {
    None
}
34
35impl Default for SwarmConfig {
36    fn default() -> Self {
37        SwarmConfig {
38            harness: default_swarm_harness(),
39            round_size: default_round_size(),
40            default_tag: default_default_tag(),
41        }
42    }
43}
44
/// LLM provider/model configuration in three tiers: a default tier plus
/// a "smart" tier (validation/analysis) and a "fast" tier (generation).
/// Each field's serde default consults a `SCUD_*` environment variable
/// before falling back to a hard-coded value.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    /// Default provider (env override: SCUD_PROVIDER; default "xai")
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Default model, used when no tier specified (env override: SCUD_MODEL)
    #[serde(default = "default_model")]
    pub model: String,
    /// Smart provider for validation/analysis tasks (env override: SCUD_SMART_PROVIDER)
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    /// Smart model for validation/analysis tasks with large context (env override: SCUD_SMART_MODEL)
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Fast provider for generation tasks (env override: SCUD_FAST_PROVIDER)
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    /// Fast model for generation tasks (env override: SCUD_FAST_MODEL)
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Max tokens for LLM requests (env override: SCUD_MAX_TOKENS; default 16000)
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}
69
/// Default provider: `SCUD_PROVIDER` env var, falling back to "xai".
fn default_provider() -> String {
    match std::env::var("SCUD_PROVIDER") {
        Ok(value) => value,
        Err(_) => "xai".to_string(),
    }
}
73
/// Default model: `SCUD_MODEL` env var, falling back to "xai/grok-code-fast-1".
fn default_model() -> String {
    match std::env::var("SCUD_MODEL") {
        Ok(value) => value,
        Err(_) => "xai/grok-code-fast-1".to_string(),
    }
}
77
/// Smart-tier provider: `SCUD_SMART_PROVIDER` env var, falling back to "claude-cli".
fn default_smart_provider() -> String {
    match std::env::var("SCUD_SMART_PROVIDER") {
        Ok(value) => value,
        Err(_) => "claude-cli".to_string(),
    }
}
81
/// Smart-tier model: `SCUD_SMART_MODEL` env var, falling back to "opus".
fn default_smart_model() -> String {
    match std::env::var("SCUD_SMART_MODEL") {
        Ok(value) => value,
        Err(_) => "opus".to_string(),
    }
}
85
/// Fast-tier provider: `SCUD_FAST_PROVIDER` env var, falling back to "xai".
fn default_fast_provider() -> String {
    match std::env::var("SCUD_FAST_PROVIDER") {
        Ok(value) => value,
        Err(_) => "xai".to_string(),
    }
}
89
/// Fast-tier model: `SCUD_FAST_MODEL` env var, falling back to "xai/grok-code-fast-1".
fn default_fast_model() -> String {
    match std::env::var("SCUD_FAST_MODEL") {
        Ok(value) => value,
        Err(_) => "xai/grok-code-fast-1".to_string(),
    }
}
93
/// Max tokens: `SCUD_MAX_TOKENS` env var (silently ignoring unset or
/// unparsable values), falling back to 16000.
fn default_max_tokens() -> u32 {
    std::env::var("SCUD_MAX_TOKENS")
        .ok()
        .and_then(|raw| raw.parse().ok())
        .map_or(16000, |n| n)
}
100
101impl Default for Config {
102    fn default() -> Self {
103        Config {
104            llm: LLMConfig {
105                provider: default_provider(),
106                model: default_model(),
107                smart_provider: default_smart_provider(),
108                smart_model: default_smart_model(),
109                fast_provider: default_fast_provider(),
110                fast_model: default_fast_model(),
111                max_tokens: default_max_tokens(),
112            },
113            swarm: SwarmConfig::default(),
114        }
115    }
116}
117
118impl Config {
119    pub fn load(path: &Path) -> Result<Self> {
120        let content = fs::read_to_string(path)
121            .with_context(|| format!("Failed to read config file: {}", path.display()))?;
122
123        toml::from_str(&content)
124            .with_context(|| format!("Failed to parse config file: {}", path.display()))
125    }
126
127    pub fn save(&self, path: &Path) -> Result<()> {
128        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;
129
130        if let Some(parent) = path.parent() {
131            fs::create_dir_all(parent).with_context(|| {
132                format!("Failed to create config directory: {}", parent.display())
133            })?;
134        }
135
136        fs::write(path, content)
137            .with_context(|| format!("Failed to write config file: {}", path.display()))
138    }
139
140    pub fn api_key_env_var(&self) -> &str {
141        Self::api_key_env_var_for_provider(&self.llm.provider)
142    }
143
144    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
145        match provider {
146            "anthropic" => "ANTHROPIC_API_KEY",
147            "xai" => "XAI_API_KEY",
148            "openai" => "OPENAI_API_KEY",
149            "openrouter" => "OPENROUTER_API_KEY",
150            "claude-cli" => "NONE", // Claude CLI doesn't need API key
151            "codex" => "NONE",      // Codex CLI doesn't need API key
152            "cursor" => "NONE",     // Cursor Agent CLI doesn't need API key
153            _ => "API_KEY",
154        }
155    }
156
157    pub fn requires_api_key(&self) -> bool {
158        let providers = [
159            &self.llm.provider,
160            &self.llm.smart_provider,
161            &self.llm.fast_provider,
162        ];
163        providers
164            .iter()
165            .any(|p| !matches!(p.as_str(), "claude-cli" | "codex" | "cursor"))
166    }
167
168    pub fn api_endpoint(&self) -> &str {
169        match self.llm.provider.as_str() {
170            "anthropic" => "https://api.anthropic.com/v1/messages",
171            "xai" => "https://api.x.ai/v1/chat/completions",
172            "openai" => "https://api.openai.com/v1/chat/completions",
173            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
174            _ => "https://api.anthropic.com/v1/messages",
175        }
176    }
177
178    pub fn default_model_for_provider(provider: &str) -> &str {
179        match provider {
180            "xai" => "xai/grok-code-fast-1",
181            "anthropic" => "claude-sonnet-4-5-20250929",
182            "openai" => "o3-mini",
183            "openrouter" => "anthropic/claude-sonnet-4.5",
184            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
185            "codex" => "gpt-5.1",         // Codex CLI default model
186            "cursor" => "claude-4-sonnet", // Cursor Agent default model
187            _ => "xai/grok-code-fast-1",
188        }
189    }
190
191    /// Get suggested models for a provider (for display in init)
192    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
193        match provider {
194            "xai" => vec![
195                "xai/grok-code-fast-1",
196                "xai/grok-4-1-fast",
197                "xai/grok-4-fast",
198                "xai/grok-3-fast",
199            ],
200            "anthropic" => vec![
201                "claude-sonnet-4-5-20250929",
202                "claude-opus-4-5-20251101",
203                "claude-haiku-4-5-20251001",
204                "claude-opus-4-1-20250805",
205            ],
206            "openai" => vec![
207                "gpt-5.2-high",
208                "gpt-5.1",
209                "gpt-5.1-mini",
210                "o3-mini",
211                "o3",
212                "o4-mini",
213                "gpt-4.1",
214            ],
215            "openrouter" => vec![
216                "anthropic/claude-sonnet-4.5",
217                "anthropic/claude-opus-4.5",
218                "openai/o3-mini",
219                "openai/gpt-4.1",
220                "xai/grok-4-1-fast-reasoning",
221            ],
222            "claude-cli" => vec![
223                "opus",   // Claude Opus 4.5 - smart/reasoning
224                "sonnet", // Claude Sonnet - fast/capable
225                "haiku",  // Claude Haiku - fastest
226            ],
227            "codex" => vec![
228                "gpt-5.2-high", // Smart/reasoning model
229                "gpt-5.1",      // Capable model
230                "gpt-5.1-mini", // Fast model
231                "o3",           // Reasoning model
232                "o3-mini",      // Fast reasoning
233            ],
234            "cursor" => vec![
235                "claude-4-opus",   // Smart/reasoning
236                "claude-4-sonnet", // Balanced
237                "gpt-5",          // OpenAI model
238                "gpt-5.2-high",   // High-capability
239            ],
240            _ => vec![],
241        }
242    }
243
244    /// Get the smart provider (for validation/analysis tasks with large context)
245    pub fn smart_provider(&self) -> &str {
246        &self.llm.smart_provider
247    }
248
249    /// Get the smart model (for validation/analysis tasks with large context)
250    pub fn smart_model(&self) -> &str {
251        &self.llm.smart_model
252    }
253
254    /// Get the fast provider (for generation tasks)
255    pub fn fast_provider(&self) -> &str {
256        &self.llm.fast_provider
257    }
258
259    /// Get the fast model (for generation tasks)
260    pub fn fast_model(&self) -> &str {
261        &self.llm.fast_model
262    }
263}
264
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    // NOTE(review): these default assertions hold only when the SCUD_*
    // environment variables are unset in the test environment.
    #[test]
    fn test_default_config() {
        let config = Config::default();
        // Default provider is xai with xai/grok-code-fast-1 for speed
        assert_eq!(config.llm.provider, "xai");
        assert_eq!(config.llm.model, "xai/grok-code-fast-1");
        // Smart tier uses claude-cli with opus
        assert_eq!(config.llm.smart_provider, "claude-cli");
        assert_eq!(config.llm.smart_model, "opus");
        // Fast tier uses xai with xai/grok-code-fast-1
        assert_eq!(config.llm.fast_provider, "xai");
        assert_eq!(config.llm.fast_model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    // The tier accessor methods should mirror the underlying llm fields.
    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_provider(), "claude-cli");
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_provider(), "xai");
        assert_eq!(config.fast_model(), "xai/grok-code-fast-1");
    }

    // Provider -> API-key env var mapping, plus the CLI-only case where
    // no key is required at all.
    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");

        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");

        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");

        // All three tiers must be CLI-backed for requires_api_key to be false.
        config.llm.provider = "claude-cli".to_string();
        config.llm.smart_provider = "claude-cli".to_string();
        config.llm.fast_provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

    // Provider -> HTTP endpoint mapping.
    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );

        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );

        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    // Round-trip: save to a temp file, load it back, compare all fields.
    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
            swarm: SwarmConfig::default(),
        };

        config.save(&config_path).unwrap();
        assert!(config_path.exists());

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    // Spot-checks of the per-provider default model table.
    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "xai/grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    #[test]
    fn test_load_config_without_model_tiers() {
        // Test backward compatibility - loading a config without smart/fast models
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        // Write a config without smart_model and fast_model
        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "xai/grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "xai/grok-code-fast-1");
        // Should use defaults for missing fields
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "xai/grok-code-fast-1");
    }
}