// scud/config.rs — configuration types, serde defaults, and TOML load/save.
1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::fs;
4use std::path::Path;
5
/// Top-level scud configuration, persisted as a TOML file on disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// LLM provider/model settings (`[llm]` table).
    pub llm: LLMConfig,
    /// Swarm settings (`[swarm]` table); a missing table falls back to
    /// `SwarmConfig::default()` via `#[serde(default)]`.
    #[serde(default)]
    pub swarm: SwarmConfig,
}
12
/// Swarm-mode settings (`[swarm]` table). Every field has a serde default,
/// so any subset may be omitted from the TOML file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SwarmConfig {
    /// CLI harness used for swarm agents; defaults to "claude".
    #[serde(default = "default_swarm_harness")]
    pub harness: String,
    /// Size of a swarm round; defaults to 5.
    #[serde(default = "default_round_size")]
    pub round_size: usize,
    /// Optional default tag; absent unless configured.
    #[serde(default = "default_default_tag")]
    pub default_tag: Option<String>,
    /// Use direct API instead of CLI harnesses.
    /// Requires `direct-api` Cargo feature.
    #[serde(default)]
    pub use_direct_api: bool,
    /// Provider for direct API mode: anthropic, openai, xai, openrouter, opencode-zen
    #[serde(default = "default_direct_api_provider")]
    pub direct_api_provider: String,
}
29
/// Serde default for `SwarmConfig::harness`: the "claude" CLI harness.
fn default_swarm_harness() -> String {
    String::from("claude")
}
33
/// Serde default for `SwarmConfig::round_size`.
fn default_round_size() -> usize {
    const DEFAULT_ROUND_SIZE: usize = 5;
    DEFAULT_ROUND_SIZE
}
37
/// Serde default for `SwarmConfig::default_tag`: no tag configured.
fn default_default_tag() -> Option<String> {
    Option::<String>::None
}
41
/// Serde default for `SwarmConfig::direct_api_provider`: honors the
/// `SCUD_DIRECT_API_PROVIDER` environment variable, else "anthropic".
fn default_direct_api_provider() -> String {
    match std::env::var("SCUD_DIRECT_API_PROVIDER") {
        Ok(provider) => provider,
        Err(_) => String::from("anthropic"),
    }
}
45
46impl Default for SwarmConfig {
47    fn default() -> Self {
48        SwarmConfig {
49            harness: default_swarm_harness(),
50            round_size: default_round_size(),
51            default_tag: default_default_tag(),
52            use_direct_api: false,
53            direct_api_provider: default_direct_api_provider(),
54        }
55    }
56}
57
/// LLM settings (`[llm]` table): a default provider/model plus "smart" and
/// "fast" tiers that may point at different providers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    /// Default provider
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Default model (used when no tier specified)
    #[serde(default = "default_model")]
    pub model: String,
    /// Smart provider for validation/analysis tasks
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    /// Smart model for validation/analysis tasks (large context)
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Fast provider for generation tasks
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    /// Fast model for generation tasks
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Max tokens for LLM requests (default 16000, overridable via SCUD_MAX_TOKENS)
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}
82
/// Serde default for `LLMConfig::provider`: `SCUD_PROVIDER` env var, else "xai".
fn default_provider() -> String {
    match std::env::var("SCUD_PROVIDER") {
        Ok(value) => value,
        Err(_) => String::from("xai"),
    }
}
86
/// Serde default for `LLMConfig::model`: `SCUD_MODEL` env var, else the
/// fast xai coding model.
fn default_model() -> String {
    match std::env::var("SCUD_MODEL") {
        Ok(value) => value,
        Err(_) => String::from("xai/grok-code-fast-1"),
    }
}
90
/// Serde default for `LLMConfig::smart_provider`: `SCUD_SMART_PROVIDER`
/// env var, else the Claude CLI harness.
fn default_smart_provider() -> String {
    match std::env::var("SCUD_SMART_PROVIDER") {
        Ok(value) => value,
        Err(_) => String::from("claude-cli"),
    }
}
94
/// Serde default for `LLMConfig::smart_model`: `SCUD_SMART_MODEL` env var,
/// else "opus" (Claude CLI model alias).
fn default_smart_model() -> String {
    match std::env::var("SCUD_SMART_MODEL") {
        Ok(value) => value,
        Err(_) => String::from("opus"),
    }
}
98
/// Serde default for `LLMConfig::fast_provider`: `SCUD_FAST_PROVIDER`
/// env var, else "xai".
fn default_fast_provider() -> String {
    match std::env::var("SCUD_FAST_PROVIDER") {
        Ok(value) => value,
        Err(_) => String::from("xai"),
    }
}
102
/// Serde default for `LLMConfig::fast_model`: `SCUD_FAST_MODEL` env var,
/// else the fast xai coding model.
fn default_fast_model() -> String {
    match std::env::var("SCUD_FAST_MODEL") {
        Ok(value) => value,
        Err(_) => String::from("xai/grok-code-fast-1"),
    }
}
106
/// Serde default for `LLMConfig::max_tokens`: `SCUD_MAX_TOKENS` env var
/// when set and parseable as u32, else 16000.
fn default_max_tokens() -> u32 {
    match std::env::var("SCUD_MAX_TOKENS") {
        // Unset or unparseable both fall back to the same default.
        Ok(raw) => raw.parse().unwrap_or(16000),
        Err(_) => 16000,
    }
}
113
114impl Default for Config {
115    fn default() -> Self {
116        Config {
117            llm: LLMConfig {
118                provider: default_provider(),
119                model: default_model(),
120                smart_provider: default_smart_provider(),
121                smart_model: default_smart_model(),
122                fast_provider: default_fast_provider(),
123                fast_model: default_fast_model(),
124                max_tokens: default_max_tokens(),
125            },
126            swarm: SwarmConfig::default(),
127        }
128    }
129}
130
131impl Config {
132    pub fn load(path: &Path) -> Result<Self> {
133        let content = fs::read_to_string(path)
134            .with_context(|| format!("Failed to read config file: {}", path.display()))?;
135
136        toml::from_str(&content)
137            .with_context(|| format!("Failed to parse config file: {}", path.display()))
138    }
139
140    pub fn save(&self, path: &Path) -> Result<()> {
141        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;
142
143        if let Some(parent) = path.parent() {
144            fs::create_dir_all(parent).with_context(|| {
145                format!("Failed to create config directory: {}", parent.display())
146            })?;
147        }
148
149        fs::write(path, content)
150            .with_context(|| format!("Failed to write config file: {}", path.display()))
151    }
152
153    pub fn api_key_env_var(&self) -> &str {
154        Self::api_key_env_var_for_provider(&self.llm.provider)
155    }
156
157    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
158        match provider {
159            "anthropic" => "ANTHROPIC_API_KEY",
160            "anthropic-oauth" => "NONE", // Uses Claude Code OAuth from Keychain
161            "xai" => "XAI_API_KEY",
162            "openai" => "OPENAI_API_KEY",
163            "openrouter" => "OPENROUTER_API_KEY",
164            "opencode-zen" | "opencode" | "zen" => "OPENCODE_API_KEY",
165            "claude-cli" => "NONE", // Claude CLI doesn't need API key
166            "codex" => "NONE",      // Codex CLI doesn't need API key
167            "cursor" => "NONE",     // Cursor Agent CLI doesn't need API key
168            _ => "API_KEY",
169        }
170    }
171
172    pub fn requires_api_key(&self) -> bool {
173        let providers = [
174            &self.llm.provider,
175            &self.llm.smart_provider,
176            &self.llm.fast_provider,
177        ];
178        providers.iter().any(|p| {
179            !matches!(
180                p.as_str(),
181                "claude-cli" | "codex" | "cursor" | "anthropic-oauth"
182            )
183        })
184    }
185
186    pub fn api_endpoint(&self) -> &str {
187        match self.llm.provider.as_str() {
188            "anthropic" => "https://api.anthropic.com/v1/messages",
189            "xai" => "https://api.x.ai/v1/chat/completions",
190            "openai" => "https://api.openai.com/v1/chat/completions",
191            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
192            _ => "https://api.anthropic.com/v1/messages",
193        }
194    }
195
196    pub fn default_model_for_provider(provider: &str) -> &str {
197        match provider {
198            "xai" => "xai/grok-code-fast-1",
199            "anthropic" => "claude-sonnet-4-5-20250929",
200            "anthropic-oauth" => "claude-opus-4-6",
201            "openai" => "o3-mini",
202            "openrouter" => "anthropic/claude-sonnet-4.5",
203            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
204            "codex" => "gpt-5.1",         // Codex CLI default model
205            "cursor" => "claude-4-sonnet", // Cursor Agent default model
206            _ => "xai/grok-code-fast-1",
207        }
208    }
209
210    /// Get suggested models for a provider (for display in init)
211    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
212        match provider {
213            "xai" => vec![
214                "xai/grok-code-fast-1",
215                "xai/grok-4-1-fast",
216                "xai/grok-4-fast",
217                "xai/grok-3-fast",
218            ],
219            "anthropic" => vec![
220                "claude-sonnet-4-5-20250929",
221                "claude-opus-4-5-20251101",
222                "claude-haiku-4-5-20251001",
223                "claude-opus-4-1-20250805",
224            ],
225            "anthropic-oauth" => vec![
226                "claude-opus-4-6",
227                "claude-sonnet-4-5-20250929",
228                "claude-opus-4-5-20251101",
229                "claude-haiku-4-5-20251001",
230            ],
231            "openai" => vec![
232                "gpt-5.2-high",
233                "gpt-5.1",
234                "gpt-5.1-mini",
235                "o3-mini",
236                "o3",
237                "o4-mini",
238                "gpt-4.1",
239            ],
240            "openrouter" => vec![
241                "anthropic/claude-sonnet-4.5",
242                "anthropic/claude-opus-4.5",
243                "openai/o3-mini",
244                "openai/gpt-4.1",
245                "xai/grok-4-1-fast-reasoning",
246            ],
247            "claude-cli" => vec![
248                "opus",   // Claude Opus 4.5 - smart/reasoning
249                "sonnet", // Claude Sonnet - fast/capable
250                "haiku",  // Claude Haiku - fastest
251            ],
252            "codex" => vec![
253                "gpt-5.2-high", // Smart/reasoning model
254                "gpt-5.1",      // Capable model
255                "gpt-5.1-mini", // Fast model
256                "o3",           // Reasoning model
257                "o3-mini",      // Fast reasoning
258            ],
259            "cursor" => vec![
260                "claude-4-opus",   // Smart/reasoning
261                "claude-4-sonnet", // Balanced
262                "gpt-5",          // OpenAI model
263                "gpt-5.2-high",   // High-capability
264            ],
265            _ => vec![],
266        }
267    }
268
269    /// Get the smart provider (for validation/analysis tasks with large context)
270    pub fn smart_provider(&self) -> &str {
271        &self.llm.smart_provider
272    }
273
274    /// Get the smart model (for validation/analysis tasks with large context)
275    pub fn smart_model(&self) -> &str {
276        &self.llm.smart_model
277    }
278
279    /// Get the fast provider (for generation tasks)
280    pub fn fast_provider(&self) -> &str {
281        &self.llm.fast_provider
282    }
283
284    /// Get the fast model (for generation tasks)
285    pub fn fast_model(&self) -> &str {
286        &self.llm.fast_model
287    }
288}
289
// Unit tests for config defaults, tier accessors, provider mappings, and
// TOML round-tripping. NOTE(review): the default-value assertions assume
// the SCUD_* environment variables are unset in the test environment.
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_default_config() {
        let config = Config::default();
        // Default provider is xai with xai/grok-code-fast-1 for speed
        assert_eq!(config.llm.provider, "xai");
        assert_eq!(config.llm.model, "xai/grok-code-fast-1");
        // Smart tier uses claude-cli with opus
        assert_eq!(config.llm.smart_provider, "claude-cli");
        assert_eq!(config.llm.smart_model, "opus");
        // Fast tier uses xai with xai/grok-code-fast-1
        assert_eq!(config.llm.fast_provider, "xai");
        assert_eq!(config.llm.fast_model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    // Accessor methods should mirror the underlying LLMConfig fields.
    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_provider(), "claude-cli");
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_provider(), "xai");
        assert_eq!(config.fast_model(), "xai/grok-code-fast-1");
    }

    // Provider -> API-key env-var mapping, plus the no-key CLI case.
    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");

        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");

        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");

        // All three tiers on claude-cli => no API key required at all.
        config.llm.provider = "claude-cli".to_string();
        config.llm.smart_provider = "claude-cli".to_string();
        config.llm.fast_provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

    // Provider -> HTTP endpoint mapping for the direct-API paths.
    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );

        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );

        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    // Round-trip: save to a temp file, reload, and compare every field.
    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
            swarm: SwarmConfig::default(),
        };

        config.save(&config_path).unwrap();
        assert!(config_path.exists());

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    // Per-provider default model identifiers.
    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "xai/grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    #[test]
    fn test_load_config_without_model_tiers() {
        // Test backward compatibility - loading a config without smart/fast models
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        // Write a config without smart_model and fast_model
        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "xai/grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "xai/grok-code-fast-1");
        // Should use defaults for missing fields
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "xai/grok-code-fast-1");
    }
}