//! scud/config.rs — configuration types, defaults, and TOML load/save for scud.
1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::fs;
4use std::path::Path;
5
/// Top-level scud configuration, deserialized from the TOML config file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// LLM provider/model settings (required table in the config file).
    pub llm: LLMConfig,
    /// Swarm agent settings; falls back to `SwarmConfig::default()` when the
    /// `[swarm]` table is absent.
    #[serde(default)]
    pub swarm: SwarmConfig,
}
12
/// Settings for swarm agent execution (the `[swarm]` config table).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SwarmConfig {
    /// CLI harness used to run swarm agents; defaults to "rho".
    #[serde(default = "default_swarm_harness")]
    pub harness: String,
    /// Default model for swarm agents (e.g., "xai/grok-code-fast-1", "opus").
    /// When unset, inherits from [llm].model.
    #[serde(default)]
    pub model: Option<String>,
    /// Number of agents dispatched per round; defaults to 5.
    #[serde(default = "default_round_size")]
    pub round_size: usize,
    /// Optional tag applied by default; defaults to `None`.
    #[serde(default = "default_default_tag")]
    pub default_tag: Option<String>,
    /// Use direct API instead of CLI harnesses.
    /// Requires `direct-api` Cargo feature.
    #[serde(default)]
    pub use_direct_api: bool,
    /// Provider for direct API mode: anthropic, openai, xai, openrouter, opencode-zen
    #[serde(default = "default_direct_api_provider")]
    pub direct_api_provider: String,
}
33
/// Serde default for `SwarmConfig::harness`.
fn default_swarm_harness() -> String {
    String::from("rho")
}
37
/// Serde default for `SwarmConfig::round_size`.
fn default_round_size() -> usize {
    // Agents dispatched per round when the config does not specify a value.
    const ROUND_SIZE: usize = 5;
    ROUND_SIZE
}
41
/// Serde default for `SwarmConfig::default_tag`: no tag unless configured.
fn default_default_tag() -> Option<String> {
    Option::<String>::None
}
45
/// Serde default for `SwarmConfig::direct_api_provider`: the
/// `SCUD_DIRECT_API_PROVIDER` env var when set, otherwise "anthropic".
fn default_direct_api_provider() -> String {
    match std::env::var("SCUD_DIRECT_API_PROVIDER") {
        Ok(provider) => provider,
        Err(_) => "anthropic".to_string(),
    }
}
49
50impl Default for SwarmConfig {
51    fn default() -> Self {
52        SwarmConfig {
53            harness: default_swarm_harness(),
54            model: std::env::var("SCUD_SWARM_MODEL").ok(),
55            round_size: default_round_size(),
56            default_tag: default_default_tag(),
57            use_direct_api: false,
58            direct_api_provider: default_direct_api_provider(),
59        }
60    }
61}
62
/// Per-tier LLM provider/model settings (the `[llm]` config table).
///
/// Three tiers are configured: the default provider/model, a "smart" tier
/// for validation/analysis, and a "fast" tier for generation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    /// Default provider
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Default model (used when no tier specified)
    #[serde(default = "default_model")]
    pub model: String,
    /// Smart provider for validation/analysis tasks
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    /// Smart model for validation/analysis tasks (large context)
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Fast provider for generation tasks
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    /// Fast model for generation tasks
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Max tokens for LLM requests
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}
87
/// Serde default for `LLMConfig::provider`: `SCUD_PROVIDER` env var or "xai".
fn default_provider() -> String {
    std::env::var("SCUD_PROVIDER").unwrap_or_else(|_| String::from("xai"))
}
91
/// Serde default for `LLMConfig::model`: `SCUD_MODEL` env var or the fast
/// xai model.
fn default_model() -> String {
    match std::env::var("SCUD_MODEL") {
        Ok(model) => model,
        Err(_) => "xai/grok-code-fast-1".to_string(),
    }
}
95
/// Serde default for `LLMConfig::smart_provider`: `SCUD_SMART_PROVIDER`
/// env var or "claude-cli".
fn default_smart_provider() -> String {
    std::env::var("SCUD_SMART_PROVIDER").unwrap_or_else(|_| String::from("claude-cli"))
}
99
/// Serde default for `LLMConfig::smart_model`: `SCUD_SMART_MODEL` env var
/// or "opus".
fn default_smart_model() -> String {
    match std::env::var("SCUD_SMART_MODEL") {
        Ok(model) => model,
        Err(_) => "opus".to_string(),
    }
}
103
/// Serde default for `LLMConfig::fast_provider`: `SCUD_FAST_PROVIDER`
/// env var or "xai".
fn default_fast_provider() -> String {
    std::env::var("SCUD_FAST_PROVIDER").unwrap_or_else(|_| String::from("xai"))
}
107
/// Serde default for `LLMConfig::fast_model`: `SCUD_FAST_MODEL` env var
/// or the fast xai model.
fn default_fast_model() -> String {
    match std::env::var("SCUD_FAST_MODEL") {
        Ok(model) => model,
        Err(_) => "xai/grok-code-fast-1".to_string(),
    }
}
111
/// Serde default for `LLMConfig::max_tokens`: parsed from `SCUD_MAX_TOKENS`
/// when set and numeric, otherwise 16000. A malformed env value silently
/// falls back to the default rather than erroring.
fn default_max_tokens() -> u32 {
    let from_env = std::env::var("SCUD_MAX_TOKENS")
        .ok()
        .and_then(|raw| raw.parse::<u32>().ok());
    match from_env {
        Some(tokens) => tokens,
        None => 16000,
    }
}
118
119impl Default for Config {
120    fn default() -> Self {
121        Config {
122            llm: LLMConfig {
123                provider: default_provider(),
124                model: default_model(),
125                smart_provider: default_smart_provider(),
126                smart_model: default_smart_model(),
127                fast_provider: default_fast_provider(),
128                fast_model: default_fast_model(),
129                max_tokens: default_max_tokens(),
130            },
131            swarm: SwarmConfig::default(),
132        }
133    }
134}
135
impl Config {
    /// Resolve the swarm model: swarm.model > llm.model
    pub fn swarm_model(&self) -> &str {
        self.swarm.model.as_deref().unwrap_or(&self.llm.model)
    }

    /// Load a `Config` from the TOML file at `path`.
    ///
    /// # Errors
    /// Returns an error (with the path attached as context) if the file
    /// cannot be read or does not parse as a valid `Config`.
    pub fn load(path: &Path) -> Result<Self> {
        let content = fs::read_to_string(path)
            .with_context(|| format!("Failed to read config file: {}", path.display()))?;

        toml::from_str(&content)
            .with_context(|| format!("Failed to parse config file: {}", path.display()))
    }

    /// Serialize this config as pretty TOML and write it to `path`, creating
    /// parent directories as needed.
    ///
    /// # Errors
    /// Returns an error if serialization, directory creation, or the file
    /// write fails.
    pub fn save(&self, path: &Path) -> Result<()> {
        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;

        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create config directory: {}", parent.display())
            })?;
        }

        fs::write(path, content)
            .with_context(|| format!("Failed to write config file: {}", path.display()))
    }

    /// Name of the environment variable holding the API key for the
    /// configured default provider.
    pub fn api_key_env_var(&self) -> &str {
        Self::api_key_env_var_for_provider(&self.llm.provider)
    }

    /// Map a provider name to the env var that stores its API key.
    ///
    /// Returns the sentinel string "NONE" for providers that need no key
    /// (local CLIs / OAuth), and the generic "API_KEY" for unknown providers.
    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
        match provider {
            "anthropic" => "ANTHROPIC_API_KEY",
            "anthropic-oauth" => "NONE", // Uses Claude Code OAuth from Keychain
            "xai" => "XAI_API_KEY",
            "openai" => "OPENAI_API_KEY",
            "openrouter" => "OPENROUTER_API_KEY",
            "opencode-zen" | "opencode" | "zen" => "OPENCODE_API_KEY",
            "claude-cli" => "NONE", // Claude CLI doesn't need API key
            "codex" => "NONE",      // Codex CLI doesn't need API key
            "cursor" => "NONE",     // Cursor Agent CLI doesn't need API key
            _ => "API_KEY",
        }
    }

    /// True when any configured tier (default/smart/fast) uses a provider
    /// that requires an API key — i.e. anything other than the keyless
    /// CLI/OAuth harnesses listed below.
    pub fn requires_api_key(&self) -> bool {
        let providers = [
            &self.llm.provider,
            &self.llm.smart_provider,
            &self.llm.fast_provider,
        ];
        providers.iter().any(|p| {
            !matches!(
                p.as_str(),
                "claude-cli" | "codex" | "cursor" | "anthropic-oauth"
            )
        })
    }

    /// HTTP endpoint for the configured default provider.
    ///
    /// NOTE(review): providers without an explicit arm here (including
    /// "opencode-zen"/"openrouter" variants not listed, and the CLI
    /// harnesses) fall back to the Anthropic endpoint — confirm that
    /// fallback is intended for non-Anthropic providers.
    pub fn api_endpoint(&self) -> &str {
        match self.llm.provider.as_str() {
            "anthropic" => "https://api.anthropic.com/v1/messages",
            "xai" => "https://api.x.ai/v1/chat/completions",
            "openai" => "https://api.openai.com/v1/chat/completions",
            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
            _ => "https://api.anthropic.com/v1/messages",
        }
    }

    /// Default model identifier for a provider; unknown providers fall back
    /// to the fast xai model.
    pub fn default_model_for_provider(provider: &str) -> &str {
        match provider {
            "xai" => "xai/grok-code-fast-1",
            "anthropic" => "claude-sonnet-4-5-20250929",
            "anthropic-oauth" => "claude-opus-4-6",
            "openai" => "o3-mini",
            "openrouter" => "anthropic/claude-sonnet-4.5",
            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
            "codex" => "gpt-5.1",     // Codex CLI default model
            "cursor" => "claude-4-sonnet", // Cursor Agent default model
            _ => "xai/grok-code-fast-1",
        }
    }

    /// Get suggested models for a provider (for display in init)
    ///
    /// Returns an empty list for unrecognized providers.
    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
        match provider {
            "xai" => vec![
                "xai/grok-code-fast-1",
                "xai/grok-4-1-fast",
                "xai/grok-4.20-experimental-beta-0304-reasoning",
                "xai/grok-4.20-experimental-beta-0304-non-reasoning",
                "xai/grok-4.20-multi-agent-experimental-beta-0304",
                "xai/grok-4-fast",
                "xai/grok-3-fast",
            ],
            "anthropic" => vec![
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
                "claude-opus-4-1-20250805",
            ],
            "anthropic-oauth" => vec![
                "claude-opus-4-6",
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
            ],
            "openai" => vec![
                "gpt-5.2-high",
                "gpt-5.1",
                "gpt-5.1-mini",
                "o3-mini",
                "o3",
                "o4-mini",
                "gpt-4.1",
            ],
            "openrouter" => vec![
                "anthropic/claude-sonnet-4.5",
                "anthropic/claude-opus-4.5",
                "openai/o3-mini",
                "openai/gpt-4.1",
                "xai/grok-4-1-fast-reasoning",
            ],
            "claude-cli" => vec![
                "opus",   // Claude Opus 4.5 - smart/reasoning
                "sonnet", // Claude Sonnet - fast/capable
                "haiku",  // Claude Haiku - fastest
            ],
            "codex" => vec![
                "gpt-5.2-high", // Smart/reasoning model
                "gpt-5.1",      // Capable model
                "gpt-5.1-mini", // Fast model
                "o3",           // Reasoning model
                "o3-mini",      // Fast reasoning
            ],
            "cursor" => vec![
                "claude-4-opus",   // Smart/reasoning
                "claude-4-sonnet", // Balanced
                "gpt-5",           // OpenAI model
                "gpt-5.2-high",    // High-capability
            ],
            _ => vec![],
        }
    }

    /// Get the smart provider (for validation/analysis tasks with large context)
    pub fn smart_provider(&self) -> &str {
        &self.llm.smart_provider
    }

    /// Get the smart model (for validation/analysis tasks with large context)
    pub fn smart_model(&self) -> &str {
        &self.llm.smart_model
    }

    /// Get the fast provider (for generation tasks)
    pub fn fast_provider(&self) -> &str {
        &self.llm.fast_provider
    }

    /// Get the fast model (for generation tasks)
    pub fn fast_model(&self) -> &str {
        &self.llm.fast_model
    }
}
302
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    // Defaults assume the SCUD_* env vars are unset in the test environment.
    #[test]
    fn test_default_config() {
        let config = Config::default();
        // Default provider is xai with xai/grok-code-fast-1 for speed
        assert_eq!(config.llm.provider, "xai");
        assert_eq!(config.llm.model, "xai/grok-code-fast-1");
        // Smart tier uses claude-cli with opus
        assert_eq!(config.llm.smart_provider, "claude-cli");
        assert_eq!(config.llm.smart_model, "opus");
        // Fast tier uses xai with xai/grok-code-fast-1
        assert_eq!(config.llm.fast_provider, "xai");
        assert_eq!(config.llm.fast_model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    // Tier accessor methods mirror the underlying llm fields.
    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_provider(), "claude-cli");
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_provider(), "xai");
        assert_eq!(config.fast_model(), "xai/grok-code-fast-1");
    }

    // Provider -> API-key env var mapping, plus the keyless-CLI case.
    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");

        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");

        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");

        // All three tiers on claude-cli => no API key required.
        config.llm.provider = "claude-cli".to_string();
        config.llm.smart_provider = "claude-cli".to_string();
        config.llm.fast_provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

    // Provider -> HTTP endpoint mapping.
    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );

        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );

        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    // Round-trip: save to a temp file, load, and compare every field.
    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
            swarm: SwarmConfig::default(),
        };

        config.save(&config_path).unwrap();
        assert!(config_path.exists());

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    // Per-provider default model identifiers.
    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "xai/grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    #[test]
    fn test_load_config_without_model_tiers() {
        // Test backward compatibility - loading a config without smart/fast models
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        // Write a config without smart_model and fast_model
        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "xai/grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "xai/grok-code-fast-1");
        // Should use defaults for missing fields
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "xai/grok-code-fast-1");
    }
}