use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;

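/// Top-level configuration, stored as TOML and loaded via [`Config::load`].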
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub llm: LLMConfig,
    #[serde(default)]
    pub swarm: SwarmConfig,
}

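/// Swarm settings; every field falls back to a serde default when omitted from the config file.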
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SwarmConfig {
    #[serde(default = "default_swarm_harness")]
    pub harness: String,
    #[serde(default = "default_round_size")]
    pub round_size: usize,
    #[serde(default = "default_default_tag")]
    pub default_tag: Option<String>,
}

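// Serde default helpers for [`SwarmConfig`].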
fn default_swarm_harness() -> String {
    "claude".to_string()
}

fn default_round_size() -> usize {
    5
}

fn default_default_tag() -> Option<String> {
    None
}

impl Default for SwarmConfig {
    fn default() -> Self {
        SwarmConfig {
            harness: default_swarm_harness(),
            round_size: default_round_size(),
            default_tag: default_default_tag(),
        }
    }
}

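/// LLM provider and model selection, including the "smart" and "fast" tiers.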
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    #[serde(default = "default_provider")]
    pub provider: String,
    #[serde(default = "default_model")]
    pub model: String,
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}

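// Serde default helpers for [`LLMConfig`]: each consults a `SCUD_*` environment
// variable before falling back to a built-in value.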
fn default_provider() -> String {
    std::env::var("SCUD_PROVIDER").unwrap_or_else(|_| "xai".to_string())
}

fn default_model() -> String {
    std::env::var("SCUD_MODEL").unwrap_or_else(|_| "xai/grok-code-fast-1".to_string())
}

fn default_smart_provider() -> String {
    std::env::var("SCUD_SMART_PROVIDER").unwrap_or_else(|_| "claude-cli".to_string())
}

fn default_smart_model() -> String {
    std::env::var("SCUD_SMART_MODEL").unwrap_or_else(|_| "opus".to_string())
}

fn default_fast_provider() -> String {
    std::env::var("SCUD_FAST_PROVIDER").unwrap_or_else(|_| "xai".to_string())
}

fn default_fast_model() -> String {
    std::env::var("SCUD_FAST_MODEL").unwrap_or_else(|_| "xai/grok-code-fast-1".to_string())
}

fn default_max_tokens() -> u32 {
    std::env::var("SCUD_MAX_TOKENS")
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or(16000)
}

impl Default for Config {
    fn default() -> Self {
        Config {
            llm: LLMConfig {
                provider: default_provider(),
                model: default_model(),
                smart_provider: default_smart_provider(),
                smart_model: default_smart_model(),
                fast_provider: default_fast_provider(),
                fast_model: default_fast_model(),
                max_tokens: default_max_tokens(),
            },
            swarm: SwarmConfig::default(),
        }
    }
}

impl Config {
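    /// Reads and parses a TOML config file at `path`.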
    pub fn load(path: &Path) -> Result<Self> {
        let content = fs::read_to_string(path)
            .with_context(|| format!("Failed to read config file: {}", path.display()))?;

        toml::from_str(&content)
            .with_context(|| format!("Failed to parse config file: {}", path.display()))
    }

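    /// Serializes the config as pretty TOML, creating the parent directory if needed.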
    pub fn save(&self, path: &Path) -> Result<()> {
        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;

        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create config directory: {}", parent.display())
            })?;
        }

        fs::write(path, content)
            .with_context(|| format!("Failed to write config file: {}", path.display()))
    }

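    /// Name of the environment variable holding the API key for the primary provider.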
    pub fn api_key_env_var(&self) -> &str {
        Self::api_key_env_var_for_provider(&self.llm.provider)
    }

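    /// Name of the API-key environment variable for `provider`; CLI-backed providers
    /// ("claude-cli", "codex") return the sentinel "NONE".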
    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
        match provider {
            "anthropic" => "ANTHROPIC_API_KEY",
            "xai" => "XAI_API_KEY",
            "openai" => "OPENAI_API_KEY",
            "openrouter" => "OPENROUTER_API_KEY",
            "claude-cli" => "NONE",
            "codex" => "NONE",
            _ => "API_KEY",
        }
    }

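    /// True if any configured provider tier needs an API key (i.e. is not a CLI-backed provider).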
    pub fn requires_api_key(&self) -> bool {
        let providers = [
            &self.llm.provider,
            &self.llm.smart_provider,
            &self.llm.fast_provider,
        ];
        providers
            .iter()
            .any(|p| !matches!(p.as_str(), "claude-cli" | "codex"))
    }

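    /// HTTP endpoint for the primary provider; unknown providers fall back to the Anthropic messages API.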
    pub fn api_endpoint(&self) -> &str {
        match self.llm.provider.as_str() {
            "anthropic" => "https://api.anthropic.com/v1/messages",
            "xai" => "https://api.x.ai/v1/chat/completions",
            "openai" => "https://api.openai.com/v1/chat/completions",
            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
            _ => "https://api.anthropic.com/v1/messages",
        }
    }

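    /// Default model identifier to use when `provider` is selected.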
    pub fn default_model_for_provider(provider: &str) -> &str {
        match provider {
            "xai" => "xai/grok-code-fast-1",
            "anthropic" => "claude-sonnet-4-5-20250929",
            "openai" => "o3-mini",
            "openrouter" => "anthropic/claude-sonnet-4.5",
            "claude-cli" => "sonnet",
            "codex" => "gpt-5.1",
            _ => "xai/grok-code-fast-1",
        }
    }

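    /// Candidate models to suggest for `provider`; unknown providers yield an empty list.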
    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
        match provider {
            "xai" => vec![
                "xai/grok-code-fast-1",
                "xai/grok-4-1-fast",
                "xai/grok-4-fast",
                "xai/grok-3-fast",
            ],
            "anthropic" => vec![
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
                "claude-opus-4-1-20250805",
            ],
            "openai" => vec![
                "gpt-5.2-high",
                "gpt-5.1",
                "gpt-5.1-mini",
                "o3-mini",
                "o3",
                "o4-mini",
                "gpt-4.1",
            ],
            "openrouter" => vec![
                "anthropic/claude-sonnet-4.5",
                "anthropic/claude-opus-4.5",
                "openai/o3-mini",
                "openai/gpt-4.1",
                "xai/grok-4-1-fast-reasoning",
            ],
            "claude-cli" => vec![
                "opus",
                "sonnet",
                "haiku",
            ],
            "codex" => vec![
                "gpt-5.2-high",
                "gpt-5.1",
                "gpt-5.1-mini",
                "o3",
                "o3-mini",
            ],
            _ => vec![],
        }
    }

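    // Accessors for the "smart" and "fast" model tiers configured in [`LLMConfig`].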
    pub fn smart_provider(&self) -> &str {
        &self.llm.smart_provider
    }

    pub fn smart_model(&self) -> &str {
        &self.llm.smart_model
    }

    pub fn fast_provider(&self) -> &str {
        &self.llm.fast_provider
    }

    pub fn fast_model(&self) -> &str {
        &self.llm.fast_model
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_default_config() {
        let config = Config::default();
        assert_eq!(config.llm.provider, "xai");
        assert_eq!(config.llm.model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.smart_provider, "claude-cli");
        assert_eq!(config.llm.smart_model, "opus");
        assert_eq!(config.llm.fast_provider, "xai");
        assert_eq!(config.llm.fast_model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_provider(), "claude-cli");
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_provider(), "xai");
        assert_eq!(config.fast_model(), "xai/grok-code-fast-1");
    }

    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");

        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");

        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");

        config.llm.provider = "claude-cli".to_string();
        config.llm.smart_provider = "claude-cli".to_string();
        config.llm.fast_provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();

        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );

        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );

        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
            swarm: SwarmConfig::default(),
        };

        config.save(&config_path).unwrap();
        assert!(config_path.exists());

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "xai/grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    #[test]
    fn test_load_config_without_model_tiers() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "xai/grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();

        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "xai/grok-code-fast-1");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "xai/grok-code-fast-1");
    }
}