// scud/config.rs

1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::fs;
4use std::path::Path;
5
/// Top-level application configuration, persisted to disk as TOML.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// LLM provider/model settings (the only section currently stored).
    pub llm: LLMConfig,
}
10
11#[derive(Debug, Clone, Serialize, Deserialize)]
12pub struct LLMConfig {
13    pub provider: String,
14    pub model: String,
15    #[serde(default)]
16    pub max_tokens: u32,
17}
18
19impl Default for Config {
20    fn default() -> Self {
21        Config {
22            llm: LLMConfig {
23                provider: "xai".to_string(),
24                model: "grok-code-fast-1".to_string(),
25                max_tokens: 4096,
26            },
27        }
28    }
29}
30
31impl Config {
32    pub fn load(path: &Path) -> Result<Self> {
33        let content = fs::read_to_string(path)
34            .with_context(|| format!("Failed to read config file: {}", path.display()))?;
35
36        toml::from_str(&content)
37            .with_context(|| format!("Failed to parse config file: {}", path.display()))
38    }
39
40    pub fn save(&self, path: &Path) -> Result<()> {
41        let content = toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;
42
43        if let Some(parent) = path.parent() {
44            fs::create_dir_all(parent).with_context(|| {
45                format!("Failed to create config directory: {}", parent.display())
46            })?;
47        }
48
49        fs::write(path, content)
50            .with_context(|| format!("Failed to write config file: {}", path.display()))
51    }
52
53    pub fn api_key_env_var(&self) -> &str {
54        match self.llm.provider.as_str() {
55            "anthropic" => "ANTHROPIC_API_KEY",
56            "xai" => "XAI_API_KEY",
57            "openai" => "OPENAI_API_KEY",
58            "openrouter" => "OPENROUTER_API_KEY",
59            "claude-cli" => "NONE", // Claude CLI doesn't need API key
60            _ => "API_KEY",
61        }
62    }
63
64    pub fn requires_api_key(&self) -> bool {
65        self.llm.provider != "claude-cli"
66    }
67
68    pub fn api_endpoint(&self) -> &str {
69        match self.llm.provider.as_str() {
70            "anthropic" => "https://api.anthropic.com/v1/messages",
71            "xai" => "https://api.x.ai/v1/chat/completions",
72            "openai" => "https://api.openai.com/v1/chat/completions",
73            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
74            _ => "https://api.anthropic.com/v1/messages",
75        }
76    }
77
78    pub fn default_model_for_provider(provider: &str) -> &str {
79        match provider {
80            "xai" => "grok-code-fast-1",
81            "anthropic" => "claude-sonnet-4-5-20250929",
82            "openai" => "o3-mini",
83            "openrouter" => "anthropic/claude-sonnet-4.5",
84            "claude-cli" => "sonnet", // Claude CLI model names: sonnet, opus, haiku
85            _ => "grok-code-fast-1",
86        }
87    }
88
89    /// Get suggested models for a provider (for display in init)
90    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
91        match provider {
92            "xai" => vec![
93                "grok-code-fast-1",
94                "grok-4-1-fast-reasoning",
95                "grok-4-1-fast",
96                "grok-3-fast",
97            ],
98            "anthropic" => vec![
99                "claude-sonnet-4-5-20250929",
100                "claude-opus-4-5-20251101",
101                "claude-haiku-4-5-20251001",
102                "claude-opus-4-1-20250805",
103            ],
104            "openai" => vec![
105                "gpt-5.1",
106                "gpt-5.1-mini",
107                "o3-mini",
108                "o3",
109                "o4-mini",
110                "gpt-4.1",
111            ],
112            "openrouter" => vec![
113                "anthropic/claude-sonnet-4.5",
114                "anthropic/claude-opus-4.5",
115                "openai/o3-mini",
116                "openai/gpt-4.1",
117                "xai/grok-4-1-fast-reasoning",
118            ],
119            _ => vec![],
120        }
121    }
122}
123
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_default_config() {
        let cfg = Config::default();
        assert_eq!(cfg.llm.provider, "xai");
        assert_eq!(cfg.llm.model, "grok-code-fast-1");
        assert_eq!(cfg.llm.max_tokens, 4096);
    }

    #[test]
    fn test_api_key_env_vars() {
        // Table-driven: provider id -> expected env var name.
        let mut cfg = Config::default();
        for (provider, expected) in [
            ("anthropic", "ANTHROPIC_API_KEY"),
            ("xai", "XAI_API_KEY"),
            ("openai", "OPENAI_API_KEY"),
        ] {
            cfg.llm.provider = provider.to_string();
            assert_eq!(cfg.api_key_env_var(), expected);
        }
    }

    #[test]
    fn test_api_endpoints() {
        // Table-driven: provider id -> expected endpoint URL.
        let mut cfg = Config::default();
        for (provider, endpoint) in [
            ("anthropic", "https://api.anthropic.com/v1/messages"),
            ("xai", "https://api.x.ai/v1/chat/completions"),
            ("openai", "https://api.openai.com/v1/chat/completions"),
        ] {
            cfg.llm.provider = provider.to_string();
            assert_eq!(cfg.api_endpoint(), endpoint);
        }
    }

    #[test]
    fn test_save_and_load_config() {
        let dir = TempDir::new().unwrap();
        let path = dir.path().join("config.toml");

        let original = Config {
            llm: LLMConfig {
                provider: "xai".to_string(),
                model: "grok-code-fast-1".to_string(),
                max_tokens: 8192,
            },
        };

        original.save(&path).unwrap();
        assert!(path.exists());

        // Round-trip must preserve every field.
        let loaded = Config::load(&path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "grok-code-fast-1");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
    }
}