use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
/// Top-level application configuration, serialized to/from TOML
/// (see `Config::load` / `Config::save`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Provider/model selection and request limits for LLM calls.
    pub llm: LLMConfig,
    /// Swarm-mode settings; when the `[swarm]` table is absent from the
    /// TOML file, `SwarmConfig::default()` is used.
    #[serde(default)]
    pub swarm: SwarmConfig,
}
/// Configuration for swarm runs. Every field has a serde default, so a
/// partial (or missing) `[swarm]` table deserializes cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SwarmConfig {
    /// Harness used to drive swarm workers (default: "rho").
    #[serde(default = "default_swarm_harness")]
    pub harness: String,
    /// Optional model override for swarm workers; when `None`,
    /// `Config::swarm_model` falls back to the primary `llm.model`.
    #[serde(default)]
    pub model: Option<String>,
    /// Size of a swarm round (default: 5). NOTE(review): presumably the
    /// number of workers/tasks per round — confirm against the swarm runner.
    #[serde(default = "default_round_size")]
    pub round_size: usize,
    /// Tag applied to swarm work by default, if any (default: none).
    #[serde(default = "default_default_tag")]
    pub default_tag: Option<String>,
    /// When true, use the provider API directly rather than the harness.
    #[serde(default)]
    pub use_direct_api: bool,
    /// Provider used for direct API calls (default: "anthropic",
    /// overridable via the `SCUD_DIRECT_API_PROVIDER` env var).
    #[serde(default = "default_direct_api_provider")]
    pub direct_api_provider: String,
}
/// Serde default for `SwarmConfig::harness`: the "rho" harness.
fn default_swarm_harness() -> String {
    String::from("rho")
}
/// Serde default for `SwarmConfig::round_size`.
fn default_round_size() -> usize {
    const DEFAULT_ROUND_SIZE: usize = 5;
    DEFAULT_ROUND_SIZE
}
/// Serde default for `SwarmConfig::default_tag`: no tag configured.
fn default_default_tag() -> Option<String> {
    Option::None
}
/// Serde default for `SwarmConfig::direct_api_provider`: the value of the
/// `SCUD_DIRECT_API_PROVIDER` env var when set, otherwise "anthropic".
fn default_direct_api_provider() -> String {
    match std::env::var("SCUD_DIRECT_API_PROVIDER") {
        Ok(provider) => provider,
        Err(_) => "anthropic".to_string(),
    }
}
impl Default for SwarmConfig {
    /// Builds a `SwarmConfig` mirroring the per-field serde defaults.
    /// `model` additionally honors the `SCUD_SWARM_MODEL` env var
    /// (unset → `None`).
    fn default() -> Self {
        Self {
            harness: default_swarm_harness(),
            model: std::env::var("SCUD_SWARM_MODEL").ok(),
            round_size: default_round_size(),
            default_tag: default_default_tag(),
            use_direct_api: bool::default(),
            direct_api_provider: default_direct_api_provider(),
        }
    }
}
/// LLM selection across three tiers: the primary (default) model plus a
/// "smart" (high-capability) and a "fast" (low-latency) tier. Every field
/// has a serde default, so any subset may be omitted from the TOML file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    /// Primary provider id (e.g. "xai", "anthropic", "claude-cli");
    /// default comes from `SCUD_PROVIDER` or "xai".
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Primary model id; default from `SCUD_MODEL` or "xai/grok-code-fast-1".
    #[serde(default = "default_model")]
    pub model: String,
    /// Provider for the smart tier; default from `SCUD_SMART_PROVIDER`.
    #[serde(default = "default_smart_provider")]
    pub smart_provider: String,
    /// Model for the smart tier; default from `SCUD_SMART_MODEL`.
    #[serde(default = "default_smart_model")]
    pub smart_model: String,
    /// Provider for the fast tier; default from `SCUD_FAST_PROVIDER`.
    #[serde(default = "default_fast_provider")]
    pub fast_provider: String,
    /// Model for the fast tier; default from `SCUD_FAST_MODEL`.
    #[serde(default = "default_fast_model")]
    pub fast_model: String,
    /// Max tokens per request; default from `SCUD_MAX_TOKENS` or 16000.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}
/// Serde default for `LLMConfig::provider`: `SCUD_PROVIDER` or "xai".
fn default_provider() -> String {
    match std::env::var("SCUD_PROVIDER") {
        Ok(value) => value,
        Err(_) => "xai".to_string(),
    }
}
/// Serde default for `LLMConfig::model`: `SCUD_MODEL` or
/// "xai/grok-code-fast-1".
fn default_model() -> String {
    match std::env::var("SCUD_MODEL") {
        Ok(value) => value,
        Err(_) => "xai/grok-code-fast-1".to_string(),
    }
}
/// Serde default for `LLMConfig::smart_provider`: `SCUD_SMART_PROVIDER`
/// or "claude-cli".
fn default_smart_provider() -> String {
    match std::env::var("SCUD_SMART_PROVIDER") {
        Ok(value) => value,
        Err(_) => "claude-cli".to_string(),
    }
}
/// Serde default for `LLMConfig::smart_model`: `SCUD_SMART_MODEL` or "opus".
fn default_smart_model() -> String {
    match std::env::var("SCUD_SMART_MODEL") {
        Ok(value) => value,
        Err(_) => "opus".to_string(),
    }
}
/// Serde default for `LLMConfig::fast_provider`: `SCUD_FAST_PROVIDER`
/// or "xai".
fn default_fast_provider() -> String {
    match std::env::var("SCUD_FAST_PROVIDER") {
        Ok(value) => value,
        Err(_) => "xai".to_string(),
    }
}
/// Serde default for `LLMConfig::fast_model`: `SCUD_FAST_MODEL` or
/// "xai/grok-code-fast-1".
fn default_fast_model() -> String {
    match std::env::var("SCUD_FAST_MODEL") {
        Ok(value) => value,
        Err(_) => "xai/grok-code-fast-1".to_string(),
    }
}
/// Serde default for `LLMConfig::max_tokens`: parses `SCUD_MAX_TOKENS`,
/// falling back to 16000 when the variable is unset or unparsable.
fn default_max_tokens() -> u32 {
    const FALLBACK: u32 = 16000;
    match std::env::var("SCUD_MAX_TOKENS") {
        Ok(raw) => raw.parse().unwrap_or(FALLBACK),
        Err(_) => FALLBACK,
    }
}
impl Default for Config {
    /// Builds a `Config` from the per-field serde default functions, so
    /// `Config::default()` matches what deserializing an empty TOML
    /// document would produce (modulo env-var overrides).
    fn default() -> Self {
        let llm = LLMConfig {
            provider: default_provider(),
            model: default_model(),
            smart_provider: default_smart_provider(),
            smart_model: default_smart_model(),
            fast_provider: default_fast_provider(),
            fast_model: default_fast_model(),
            max_tokens: default_max_tokens(),
        };
        Self {
            llm,
            swarm: SwarmConfig::default(),
        }
    }
}
impl Config {
    /// Sentinel returned by `api_key_env_var_for_provider` for providers
    /// that authenticate without an API key (CLI/OAuth backends).
    /// `requires_api_key` derives from this, so the set of key-less
    /// providers is defined in exactly one place.
    const NO_API_KEY: &'static str = "NONE";

    /// Model used for swarm work: the explicit `swarm.model` override
    /// when set, otherwise the primary `llm.model`.
    pub fn swarm_model(&self) -> &str {
        self.swarm.model.as_deref().unwrap_or(&self.llm.model)
    }

    /// Loads a `Config` from the TOML file at `path`.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read or is not valid TOML,
    /// with the offending path included in the context.
    pub fn load(path: &Path) -> Result<Self> {
        let content = fs::read_to_string(path)
            .with_context(|| format!("Failed to read config file: {}", path.display()))?;
        toml::from_str(&content)
            .with_context(|| format!("Failed to parse config file: {}", path.display()))
    }

    /// Serializes this config as pretty TOML and writes it to `path`,
    /// creating any missing parent directories first.
    ///
    /// # Errors
    /// Returns an error on serialization failure or any filesystem error.
    pub fn save(&self, path: &Path) -> Result<()> {
        let content =
            toml::to_string_pretty(self).context("Failed to serialize config to TOML")?;
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create config directory: {}", parent.display())
            })?;
        }
        fs::write(path, content)
            .with_context(|| format!("Failed to write config file: {}", path.display()))
    }

    /// Name of the environment variable holding the API key for the
    /// primary provider.
    pub fn api_key_env_var(&self) -> &str {
        Self::api_key_env_var_for_provider(&self.llm.provider)
    }

    /// Maps a provider id to the env var that holds its API key.
    /// Key-less providers map to `NO_API_KEY`; unknown providers fall
    /// back to the generic "API_KEY".
    pub fn api_key_env_var_for_provider(provider: &str) -> &str {
        match provider {
            "anthropic" => "ANTHROPIC_API_KEY",
            "anthropic-oauth" => Self::NO_API_KEY,
            "xai" => "XAI_API_KEY",
            "openai" => "OPENAI_API_KEY",
            "openrouter" => "OPENROUTER_API_KEY",
            "opencode-zen" | "opencode" | "zen" => "OPENCODE_API_KEY",
            "claude-cli" | "codex" | "cursor" => Self::NO_API_KEY,
            _ => "API_KEY",
        }
    }

    /// True when any configured tier (primary, smart, fast) uses a
    /// provider that authenticates with an API key. Derived from
    /// `api_key_env_var_for_provider` so this can never drift from the
    /// provider table above (previously the key-less set was duplicated
    /// here as a hardcoded list).
    pub fn requires_api_key(&self) -> bool {
        [
            &self.llm.provider,
            &self.llm.smart_provider,
            &self.llm.fast_provider,
        ]
        .iter()
        .any(|p| Self::api_key_env_var_for_provider(p) != Self::NO_API_KEY)
    }

    /// HTTP endpoint for the primary provider's completion API.
    /// Unknown providers fall back to the Anthropic messages endpoint.
    pub fn api_endpoint(&self) -> &str {
        match self.llm.provider.as_str() {
            "anthropic" => "https://api.anthropic.com/v1/messages",
            "xai" => "https://api.x.ai/v1/chat/completions",
            "openai" => "https://api.openai.com/v1/chat/completions",
            "openrouter" => "https://openrouter.ai/api/v1/chat/completions",
            _ => "https://api.anthropic.com/v1/messages",
        }
    }

    /// Recommended default model id for a provider; unknown providers
    /// fall back to "xai/grok-code-fast-1".
    pub fn default_model_for_provider(provider: &str) -> &str {
        match provider {
            "xai" => "xai/grok-code-fast-1",
            "anthropic" => "claude-sonnet-4-5-20250929",
            "anthropic-oauth" => "claude-opus-4-6",
            "openai" => "o3-mini",
            "openrouter" => "anthropic/claude-sonnet-4.5",
            "claude-cli" => "sonnet",
            "codex" => "gpt-5.1",
            "cursor" => "claude-4-sonnet",
            _ => "xai/grok-code-fast-1",
        }
    }

    /// Curated list of model ids to suggest for a provider (e.g. in an
    /// interactive picker). Unknown providers get an empty list.
    pub fn suggested_models_for_provider(provider: &str) -> Vec<&str> {
        match provider {
            "xai" => vec![
                "xai/grok-code-fast-1",
                "xai/grok-4-1-fast",
                "xai/grok-4.20-experimental-beta-0304-reasoning",
                "xai/grok-4.20-experimental-beta-0304-non-reasoning",
                "xai/grok-4.20-multi-agent-experimental-beta-0304",
                "xai/grok-4-fast",
                "xai/grok-3-fast",
            ],
            "anthropic" => vec![
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
                "claude-opus-4-1-20250805",
            ],
            "anthropic-oauth" => vec![
                "claude-opus-4-6",
                "claude-sonnet-4-5-20250929",
                "claude-opus-4-5-20251101",
                "claude-haiku-4-5-20251001",
            ],
            "openai" => vec![
                "gpt-5.2-high",
                "gpt-5.1",
                "gpt-5.1-mini",
                "o3-mini",
                "o3",
                "o4-mini",
                "gpt-4.1",
            ],
            "openrouter" => vec![
                "anthropic/claude-sonnet-4.5",
                "anthropic/claude-opus-4.5",
                "openai/o3-mini",
                "openai/gpt-4.1",
                "xai/grok-4-1-fast-reasoning",
            ],
            "claude-cli" => vec!["opus", "sonnet", "haiku"],
            "codex" => vec!["gpt-5.2-high", "gpt-5.1", "gpt-5.1-mini", "o3", "o3-mini"],
            "cursor" => vec!["claude-4-opus", "claude-4-sonnet", "gpt-5", "gpt-5.2-high"],
            _ => vec![],
        }
    }

    /// Provider configured for the smart (high-capability) tier.
    pub fn smart_provider(&self) -> &str {
        &self.llm.smart_provider
    }

    /// Model configured for the smart (high-capability) tier.
    pub fn smart_model(&self) -> &str {
        &self.llm.smart_model
    }

    /// Provider configured for the fast (low-latency) tier.
    pub fn fast_provider(&self) -> &str {
        &self.llm.fast_provider
    }

    /// Model configured for the fast (low-latency) tier.
    pub fn fast_model(&self) -> &str {
        &self.llm.fast_model
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests for config defaults, provider/key mapping, endpoint
    //! selection, and TOML round-tripping.
    use super::*;
    use tempfile::TempDir;

    // Config::default() should reflect the documented fallback values
    // (assumes the SCUD_* env vars are unset in the test environment).
    #[test]
    fn test_default_config() {
        let config = Config::default();
        assert_eq!(config.llm.provider, "xai");
        assert_eq!(config.llm.model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.smart_provider, "claude-cli");
        assert_eq!(config.llm.smart_model, "opus");
        assert_eq!(config.llm.fast_provider, "xai");
        assert_eq!(config.llm.fast_model, "xai/grok-code-fast-1");
        assert_eq!(config.llm.max_tokens, 16000);
    }

    // The tier accessor methods should mirror the underlying llm fields.
    #[test]
    fn test_model_tiers() {
        let config = Config::default();
        assert_eq!(config.smart_provider(), "claude-cli");
        assert_eq!(config.smart_model(), "opus");
        assert_eq!(config.fast_provider(), "xai");
        assert_eq!(config.fast_model(), "xai/grok-code-fast-1");
    }

    // API-key env-var mapping per provider, and requires_api_key() going
    // false when all three tiers use a key-less (CLI) provider.
    #[test]
    fn test_api_key_env_vars() {
        let mut config = Config::default();
        config.llm.provider = "anthropic".to_string();
        assert_eq!(config.api_key_env_var(), "ANTHROPIC_API_KEY");
        config.llm.provider = "xai".to_string();
        assert_eq!(config.api_key_env_var(), "XAI_API_KEY");
        config.llm.provider = "openai".to_string();
        assert_eq!(config.api_key_env_var(), "OPENAI_API_KEY");
        config.llm.provider = "claude-cli".to_string();
        config.llm.smart_provider = "claude-cli".to_string();
        config.llm.fast_provider = "claude-cli".to_string();
        assert!(!config.requires_api_key());
    }

    // Endpoint selection follows the primary provider.
    #[test]
    fn test_api_endpoints() {
        let mut config = Config::default();
        config.llm.provider = "anthropic".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.anthropic.com/v1/messages"
        );
        config.llm.provider = "xai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.x.ai/v1/chat/completions"
        );
        config.llm.provider = "openai".to_string();
        assert_eq!(
            config.api_endpoint(),
            "https://api.openai.com/v1/chat/completions"
        );
    }

    // Round-trip: save() writes TOML that load() parses back unchanged.
    #[test]
    fn test_save_and_load_config() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");
        let config = Config {
            llm: LLMConfig {
                provider: "claude-cli".to_string(),
                model: "sonnet".to_string(),
                smart_provider: "claude-cli".to_string(),
                smart_model: "opus".to_string(),
                fast_provider: "xai".to_string(),
                fast_model: "haiku".to_string(),
                max_tokens: 8192,
            },
            swarm: SwarmConfig::default(),
        };
        config.save(&config_path).unwrap();
        assert!(config_path.exists());
        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "claude-cli");
        assert_eq!(loaded.llm.model, "sonnet");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "haiku");
        assert_eq!(loaded.llm.max_tokens, 8192);
    }

    // Per-provider default model ids.
    #[test]
    fn test_default_models() {
        assert_eq!(
            Config::default_model_for_provider("xai"),
            "xai/grok-code-fast-1"
        );
        assert_eq!(
            Config::default_model_for_provider("anthropic"),
            "claude-sonnet-4-5-20250929"
        );
        assert_eq!(Config::default_model_for_provider("openai"), "o3-mini");
        assert_eq!(Config::default_model_for_provider("claude-cli"), "sonnet");
    }

    // A TOML file that omits the smart/fast tier fields should fill them
    // from the serde defaults (assumes SCUD_* env vars are unset).
    #[test]
    fn test_load_config_without_model_tiers() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");
        std::fs::write(
            &config_path,
            r#"[llm]
provider = "xai"
model = "xai/grok-code-fast-1"
max_tokens = 4096
"#,
        )
        .unwrap();
        let loaded = Config::load(&config_path).unwrap();
        assert_eq!(loaded.llm.provider, "xai");
        assert_eq!(loaded.llm.model, "xai/grok-code-fast-1");
        assert_eq!(loaded.llm.smart_provider, "claude-cli");
        assert_eq!(loaded.llm.smart_model, "opus");
        assert_eq!(loaded.llm.fast_provider, "xai");
        assert_eq!(loaded.llm.fast_model, "xai/grok-code-fast-1");
    }
}