use crate::agent::context_manager::ContextConfig;
use crate::llm::retry::RetryConfig;
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::{Path, PathBuf};
/// Top-level application configuration.
///
/// Loaded from a JSON file (default location: `<config_dir>/xcode/config.json`),
/// then layered with `XCODE_*` environment variables and CLI overrides — see
/// [`Config::load_from_path`] for the exact precedence. `#[serde(default)]`
/// keeps older config files that lack newer fields parseable.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(default)]
pub struct Config {
    /// API endpoint and credentials for the LLM provider.
    pub provider: ProviderConfig,
    /// Model identifier sent to the provider (default "gpt-4o").
    pub model: String,
    /// Project directory for the agent to operate in; `None` presumably
    /// falls back to the current working directory — confirm with callers.
    pub project_dir: Option<PathBuf>,
    /// Sandboxing settings for tool execution.
    pub sandbox: SandboxConfig,
    /// Agent-loop limits and behavior flags.
    pub agent: AgentConfig,
    /// Language-server integration settings (disabled by default).
    #[serde(default)]
    pub lsp: LspConfig,
    /// MCP servers to spawn; defaults to none when absent from the file.
    #[serde(default)]
    pub mcp_servers: Vec<McpServerConfig>,
    /// User-defined shell-command tools exposed to the agent.
    #[serde(default)]
    pub custom_tools: Vec<CustomToolConfig>,
    /// Per-tool permission rules.
    #[serde(default)]
    pub permissions: Vec<PermissionRule>,
    /// Map of file type/extension to formatter command — TODO confirm the
    /// key convention against the code that consumes this map.
    #[serde(default)]
    pub formatters: std::collections::HashMap<String, String>,
}
/// OpenAI-compatible API endpoint settings.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(default)]
pub struct ProviderConfig {
    /// Base URL of the API (default "https://api.openai.com/v1").
    pub api_base: String,
    /// API key; empty by default and typically supplied via the config
    /// file, the `XCODE_API_KEY` env var, or the CLI override.
    pub api_key: String,
}
/// Sandboxing settings for tool/command execution.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(default)]
pub struct SandboxConfig {
    /// Whether sandboxing is active (default true; `--no-sandbox` turns it off).
    pub enabled: bool,
    /// Path to the sandbox (`sbox`) binary; `None` presumably means it is
    /// discovered automatically — confirm with the sandbox launcher.
    pub sbox_path: Option<String>,
}
/// Limits and behavior knobs for the agent loop.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(default)]
pub struct AgentConfig {
    /// Maximum number of agent loop iterations (default 25).
    pub max_iterations: u32,
    /// Maximum tool calls honored in a single model response (default 10).
    pub max_tool_calls_per_response: u32,
    /// Maximum automatic continuations (default 20) — presumably caps how
    /// often the agent resumes without user input; confirm with the loop.
    pub max_auto_continues: u32,
    /// Retry/backoff policy for LLM requests.
    pub retry: RetryConfig,
    /// Context-window management settings.
    pub context: ContextConfig,
    /// Compact output mode (default false; `--compact` turns it on).
    pub compact_mode: bool,
}
/// CLI-supplied overrides applied last in [`Config::load_from_path`], taking
/// precedence over both the config file and `XCODE_*` environment variables.
#[derive(Debug, Clone, Default)]
pub struct ConfigOverrides {
    /// Overrides `provider.api_key` when `Some`.
    pub api_key: Option<String>,
    /// Overrides `provider.api_base` when `Some`.
    pub api_base: Option<String>,
    /// Overrides `model` when `Some`.
    pub model: Option<String>,
    /// Overrides `project_dir` when `Some`.
    pub project_dir: Option<PathBuf>,
    /// When true, forces `sandbox.enabled = false`.
    pub no_sandbox: bool,
    /// When true, forces `agent.compact_mode = true`.
    pub compact: bool,
}
impl Default for Config {
fn default() -> Self {
Config {
provider: ProviderConfig {
api_base: "https://api.openai.com/v1".to_string(),
api_key: String::new(),
},
model: "gpt-4o".to_string(),
project_dir: None,
sandbox: SandboxConfig {
enabled: true,
sbox_path: None,
},
agent: AgentConfig {
max_iterations: 25,
max_tool_calls_per_response: 10,
max_auto_continues: 20,
retry: RetryConfig::default(),
context: ContextConfig::default(),
compact_mode: false,
},
lsp: LspConfig::default(),
mcp_servers: Vec::new(),
custom_tools: Vec::new(),
permissions: Vec::new(),
formatters: std::collections::HashMap::new(),
}
}
}
impl Default for ProviderConfig {
fn default() -> Self {
ProviderConfig {
api_base: "https://api.openai.com/v1".to_string(),
api_key: String::new(),
}
}
}
impl Default for SandboxConfig {
fn default() -> Self {
SandboxConfig {
enabled: true,
sbox_path: None,
}
}
}
impl Default for AgentConfig {
fn default() -> Self {
AgentConfig {
max_iterations: 25,
max_tool_calls_per_response: 10,
max_auto_continues: 20,
retry: RetryConfig::default(),
context: ContextConfig::default(),
compact_mode: false,
}
}
}
/// Configuration for spawning one MCP (Model Context Protocol) server
/// as a child process.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(default)]
pub struct McpServerConfig {
    /// Human-readable server name (e.g. "filesystem").
    pub name: String,
    /// Executable to launch (e.g. "npx").
    pub command: String,
    /// Arguments passed to the command.
    pub args: Vec<String>,
    /// Extra environment variables set for the child process.
    pub env: std::collections::HashMap<String, String>,
}
/// A user-defined tool backed by a shell command.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(default)]
pub struct CustomToolConfig {
    /// Tool name exposed to the model.
    pub name: String,
    /// Description shown to the model so it knows when to call the tool.
    pub description: String,
    /// Command executed when the tool is invoked.
    pub command: String,
    /// Parameter specification — presumably a JSON Schema for the tool's
    /// arguments; confirm against the tool executor.
    pub parameters: serde_json::Value,
}
/// Per-tool permission rule.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(default)]
pub struct PermissionRule {
    /// Name of the tool this rule applies to.
    pub tool: String,
    /// When true, presumably requires user confirmation before the tool
    /// runs — confirm with the permission-checking code.
    pub confirm: bool,
}
/// Language-server (LSP) integration settings.
/// `Default` yields `enabled: false` with no command, i.e. LSP off.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(default)]
pub struct LspConfig {
    /// Whether LSP integration is active.
    pub enabled: bool,
    /// Language-server executable to launch; `None` presumably disables or
    /// auto-detects the server — confirm with the LSP client code.
    pub server_command: Option<String>,
    /// Arguments passed to the server command.
    pub args: Vec<String>,
}
impl Config {
#[allow(dead_code)]
pub fn load(overrides: &ConfigOverrides) -> Result<Self> {
Self::load_from_path(None, overrides)
}
pub fn load_from_path(path: Option<&Path>, overrides: &ConfigOverrides) -> Result<Self> {
let mut config = Config::default();
let config_path = if let Some(p) = path {
p.to_path_buf()
} else {
let base = dirs::config_dir().context("Could not determine config directory")?;
base.join("xcode").join("config.json")
};
if config_path.exists() {
let content = fs::read_to_string(&config_path)
.with_context(|| format!("Failed to read config file: {:?}", config_path))?;
let file_config: Config =
serde_json::from_str(&content).context("Failed to parse config JSON")?;
config = file_config;
} else {
if let Some(parent) = config_path.parent() {
fs::create_dir_all(parent)
.with_context(|| format!("Failed to create config directory: {:?}", parent))?;
}
let default_json = serde_json::to_string_pretty(&config)
.context("Failed to serialize default config")?;
fs::write(&config_path, default_json)
.with_context(|| format!("Failed to write default config: {:?}", config_path))?;
}
if let Ok(api_key) = std::env::var("XCODE_API_KEY") {
config.provider.api_key = api_key;
}
if let Ok(api_base) = std::env::var("XCODE_API_BASE") {
config.provider.api_base = api_base;
}
if let Ok(model) = std::env::var("XCODE_MODEL") {
config.model = model;
}
if let Some(api_key) = &overrides.api_key {
config.provider.api_key = api_key.clone();
}
if let Some(api_base) = &overrides.api_base {
config.provider.api_base = api_base.clone();
}
if let Some(model) = &overrides.model {
config.model = model.clone();
}
if let Some(project_dir) = &overrides.project_dir {
config.project_dir = Some(project_dir.clone());
}
if overrides.no_sandbox {
config.sandbox.enabled = false;
}
if overrides.compact {
config.agent.compact_mode = true;
}
Ok(config)
}
pub fn save_provider(api_base: &str, api_key: &str) -> Result<()> {
let base = dirs::config_dir().context("Could not determine config directory")?;
let config_path = base.join("xcode").join("config.json");
let mut config = if config_path.exists() {
let content = fs::read_to_string(&config_path)
.with_context(|| format!("Failed to read config file: {:?}", config_path))?;
serde_json::from_str::<Config>(&content).unwrap_or_default()
} else {
Config::default()
};
config.provider.api_base = api_base.to_string();
config.provider.api_key = api_key.to_string();
if let Some(parent) = config_path.parent() {
fs::create_dir_all(parent)?;
}
let json = serde_json::to_string_pretty(&config).context("Failed to serialize config")?;
fs::write(&config_path, json)
.with_context(|| format!("Failed to write config file: {:?}", config_path))?;
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{Mutex, MutexGuard};

    /// Serializes tests that mutate the process-global `XCODE_*` env vars.
    static TEST_LOCK: Mutex<()> = Mutex::new(());

    /// Acquires the env-var lock, recovering from poisoning so that one
    /// failing test does not cascade `PoisonError` panics into every
    /// subsequent test.
    fn env_lock() -> MutexGuard<'static, ()> {
        TEST_LOCK.lock().unwrap_or_else(|poisoned| poisoned.into_inner())
    }

    /// Removes every `XCODE_*` variable so each test starts from a clean slate.
    fn clear_xcode_env() {
        std::env::remove_var("XCODE_API_KEY");
        std::env::remove_var("XCODE_API_BASE");
        std::env::remove_var("XCODE_MODEL");
    }

    #[test]
    fn test_default_config() {
        let config = Config::default();
        assert_eq!(config.agent.max_iterations, 25);
        assert_eq!(config.agent.max_tool_calls_per_response, 10);
        assert!(config.sandbox.enabled);
        assert_eq!(config.model, "gpt-4o");
        assert_eq!(config.provider.api_base, "https://api.openai.com/v1");
        assert_eq!(config.agent.retry.max_retries, 5);
        assert_eq!(config.agent.retry.initial_delay_ms, 1000);
    }

    #[test]
    fn test_load_from_file() {
        let _lock = env_lock();
        clear_xcode_env();
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        let test_config = Config {
            provider: ProviderConfig {
                api_base: "https://custom.api.com".to_string(),
                api_key: "test_key_123".to_string(),
            },
            model: "gpt-4-turbo".to_string(),
            project_dir: Some(PathBuf::from("/test/project")),
            sandbox: SandboxConfig {
                enabled: false,
                sbox_path: Some("/tmp/sbox".to_string()),
            },
            agent: AgentConfig {
                max_iterations: 50,
                max_tool_calls_per_response: 20,
                ..Default::default()
            },
            ..Default::default()
        };
        let json = serde_json::to_string_pretty(&test_config).unwrap();
        fs::write(&config_path, json).unwrap();
        let loaded =
            Config::load_from_path(Some(&config_path), &ConfigOverrides::default()).unwrap();
        assert_eq!(loaded.provider.api_base, "https://custom.api.com");
        assert_eq!(loaded.provider.api_key, "test_key_123");
        assert_eq!(loaded.model, "gpt-4-turbo");
        assert_eq!(loaded.agent.max_iterations, 50);
        assert_eq!(loaded.agent.max_tool_calls_per_response, 20);
        assert!(!loaded.sandbox.enabled);
    }

    #[test]
    fn test_env_override() {
        let _lock = env_lock();
        clear_xcode_env();
        std::env::set_var("XCODE_API_KEY", "env_test_key");
        std::env::set_var("XCODE_API_BASE", "https://env.api.com");
        std::env::set_var("XCODE_MODEL", "env-model");
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        let config =
            Config::load_from_path(Some(&config_path), &ConfigOverrides::default()).unwrap();
        assert_eq!(config.provider.api_key, "env_test_key");
        assert_eq!(config.provider.api_base, "https://env.api.com");
        assert_eq!(config.model, "env-model");
        clear_xcode_env();
    }

    #[test]
    fn test_cli_override_takes_precedence() {
        let _lock = env_lock();
        clear_xcode_env();
        std::env::set_var("XCODE_API_KEY", "env_key");
        std::env::set_var("XCODE_MODEL", "env-model");
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        let overrides = ConfigOverrides {
            api_key: Some("cli_key".to_string()),
            api_base: Some("https://cli.api.com".to_string()),
            model: Some("cli-model".to_string()),
            ..Default::default()
        };
        let config = Config::load_from_path(Some(&config_path), &overrides).unwrap();
        assert_eq!(config.provider.api_key, "cli_key");
        assert_eq!(config.provider.api_base, "https://cli.api.com");
        assert_eq!(config.model, "cli-model");
        clear_xcode_env();
    }

    #[test]
    fn test_sandbox_disable_override() {
        let _lock = env_lock();
        clear_xcode_env();
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        let overrides = ConfigOverrides {
            no_sandbox: true,
            ..Default::default()
        };
        let config = Config::load_from_path(Some(&config_path), &overrides).unwrap();
        assert!(!config.sandbox.enabled);
    }

    #[test]
    fn test_backwards_compatible_config() {
        let _lock = env_lock();
        clear_xcode_env();
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        // An "old" config missing the newer fields must still parse thanks
        // to #[serde(default)], with absent fields taking their defaults.
        let old_config_json = r#"{
            "provider": {
                "api_base": "https://api.openai.com/v1",
                "api_key": "old-key"
            },
            "model": "gpt-4o",
            "sandbox": {
                "enabled": false
            },
            "agent": {
                "max_iterations": 25,
                "max_tool_calls_per_response": 10
            }
        }"#;
        fs::write(&config_path, old_config_json).unwrap();
        let loaded =
            Config::load_from_path(Some(&config_path), &ConfigOverrides::default()).unwrap();
        assert_eq!(loaded.provider.api_key, "old-key");
        assert_eq!(loaded.agent.max_iterations, 25);
        assert!(!loaded.sandbox.enabled);
        assert_eq!(loaded.agent.max_auto_continues, 20);
        assert_eq!(loaded.agent.retry.max_retries, 5);
    }

    #[test]
    fn test_mcp_config_parsing() {
        let _lock = env_lock();
        clear_xcode_env();
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        let json = r#"{
            "provider": {
                "api_base": "https://api.openai.com/v1",
                "api_key": "test-key"
            },
            "model": "gpt-4o",
            "mcp_servers": [
                {
                    "name": "filesystem",
                    "command": "npx",
                    "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"],
                    "env": { "MCP_FS_ROOT": "/tmp" }
                }
            ]
        }"#;
        fs::write(&config_path, json).unwrap();
        let cfg = Config::load_from_path(Some(&config_path), &ConfigOverrides::default()).unwrap();
        assert_eq!(cfg.mcp_servers.len(), 1);
        let srv = &cfg.mcp_servers[0];
        assert_eq!(srv.name, "filesystem");
        assert_eq!(srv.command, "npx");
        assert_eq!(
            srv.args,
            vec!["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
        );
        assert_eq!(srv.env.get("MCP_FS_ROOT").map(String::as_str), Some("/tmp"));
    }

    #[test]
    fn test_mcp_servers_defaults_to_empty() {
        let _lock = env_lock();
        clear_xcode_env();
        let temp_dir = tempfile::TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.json");
        let json = r#"{
            "provider": { "api_base": "https://api.openai.com/v1", "api_key": "k" },
            "model": "gpt-4o"
        }"#;
        fs::write(&config_path, json).unwrap();
        let cfg = Config::load_from_path(Some(&config_path), &ConfigOverrides::default()).unwrap();
        assert!(
            cfg.mcp_servers.is_empty(),
            "mcp_servers should default to empty vec"
        );
    }
}