agent_core/agent/config.rs
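//! Provider configuration loading for agents.
//!
//! Builds an [`LLMRegistry`] either from a YAML config file in the user's home
//! directory (located via [`AgentConfig::config_path`]) or, failing that, from
//! the `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` environment variables.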
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;

use crate::controller::{CompactionConfig, LLMProvider, LLMSessionConfig, ToolCompaction};
use serde::Deserialize;

/// Per-agent settings consumed by [`load_config`].
pub trait AgentConfig {
    /// Path to the agent's YAML config file, relative to the home directory.
    fn config_path(&self) -> &str;

    /// System prompt applied when a provider entry does not define its own.
    fn default_system_prompt(&self) -> &str;

    /// Prefix used in this agent's log output.
    fn log_prefix(&self) -> &str;

    /// The agent's name.
    fn name(&self) -> &str;
}

/// One provider entry in the agent's config file.
#[derive(Debug, Deserialize)]
pub struct ProviderConfig {
    /// Provider identifier: `"anthropic"` or `"openai"`.
    pub provider: String,
    /// API key passed to the provider.
    pub api_key: String,
    /// Model identifier to use with this provider.
    pub model: String,
    /// Optional per-provider system prompt; the agent default is used when absent.
    pub system_prompt: Option<String>,
}

/// Top-level structure of the YAML config file.
#[derive(Debug, Deserialize)]
pub struct ConfigFile {
    /// Configured providers; may be omitted or empty.
    #[serde(default)]
    pub providers: Vec<ProviderConfig>,

    /// Provider to prefer; falls back to the first entry when unset.
    pub default_provider: Option<String>,
}

/// LLM session configurations keyed by provider name.
pub struct LLMRegistry {
    configs: HashMap<String, LLMSessionConfig>,
    default_provider: Option<String>,
}

impl LLMRegistry {
    /// Creates an empty registry with no default provider.
    pub fn new() -> Self {
        Self {
            configs: HashMap::new(),
            default_provider: None,
        }
    }

    /// Loads a registry from a YAML config file, applying `default_system_prompt`
    /// to any provider entry that does not define its own prompt.
    pub fn load_from_file(path: &PathBuf, default_system_prompt: &str) -> Result<Self, ConfigError> {
        let content = fs::read_to_string(path).map_err(|e| ConfigError::ReadError {
            path: path.display().to_string(),
            source: e.to_string(),
        })?;

        let config_file: ConfigFile =
            serde_yaml::from_str(&content).map_err(|e| ConfigError::ParseError {
                path: path.display().to_string(),
                source: e.to_string(),
            })?;

        let mut registry = Self::new();
        registry.default_provider = config_file.default_provider;

        for provider_config in config_file.providers {
            let session_config =
                Self::create_session_config(&provider_config, default_system_prompt)?;
            registry
                .configs
                .insert(provider_config.provider.clone(), session_config);

            // If the file did not name a default, use the first provider.
            if registry.default_provider.is_none() {
                registry.default_provider = Some(provider_config.provider);
            }
        }

        Ok(registry)
    }

    fn create_session_config(
        config: &ProviderConfig,
        default_system_prompt: &str,
    ) -> Result<LLMSessionConfig, ConfigError> {
        let provider = match config.provider.as_str() {
            "anthropic" => LLMProvider::Anthropic,
            "openai" => LLMProvider::OpenAI,
            other => {
                return Err(ConfigError::UnknownProvider {
                    provider: other.to_string(),
                })
            }
        };

        let mut session_config = match provider {
            LLMProvider::Anthropic => {
                LLMSessionConfig::anthropic(&config.api_key, &config.model)
            }
            LLMProvider::OpenAI => {
                LLMSessionConfig::openai(&config.api_key, &config.model)
            }
        };

        // Prefer the per-provider system prompt; otherwise use the agent default.
        let system_prompt = config
            .system_prompt
            .clone()
            .unwrap_or_else(|| default_system_prompt.to_string());
        session_config = session_config.with_system_prompt(system_prompt);

        // Compaction settings: trigger at a 0.05 threshold, keep only the most
        // recent turn, and summarize tool output.
        session_config = session_config.with_threshold_compaction(CompactionConfig {
            threshold: 0.05,
            keep_recent_turns: 1,
            tool_compaction: ToolCompaction::Summarize,
        });

        Ok(session_config)
    }

    /// Returns the default provider's config, or any configured provider when the
    /// default is unset or missing.
    pub fn get_default(&self) -> Option<&LLMSessionConfig> {
        self.default_provider
            .as_ref()
            .and_then(|p| self.configs.get(p))
            .or_else(|| self.configs.values().next())
    }

    /// Returns the config for the given provider name, if any.
    pub fn get(&self, provider: &str) -> Option<&LLMSessionConfig> {
        self.configs.get(provider)
    }

    /// Name of the default provider, if one is set.
    pub fn default_provider_name(&self) -> Option<&str> {
        self.default_provider.as_deref()
    }

    /// Returns `true` if no providers are configured.
    pub fn is_empty(&self) -> bool {
        self.configs.is_empty()
    }

    /// Names of all configured providers.
    pub fn providers(&self) -> Vec<&str> {
        self.configs.keys().map(|s| s.as_str()).collect()
    }
}

impl Default for LLMRegistry {
    fn default() -> Self {
        Self::new()
    }
}

/// Errors produced while loading configuration.
#[derive(Debug)]
pub enum ConfigError {
    /// The user's home directory could not be determined.
    NoHomeDirectory,
    /// The config file could not be read.
    ReadError { path: String, source: String },
    /// The config file could not be parsed as YAML.
    ParseError { path: String, source: String },
    /// The config names a provider that is not supported.
    UnknownProvider { provider: String },
}

impl std::fmt::Display for ConfigError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ConfigError::NoHomeDirectory => write!(f, "Could not determine home directory"),
            ConfigError::ReadError { path, source } => {
                write!(f, "Failed to read config file '{}': {}", path, source)
            }
            ConfigError::ParseError { path, source } => {
                write!(f, "Failed to parse config file '{}': {}", path, source)
            }
            ConfigError::UnknownProvider { provider } => {
                write!(f, "Unknown provider: {}", provider)
            }
        }
    }
}

impl std::error::Error for ConfigError {}

/// Builds an [`LLMRegistry`] for the given agent: first from the agent's config
/// file under the home directory, then from the `ANTHROPIC_API_KEY` /
/// `OPENAI_API_KEY` environment variables when no usable file is found.
pub fn load_config<A: AgentConfig>(agent_config: &A) -> LLMRegistry {
    let config_path = agent_config.config_path();
    let default_prompt = agent_config.default_system_prompt();

    // Prefer the config file in the user's home directory.
    if let Some(home) = dirs::home_dir() {
        let path = home.join(config_path);
        match LLMRegistry::load_from_file(&path, default_prompt) {
            Ok(registry) if !registry.is_empty() => {
                tracing::info!("Loaded configuration from ~/{}", config_path);
                return registry;
            }
            Ok(_) => {
                tracing::debug!("Config file empty, trying environment variables");
            }
            Err(e) => {
                tracing::debug!("Could not load config file: {}", e);
            }
        }
    }

    // Fall back to environment variables.
    let mut registry = LLMRegistry::new();

    if let Ok(api_key) = std::env::var("ANTHROPIC_API_KEY") {
        let model = std::env::var("ANTHROPIC_MODEL")
            .unwrap_or_else(|_| "claude-sonnet-4-20250514".to_string());

        let config = LLMSessionConfig::anthropic(&api_key, &model)
            .with_system_prompt(default_prompt);

        registry.configs.insert("anthropic".to_string(), config);
        registry.default_provider = Some("anthropic".to_string());

        tracing::info!("Loaded Anthropic configuration from environment");
    }

    if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
        let model =
            std::env::var("OPENAI_MODEL").unwrap_or_else(|_| "gpt-4-turbo-preview".to_string());

        let config =
            LLMSessionConfig::openai(&api_key, &model).with_system_prompt(default_prompt);

        registry.configs.insert("openai".to_string(), config);
        if registry.default_provider.is_none() {
            registry.default_provider = Some("openai".to_string());
        }

        tracing::info!("Loaded OpenAI configuration from environment");
    }

    registry
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_config() {
        let yaml = r#"
providers:
  - provider: anthropic
    api_key: test-key
    model: claude-sonnet-4-20250514
default_provider: anthropic
"#;
        let config: ConfigFile = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(config.providers.len(), 1);
        assert_eq!(config.providers[0].provider, "anthropic");
        assert_eq!(config.default_provider, Some("anthropic".to_string()));
    }
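
    // A small additional check (not in the original test set): an unrecognized
    // provider string should surface as `ConfigError::UnknownProvider` rather
    // than being silently skipped.
    #[test]
    fn test_unknown_provider_is_rejected() {
        let config = ProviderConfig {
            provider: "unknown".to_string(),
            api_key: "key".to_string(),
            model: "model".to_string(),
            system_prompt: None,
        };
        let result = LLMRegistry::create_session_config(&config, "default prompt");
        assert!(matches!(result, Err(ConfigError::UnknownProvider { .. })));
    }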

    #[test]
    fn test_empty_registry() {
        let registry = LLMRegistry::new();
        assert!(registry.is_empty());
        assert!(registry.get_default().is_none());
    }
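
    // Additional sketch tests (not in the original suite). The first exercises
    // the `get_default` fallback when no default provider is named; it builds a
    // session config through this module's own `create_session_config`, assuming
    // that constructing a config performs no I/O.
    #[test]
    fn test_get_default_falls_back_to_first_provider() {
        let provider_config = ProviderConfig {
            provider: "anthropic".to_string(),
            api_key: "test-key".to_string(),
            model: "claude-sonnet-4-20250514".to_string(),
            system_prompt: None,
        };
        let session_config =
            LLMRegistry::create_session_config(&provider_config, "default prompt").unwrap();

        let mut registry = LLMRegistry::new();
        registry
            .configs
            .insert(provider_config.provider.clone(), session_config);

        // No default_provider was set, so get_default falls back to the only entry.
        assert!(registry.default_provider_name().is_none());
        assert!(registry.get_default().is_some());
        assert_eq!(registry.providers(), vec!["anthropic"]);
    }

    // The second shows a hypothetical `AgentConfig` implementor; `DemoAgent` and
    // its values exist only for this test.
    struct DemoAgent;

    impl AgentConfig for DemoAgent {
        fn config_path(&self) -> &str {
            ".demo-agent/config.yaml"
        }
        fn default_system_prompt(&self) -> &str {
            "You are a helpful assistant."
        }
        fn log_prefix(&self) -> &str {
            "demo"
        }
        fn name(&self) -> &str {
            "demo-agent"
        }
    }

    #[test]
    fn test_agent_config_hooks() {
        let agent = DemoAgent;
        assert_eq!(agent.config_path(), ".demo-agent/config.yaml");
        assert_eq!(agent.name(), "demo-agent");
    }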
}