// agent_core_runtime/agent/config.rs
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};

use crate::controller::{CompactionConfig, LLMSessionConfig, ToolCompaction};
use serde::Deserialize;
11
/// Static configuration contract for an agent: where its config file
/// lives, what system prompt it falls back to, and how it labels logs.
pub trait AgentConfig {
    /// Path to the agent's config file; a leading `~/` is expanded to
    /// the home directory by `load_config`.
    fn config_path(&self) -> &str;

    /// System prompt used when a provider entry does not supply one.
    fn default_system_prompt(&self) -> &str;

    /// Short identifier used to prefix log output.
    fn log_prefix(&self) -> &str;

    /// Human-readable agent name.
    fn name(&self) -> &str;

    /// Optional override for channel buffer sizing; `None` lets the
    /// runtime choose its own default.
    fn channel_buffer_size(&self) -> Option<usize> {
        None
    }
}
44
/// Straightforward `AgentConfig` implementation that stores every
/// value as an owned `String`.
pub struct SimpleConfig {
    // Human-readable agent name.
    name: String,
    // Path to the agent's config file (may start with `~/`).
    config_path: String,
    // Fallback system prompt.
    system_prompt: String,
    // Derived from `name`: lowercased, non-alphanumerics replaced
    // with `_` (see `SimpleConfig::new`).
    log_prefix: String,
}
65
66impl SimpleConfig {
67 pub fn new(name: impl Into<String>, config_path: impl Into<String>, system_prompt: impl Into<String>) -> Self {
74 let name = name.into();
75 let log_prefix = name
77 .chars()
78 .map(|c| if c.is_alphanumeric() { c.to_ascii_lowercase() } else { '_' })
79 .collect();
80
81 Self {
82 name,
83 config_path: config_path.into(),
84 system_prompt: system_prompt.into(),
85 log_prefix,
86 }
87 }
88}
89
// Plain field accessors satisfying the `AgentConfig` contract.
impl AgentConfig for SimpleConfig {
    fn config_path(&self) -> &str {
        &self.config_path
    }

    fn default_system_prompt(&self) -> &str {
        &self.system_prompt
    }

    fn log_prefix(&self) -> &str {
        &self.log_prefix
    }

    fn name(&self) -> &str {
        &self.name
    }
}
107
/// One provider entry parsed from the config file.
#[derive(Debug, Deserialize)]
pub struct ProviderConfig {
    /// Provider identifier (e.g. "anthropic", "openai", "groq");
    /// matched case-insensitively in `create_session_config`.
    pub provider: String,
    /// API key handed to the provider's session constructor.
    pub api_key: String,
    /// Model name; an empty string (the serde default) selects the
    /// provider's built-in default model.
    #[serde(default)]
    pub model: String,
    /// Optional per-provider system prompt; the agent's default
    /// prompt is used when absent.
    pub system_prompt: Option<String>,
}
139
/// Top-level shape of the YAML config file.
#[derive(Debug, Deserialize)]
pub struct ConfigFile {
    /// Provider entries; defaults to an empty list when the key is
    /// missing from the file.
    #[serde(default)]
    pub providers: Vec<ProviderConfig>,

    /// Preferred provider name; when absent, the first listed
    /// provider becomes the default (see `LLMRegistry::load_from_file`).
    pub default_provider: Option<String>,
}
150
/// Registry of ready-to-use LLM session configurations, keyed by
/// provider name, with an optional default provider.
pub struct LLMRegistry {
    // Provider name -> fully-resolved session configuration.
    configs: HashMap<String, LLMSessionConfig>,
    // Name of the provider returned by `get_default`, when set.
    default_provider: Option<String>,
}
156
157impl LLMRegistry {
158 pub fn new() -> Self {
160 Self {
161 configs: HashMap::new(),
162 default_provider: None,
163 }
164 }
165
166 pub fn load_from_file(path: &PathBuf, default_system_prompt: &str) -> Result<Self, ConfigError> {
168 let content = fs::read_to_string(path).map_err(|e| ConfigError::ReadError {
169 path: path.display().to_string(),
170 source: e.to_string(),
171 })?;
172
173 let config_file: ConfigFile =
174 serde_yaml::from_str(&content).map_err(|e| ConfigError::ParseError {
175 path: path.display().to_string(),
176 source: e.to_string(),
177 })?;
178
179 let mut registry = Self::new();
180 registry.default_provider = config_file.default_provider;
181
182 for provider_config in config_file.providers {
183 let session_config = Self::create_session_config(&provider_config, default_system_prompt)?;
184 registry
185 .configs
186 .insert(provider_config.provider.clone(), session_config);
187
188 if registry.default_provider.is_none() {
190 registry.default_provider = Some(provider_config.provider);
191 }
192 }
193
194 Ok(registry)
195 }
196
197 fn create_session_config(config: &ProviderConfig, default_system_prompt: &str) -> Result<LLMSessionConfig, ConfigError> {
199 use super::providers::get_provider_info;
200
201 let provider_name = config.provider.to_lowercase();
202
203 let mut session_config = if let Some(info) = get_provider_info(&provider_name) {
205 let model = if config.model.is_empty() {
207 info.default_model.to_string()
208 } else {
209 config.model.clone()
210 };
211
212 LLMSessionConfig::openai_compatible(
213 &config.api_key,
214 &model,
215 info.base_url,
216 info.context_limit,
217 )
218 } else {
219 match provider_name.as_str() {
221 "anthropic" => {
222 let model = if config.model.is_empty() {
223 "claude-sonnet-4-20250514".to_string()
224 } else {
225 config.model.clone()
226 };
227 LLMSessionConfig::anthropic(&config.api_key, &model)
228 }
229 "openai" => {
230 let model = if config.model.is_empty() {
231 "gpt-4-turbo-preview".to_string()
232 } else {
233 config.model.clone()
234 };
235 LLMSessionConfig::openai(&config.api_key, &model)
236 }
237 "google" => {
238 let model = if config.model.is_empty() {
239 "gemini-2.5-flash".to_string()
240 } else {
241 config.model.clone()
242 };
243 LLMSessionConfig::google(&config.api_key, &model)
244 }
245 other => {
246 return Err(ConfigError::UnknownProvider {
247 provider: other.to_string(),
248 })
249 }
250 }
251 };
252
253 let system_prompt = config
255 .system_prompt
256 .clone()
257 .unwrap_or_else(|| default_system_prompt.to_string());
258 session_config = session_config.with_system_prompt(system_prompt);
259
260 session_config = session_config.with_threshold_compaction(CompactionConfig {
265 threshold: 0.05,
266 keep_recent_turns: 1,
267 tool_compaction: ToolCompaction::Summarize,
268 });
269
270 Ok(session_config)
271 }
272
273 pub fn get_default(&self) -> Option<&LLMSessionConfig> {
275 self.default_provider
276 .as_ref()
277 .and_then(|p| self.configs.get(p))
278 .or_else(|| self.configs.values().next())
279 }
280
281 pub fn get(&self, provider: &str) -> Option<&LLMSessionConfig> {
283 self.configs.get(provider)
284 }
285
286 pub fn default_provider_name(&self) -> Option<&str> {
288 self.default_provider.as_deref()
289 }
290
291 pub fn is_empty(&self) -> bool {
293 self.configs.is_empty()
294 }
295
296 pub fn providers(&self) -> Vec<&str> {
298 self.configs.keys().map(|s| s.as_str()).collect()
299 }
300
301 pub fn with_environment_context(mut self) -> Self {
313 use super::environment::EnvironmentContext;
314
315 let context = EnvironmentContext::gather();
316 let context_section = context.to_prompt_section();
317
318 for config in self.configs.values_mut() {
319 if let Some(ref prompt) = config.system_prompt {
320 config.system_prompt = Some(format!("{}\n\n{}", prompt, context_section));
321 } else {
322 config.system_prompt = Some(context_section.clone());
323 }
324 }
325
326 self
327 }
328}
329
// `Default` delegates to `new`, yielding an empty registry.
impl Default for LLMRegistry {
    fn default() -> Self {
        Self::new()
    }
}
335
/// Errors produced while loading or interpreting the configuration.
#[derive(Debug)]
pub enum ConfigError {
    /// The user's home directory could not be determined.
    NoHomeDirectory,
    /// Reading the config file from disk failed.
    ReadError { path: String, source: String },
    /// The config file's contents were not valid YAML.
    ParseError { path: String, source: String },
    /// The provider name is neither in the known-provider table nor
    /// one of the built-in providers.
    UnknownProvider { provider: String },
}
348
349impl std::fmt::Display for ConfigError {
350 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
351 match self {
352 ConfigError::NoHomeDirectory => write!(f, "Could not determine home directory"),
353 ConfigError::ReadError { path, source } => {
354 write!(f, "Failed to read config file '{}': {}", path, source)
355 }
356 ConfigError::ParseError { path, source } => {
357 write!(f, "Failed to parse config file '{}': {}", path, source)
358 }
359 ConfigError::UnknownProvider { provider } => {
360 write!(f, "Unknown provider: {}", provider)
361 }
362 }
363 }
364}
365
// Marker impl: `Debug` + `Display` above satisfy the `Error` contract.
impl std::error::Error for ConfigError {}
367
368pub fn load_config<A: AgentConfig>(agent_config: &A) -> LLMRegistry {
373 let config_path = agent_config.config_path();
374 let default_prompt = agent_config.default_system_prompt();
375
376 let path = if let Some(rest) = config_path.strip_prefix("~/") {
378 match dirs::home_dir() {
379 Some(home) => home.join(rest),
380 None => {
381 tracing::debug!("Could not determine home directory");
382 PathBuf::from(config_path)
383 }
384 }
385 } else {
386 PathBuf::from(config_path)
387 };
388
389 match LLMRegistry::load_from_file(&path, default_prompt) {
391 Ok(registry) if !registry.is_empty() => {
392 tracing::info!("Loaded configuration from {}", path.display());
393 return registry;
394 }
395 Ok(_) => {
396 tracing::debug!("Config file empty, trying environment variables");
397 }
398 Err(e) => {
399 tracing::debug!("Could not load config file: {}", e);
400 }
401 }
402
403 let mut registry = LLMRegistry::new();
405
406 let compaction = CompactionConfig {
408 threshold: 0.05,
409 keep_recent_turns: 1,
410 tool_compaction: ToolCompaction::Summarize,
411 };
412
413 if let Ok(api_key) = std::env::var("ANTHROPIC_API_KEY") {
414 let model = std::env::var("ANTHROPIC_MODEL")
415 .unwrap_or_else(|_| "claude-sonnet-4-20250514".to_string());
416
417 let config = LLMSessionConfig::anthropic(&api_key, &model)
418 .with_system_prompt(default_prompt)
419 .with_threshold_compaction(compaction.clone());
420
421 registry.configs.insert("anthropic".to_string(), config);
422 registry.default_provider = Some("anthropic".to_string());
423
424 tracing::info!("Loaded Anthropic configuration from environment");
425 }
426
427 if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
428 let model =
429 std::env::var("OPENAI_MODEL").unwrap_or_else(|_| "gpt-4-turbo-preview".to_string());
430
431 let config = LLMSessionConfig::openai(&api_key, &model)
432 .with_system_prompt(default_prompt)
433 .with_threshold_compaction(compaction.clone());
434
435 registry.configs.insert("openai".to_string(), config);
436 if registry.default_provider.is_none() {
437 registry.default_provider = Some("openai".to_string());
438 }
439
440 tracing::info!("Loaded OpenAI configuration from environment");
441 }
442
443 if let Ok(api_key) = std::env::var("GOOGLE_API_KEY") {
444 let model =
445 std::env::var("GOOGLE_MODEL").unwrap_or_else(|_| "gemini-2.5-flash".to_string());
446
447 let config = LLMSessionConfig::google(&api_key, &model)
448 .with_system_prompt(default_prompt)
449 .with_threshold_compaction(compaction.clone());
450
451 registry.configs.insert("google".to_string(), config);
452 if registry.default_provider.is_none() {
453 registry.default_provider = Some("google".to_string());
454 }
455
456 tracing::info!("Loaded Google (Gemini) configuration from environment");
457 }
458
459 for (name, info) in super::providers::KNOWN_PROVIDERS {
461 let api_key = if info.requires_api_key {
464 match std::env::var(info.env_var) {
465 Ok(key) if !key.is_empty() => key,
466 _ => continue, }
468 } else {
469 if std::env::var(info.env_var).is_err() {
471 continue;
472 }
473 String::new() };
475
476 let model =
477 std::env::var(info.model_env_var).unwrap_or_else(|_| info.default_model.to_string());
478
479 let config = LLMSessionConfig::openai_compatible(&api_key, &model, info.base_url, info.context_limit)
480 .with_system_prompt(default_prompt)
481 .with_threshold_compaction(compaction.clone());
482
483 registry.configs.insert(name.to_string(), config);
484 if registry.default_provider.is_none() {
485 registry.default_provider = Some(name.to_string());
486 }
487
488 tracing::info!("Loaded {} configuration from environment", info.name);
489 }
490
491 registry
492}
493
#[cfg(test)]
mod tests {
    use super::*;

    // A fully-specified provider entry plus `default_provider`
    // round-trips through serde_yaml.
    #[test]
    fn test_parse_config() {
        let yaml = r#"
providers:
  - provider: anthropic
    api_key: test-key
    model: claude-sonnet-4-20250514
default_provider: anthropic
"#;
        let config: ConfigFile = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(config.providers.len(), 1);
        assert_eq!(config.providers[0].provider, "anthropic");
        assert_eq!(config.default_provider, Some("anthropic".to_string()));
    }

    // A table-known provider (groq) parses without a `default_provider`.
    #[test]
    fn test_parse_known_provider() {
        let yaml = r#"
providers:
  - provider: groq
    api_key: gsk_test_key
    model: llama-3.3-70b-versatile
"#;
        let config: ConfigFile = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(config.providers.len(), 1);
        assert_eq!(config.providers[0].provider, "groq");
    }

    // An empty `model` field resolves to the provider table's default
    // model and base URL for a known provider.
    #[test]
    fn test_known_provider_default_model() {
        let provider_config = ProviderConfig {
            provider: "groq".to_string(),
            api_key: "test-key".to_string(),
            model: String::new(), system_prompt: None,
        };

        let session_config = LLMRegistry::create_session_config(&provider_config, "test prompt").unwrap();
        assert_eq!(session_config.model, "llama-3.3-70b-versatile");
        assert!(session_config.base_url.is_some());
        assert!(session_config.base_url.as_ref().unwrap().contains("groq.com"));
    }

    // A fresh registry is empty and has no default config.
    #[test]
    fn test_empty_registry() {
        let registry = LLMRegistry::new();
        assert!(registry.is_empty());
        assert!(registry.get_default().is_none());
    }
}