// agent_core_runtime/agent/config.rs
//! Configuration management for LLM agents.
//!
//! Provides trait-based customization for config paths and system prompts.
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};

use serde::Deserialize;

use crate::controller::{CompactionConfig, LLMSessionConfig, ToolCompaction};
11
/// Trait for agent-specific configuration.
///
/// Implement this trait to provide custom config paths and system prompts
/// for your agent.
pub trait AgentConfig {
    /// The config file path.
    ///
    /// Paths starting with `~/` are expanded to the home directory.
    /// All other paths (absolute or relative) are used as-is.
    fn config_path(&self) -> &str;

    /// The default system prompt for this agent.
    ///
    /// Used when a provider entry does not supply its own `system_prompt`.
    fn default_system_prompt(&self) -> &str;

    /// The log file prefix for this agent (e.g., "multi_code", "europa").
    fn log_prefix(&self) -> &str;

    /// Agent name for display and logging.
    fn name(&self) -> &str;

    /// Channel buffer size for internal communication channels.
    ///
    /// Returns `None` to use the default (500). Override to customize
    /// the buffer size for all async channels (LLM responses, tool results,
    /// UI events, etc.).
    ///
    /// Larger values reduce backpressure but use more memory.
    /// Smaller values provide tighter flow control.
    fn channel_buffer_size(&self) -> Option<usize> {
        None
    }
}
44
/// A simple configuration for quick agent setup.
///
/// Use this when you don't need a custom config struct. Created via
/// `AgentCore::with_config()`.
///
/// # Example
///
/// ```ignore
/// let agent = AgentCore::with_config(
///     "my-agent",
///     "~/.config/my-agent/config.yaml",
///     "You are a helpful assistant."
/// )?;
/// ```
pub struct SimpleConfig {
    // Display/logging name for the agent.
    name: String,
    // Path to the YAML config file; `~/` prefix is expanded by `load_config`.
    config_path: String,
    // Default system prompt returned by `default_system_prompt()`.
    system_prompt: String,
    // Derived from `name` in `new()`: lowercased, non-alphanumerics -> '_'.
    log_prefix: String,
}
65
66impl SimpleConfig {
67    /// Create a new simple configuration.
68    ///
69    /// # Arguments
70    /// * `name` - Agent name for display (e.g., "my-agent")
71    /// * `config_path` - Path to config file (e.g., "~/.config/my-agent/config.yaml")
72    /// * `system_prompt` - Default system prompt for the agent
73    pub fn new(name: impl Into<String>, config_path: impl Into<String>, system_prompt: impl Into<String>) -> Self {
74        let name = name.into();
75        // Derive log prefix from name: lowercase, replace non-alphanumeric with underscores
76        let log_prefix = name
77            .chars()
78            .map(|c| if c.is_alphanumeric() { c.to_ascii_lowercase() } else { '_' })
79            .collect();
80
81        Self {
82            name,
83            config_path: config_path.into(),
84            system_prompt: system_prompt.into(),
85            log_prefix,
86        }
87    }
88}
89
90impl AgentConfig for SimpleConfig {
91    fn config_path(&self) -> &str {
92        &self.config_path
93    }
94
95    fn default_system_prompt(&self) -> &str {
96        &self.system_prompt
97    }
98
99    fn log_prefix(&self) -> &str {
100        &self.log_prefix
101    }
102
103    fn name(&self) -> &str {
104        &self.name
105    }
106}
107
/// Provider configuration from YAML
///
/// Supported providers:
/// - `anthropic` - Anthropic Claude models
/// - `openai` - OpenAI GPT models
/// - `google` - Google Gemini models
/// - `groq` - Groq (Llama, Mixtral)
/// - `together` - Together AI
/// - `fireworks` - Fireworks AI
/// - `mistral` - Mistral AI
/// - `perplexity` - Perplexity
/// - `deepseek` - DeepSeek
/// - `openrouter` - OpenRouter (access to multiple providers)
/// - `ollama` - Local Ollama server
/// - `lmstudio` - Local LM Studio server
/// - `anyscale` - Anyscale Endpoints
/// - `cerebras` - Cerebras
/// - `sambanova` - SambaNova
/// - `xai` - xAI (Grok)
#[derive(Debug, Deserialize)]
pub struct ProviderConfig {
    /// Provider name (see above for supported values); matched case-insensitively.
    pub provider: String,
    /// API token/key
    pub api_key: String,
    /// Model identifier (optional - uses provider default if not specified)
    #[serde(default)]
    pub model: String,
    /// Optional system prompt override
    pub system_prompt: Option<String>,
}
139
/// Root configuration structure from YAML
#[derive(Debug, Deserialize)]
pub struct ConfigFile {
    /// List of LLM provider configurations
    #[serde(default)]
    pub providers: Vec<ProviderConfig>,

    /// Default provider to use (optional, defaults to first provider)
    pub default_provider: Option<String>,
}
150
/// LLM Registry - stores loaded provider configurations
pub struct LLMRegistry {
    // Session configs keyed by provider name exactly as written in the config.
    configs: HashMap<String, LLMSessionConfig>,
    // Provider used when the caller doesn't ask for one explicitly.
    default_provider: Option<String>,
}
156
157impl LLMRegistry {
158    /// Creates an empty registry
159    pub fn new() -> Self {
160        Self {
161            configs: HashMap::new(),
162            default_provider: None,
163        }
164    }
165
166    /// Load configuration from the specified config file path
167    pub fn load_from_file(path: &PathBuf, default_system_prompt: &str) -> Result<Self, ConfigError> {
168        let content = fs::read_to_string(path).map_err(|e| ConfigError::ReadError {
169            path: path.display().to_string(),
170            source: e.to_string(),
171        })?;
172
173        let config_file: ConfigFile =
174            serde_yaml::from_str(&content).map_err(|e| ConfigError::ParseError {
175                path: path.display().to_string(),
176                source: e.to_string(),
177            })?;
178
179        let mut registry = Self::new();
180        registry.default_provider = config_file.default_provider;
181
182        for provider_config in config_file.providers {
183            let session_config = Self::create_session_config(&provider_config, default_system_prompt)?;
184            registry
185                .configs
186                .insert(provider_config.provider.clone(), session_config);
187
188            // Set first provider as default if not specified
189            if registry.default_provider.is_none() {
190                registry.default_provider = Some(provider_config.provider);
191            }
192        }
193
194        Ok(registry)
195    }
196
197    /// Create session config from provider config
198    fn create_session_config(config: &ProviderConfig, default_system_prompt: &str) -> Result<LLMSessionConfig, ConfigError> {
199        use super::providers::get_provider_info;
200
201        let provider_name = config.provider.to_lowercase();
202
203        // Check if it's a known OpenAI-compatible provider
204        let mut session_config = if let Some(info) = get_provider_info(&provider_name) {
205            // Use model from config, or fall back to provider default
206            let model = if config.model.is_empty() {
207                info.default_model.to_string()
208            } else {
209                config.model.clone()
210            };
211
212            LLMSessionConfig::openai_compatible(
213                &config.api_key,
214                &model,
215                info.base_url,
216                info.context_limit,
217            )
218        } else {
219            // Handle built-in providers
220            match provider_name.as_str() {
221                "anthropic" => {
222                    let model = if config.model.is_empty() {
223                        "claude-sonnet-4-20250514".to_string()
224                    } else {
225                        config.model.clone()
226                    };
227                    LLMSessionConfig::anthropic(&config.api_key, &model)
228                }
229                "openai" => {
230                    let model = if config.model.is_empty() {
231                        "gpt-4-turbo-preview".to_string()
232                    } else {
233                        config.model.clone()
234                    };
235                    LLMSessionConfig::openai(&config.api_key, &model)
236                }
237                "google" => {
238                    let model = if config.model.is_empty() {
239                        "gemini-2.5-flash".to_string()
240                    } else {
241                        config.model.clone()
242                    };
243                    LLMSessionConfig::google(&config.api_key, &model)
244                }
245                other => {
246                    return Err(ConfigError::UnknownProvider {
247                        provider: other.to_string(),
248                    })
249                }
250            }
251        };
252
253        // Set system prompt
254        let system_prompt = config
255            .system_prompt
256            .clone()
257            .unwrap_or_else(|| default_system_prompt.to_string());
258        session_config = session_config.with_system_prompt(system_prompt);
259
260        // Configure aggressive compaction to avoid rate limits
261        // With 0.05 threshold on 200K context = 10K tokens triggers compaction
262        // keep_recent_turns=1 means only current turn keeps full tool results
263        // All previous tool results are summarized to compact strings
264        session_config = session_config.with_threshold_compaction(CompactionConfig {
265            threshold: 0.05,
266            keep_recent_turns: 1,
267            tool_compaction: ToolCompaction::Summarize,
268        });
269
270        Ok(session_config)
271    }
272
273    /// Get the default session config
274    pub fn get_default(&self) -> Option<&LLMSessionConfig> {
275        self.default_provider
276            .as_ref()
277            .and_then(|p| self.configs.get(p))
278            .or_else(|| self.configs.values().next())
279    }
280
281    /// Get session config by provider name
282    pub fn get(&self, provider: &str) -> Option<&LLMSessionConfig> {
283        self.configs.get(provider)
284    }
285
286    /// Get the default provider name
287    pub fn default_provider_name(&self) -> Option<&str> {
288        self.default_provider.as_deref()
289    }
290
291    /// Check if registry is empty
292    pub fn is_empty(&self) -> bool {
293        self.configs.is_empty()
294    }
295
296    /// Get list of available providers
297    pub fn providers(&self) -> Vec<&str> {
298        self.configs.keys().map(|s| s.as_str()).collect()
299    }
300
301    /// Inject environment context into all session prompts.
302    ///
303    /// This appends environment information (working directory, platform, date)
304    /// to all configured system prompts, giving the LLM awareness of its
305    /// execution context.
306    ///
307    /// # Example
308    ///
309    /// ```ignore
310    /// let registry = load_config(&config).with_environment_context();
311    /// ```
312    pub fn with_environment_context(mut self) -> Self {
313        use super::environment::EnvironmentContext;
314
315        let context = EnvironmentContext::gather();
316        let context_section = context.to_prompt_section();
317
318        for config in self.configs.values_mut() {
319            if let Some(ref prompt) = config.system_prompt {
320                config.system_prompt = Some(format!("{}\n\n{}", prompt, context_section));
321            } else {
322                config.system_prompt = Some(context_section.clone());
323            }
324        }
325
326        self
327    }
328}
329
impl Default for LLMRegistry {
    /// Equivalent to [`LLMRegistry::new`]: an empty registry.
    fn default() -> Self {
        Self::new()
    }
}
335
/// Errors produced while loading or interpreting agent configuration.
#[derive(Debug)]
pub enum ConfigError {
    /// The user's home directory could not be determined.
    NoHomeDirectory,
    /// Reading the config file from disk failed.
    ReadError { path: String, source: String },
    /// The config file contents could not be parsed.
    ParseError { path: String, source: String },
    /// The config referenced a provider name that is not supported.
    UnknownProvider { provider: String },
}

impl std::fmt::Display for ConfigError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::NoHomeDirectory => f.write_str("Could not determine home directory"),
            Self::ReadError { path, source } => write!(f, "Failed to read config file '{}': {}", path, source),
            Self::ParseError { path, source } => write!(f, "Failed to parse config file '{}': {}", path, source),
            Self::UnknownProvider { provider } => write!(f, "Unknown provider: {}", provider),
        }
    }
}

impl std::error::Error for ConfigError {}
367
/// Load config for an agent using its AgentConfig trait implementation.
///
/// Tries to load from the config file first, then falls back to environment variables.
/// Supports both absolute paths and paths relative to home directory.
///
/// Fallback order for the default provider (first hit wins): config file,
/// `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, `GOOGLE_API_KEY`, then any known
/// OpenAI-compatible provider detected via its env var. Never fails: an
/// unusable config file and empty environment yield an empty registry.
pub fn load_config<A: AgentConfig>(agent_config: &A) -> LLMRegistry {
    let config_path = agent_config.config_path();
    let default_prompt = agent_config.default_system_prompt();

    // Resolve config path - expand ~/ to home directory, otherwise use as-is
    let path = if let Some(rest) = config_path.strip_prefix("~/") {
        match dirs::home_dir() {
            Some(home) => home.join(rest),
            None => {
                // Fall through with the literal "~/..." path; the read below
                // will fail and we drop to the env-var fallback.
                tracing::debug!("Could not determine home directory");
                PathBuf::from(config_path)
            }
        }
    } else {
        PathBuf::from(config_path)
    };

    // Try loading from config file first
    match LLMRegistry::load_from_file(&path, default_prompt) {
        Ok(registry) if !registry.is_empty() => {
            tracing::info!("Loaded configuration from {}", path.display());
            return registry;
        }
        Ok(_) => {
            tracing::debug!("Config file empty, trying environment variables");
        }
        Err(e) => {
            // Missing or malformed file is not fatal — env vars may still work.
            tracing::debug!("Could not load config file: {}", e);
        }
    }

    // Fall back to environment variables
    let mut registry = LLMRegistry::new();

    // Default compaction config for environment-based configuration
    // (same aggressive settings used by LLMRegistry::create_session_config).
    let compaction = CompactionConfig {
        threshold: 0.05,
        keep_recent_turns: 1,
        tool_compaction: ToolCompaction::Summarize,
    };

    if let Ok(api_key) = std::env::var("ANTHROPIC_API_KEY") {
        let model = std::env::var("ANTHROPIC_MODEL")
            .unwrap_or_else(|_| "claude-sonnet-4-20250514".to_string());

        let config = LLMSessionConfig::anthropic(&api_key, &model)
            .with_system_prompt(default_prompt)
            .with_threshold_compaction(compaction.clone());

        registry.configs.insert("anthropic".to_string(), config);
        // Anthropic always becomes the default when present (first in the
        // fallback order).
        registry.default_provider = Some("anthropic".to_string());

        tracing::info!("Loaded Anthropic configuration from environment");
    }

    if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
        let model =
            std::env::var("OPENAI_MODEL").unwrap_or_else(|_| "gpt-4-turbo-preview".to_string());

        let config = LLMSessionConfig::openai(&api_key, &model)
            .with_system_prompt(default_prompt)
            .with_threshold_compaction(compaction.clone());

        registry.configs.insert("openai".to_string(), config);
        // Only becomes default if no earlier provider claimed it.
        if registry.default_provider.is_none() {
            registry.default_provider = Some("openai".to_string());
        }

        tracing::info!("Loaded OpenAI configuration from environment");
    }

    if let Ok(api_key) = std::env::var("GOOGLE_API_KEY") {
        let model =
            std::env::var("GOOGLE_MODEL").unwrap_or_else(|_| "gemini-2.5-flash".to_string());

        let config = LLMSessionConfig::google(&api_key, &model)
            .with_system_prompt(default_prompt)
            .with_threshold_compaction(compaction.clone());

        registry.configs.insert("google".to_string(), config);
        if registry.default_provider.is_none() {
            registry.default_provider = Some("google".to_string());
        }

        tracing::info!("Loaded Google (Gemini) configuration from environment");
    }

    // Check for known OpenAI-compatible providers via environment variables
    for (name, info) in super::providers::KNOWN_PROVIDERS {
        // For providers that require API keys, the env var must contain the key
        // For local providers (Ollama, LM Studio), the env var just signals enablement
        let api_key = if info.requires_api_key {
            match std::env::var(info.env_var) {
                Ok(key) if !key.is_empty() => key,
                _ => continue, // Skip if no API key provided
            }
        } else {
            // Local provider - check if env var is set (any value enables it)
            if std::env::var(info.env_var).is_err() {
                continue;
            }
            String::new() // Empty API key for local providers
        };

        let model =
            std::env::var(info.model_env_var).unwrap_or_else(|_| info.default_model.to_string());

        let config = LLMSessionConfig::openai_compatible(&api_key, &model, info.base_url, info.context_limit)
            .with_system_prompt(default_prompt)
            .with_threshold_compaction(compaction.clone());

        registry.configs.insert(name.to_string(), config);
        if registry.default_provider.is_none() {
            registry.default_provider = Some(name.to_string());
        }

        tracing::info!("Loaded {} configuration from environment", info.name);
    }

    registry
}
493
#[cfg(test)]
mod tests {
    use super::*;

    // Full config file with an explicit default_provider deserializes.
    #[test]
    fn test_parse_config() {
        let yaml = r#"
providers:
  - provider: anthropic
    api_key: test-key
    model: claude-sonnet-4-20250514
default_provider: anthropic
"#;
        let config: ConfigFile = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(config.providers.len(), 1);
        assert_eq!(config.providers[0].provider, "anthropic");
        assert_eq!(config.default_provider, Some("anthropic".to_string()));
    }

    // OpenAI-compatible provider entries parse without a default_provider key.
    #[test]
    fn test_parse_known_provider() {
        let yaml = r#"
providers:
  - provider: groq
    api_key: gsk_test_key
    model: llama-3.3-70b-versatile
"#;
        let config: ConfigFile = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(config.providers.len(), 1);
        assert_eq!(config.providers[0].provider, "groq");
    }

    #[test]
    fn test_known_provider_default_model() {
        // When model is not specified, it should use the provider's default
        let provider_config = ProviderConfig {
            provider: "groq".to_string(),
            api_key: "test-key".to_string(),
            model: String::new(), // Empty model
            system_prompt: None,
        };

        let session_config = LLMRegistry::create_session_config(&provider_config, "test prompt").unwrap();
        // Should use groq's default model
        assert_eq!(session_config.model, "llama-3.3-70b-versatile");
        // Should have groq's base_url set
        assert!(session_config.base_url.is_some());
        assert!(session_config.base_url.as_ref().unwrap().contains("groq.com"));
    }

    // A fresh registry reports empty and yields no default config.
    #[test]
    fn test_empty_registry() {
        let registry = LLMRegistry::new();
        assert!(registry.is_empty());
        assert!(registry.get_default().is_none());
    }
}