ceylon_runtime/config/
agent_config.rs

//! Agent configuration from TOML files.

use super::{McpToolConfig, MemoryConfig};
use crate::core::error::{Error, Result};
use serde::{Deserialize, Serialize};
use std::path::Path;

/// Configuration for a single LLM agent.
///
/// This struct can be loaded from a TOML file and used to create an `LlmAgent`.
///
/// # Example
///
/// ```toml
/// name = "my_agent"
/// model = "ollama::gemma3:latest"
/// system_prompt = "You are a helpful assistant."
/// temperature = 0.7
/// max_tokens = 1024
/// ```
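///
/// The same settings can also be built programmatically with the builder
/// methods on `AgentConfig` (a minimal sketch mirroring the TOML above):
///
/// ```rust
/// use ceylon_runtime::config::AgentConfig;
///
/// let config = AgentConfig::new("my_agent", "ollama::gemma3:latest")
///     .with_system_prompt("You are a helpful assistant.")
///     .with_temperature(0.7)
///     .with_max_tokens(1024);
/// assert_eq!(config.temperature, Some(0.7));
/// ```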
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
    /// Unique name for the agent
    pub name: String,

    /// Model in "provider::model" format (e.g., "ollama::gemma3:latest")
    pub model: String,

    /// System prompt for the agent
    #[serde(default = "default_system_prompt")]
    pub system_prompt: String,

    /// API key for the LLM provider (optional, can use env vars)
    #[serde(default)]
    pub api_key: Option<String>,

    /// Temperature for generation (0.0 - 2.0)
    #[serde(default)]
    pub temperature: Option<f32>,

    /// Maximum tokens to generate
    #[serde(default)]
    pub max_tokens: Option<u32>,

    /// Base URL override for the LLM provider
    #[serde(default)]
    pub base_url: Option<String>,

    /// Top-p (nucleus) sampling parameter
    #[serde(default)]
    pub top_p: Option<f32>,

    /// Top-k sampling parameter
    #[serde(default)]
    pub top_k: Option<u32>,

    /// Request timeout in seconds
    #[serde(default)]
    pub timeout_seconds: Option<u64>,

    /// Enable reasoning mode (for supported providers)
    #[serde(default)]
    pub reasoning: Option<bool>,

    /// Reasoning effort level
    #[serde(default)]
    pub reasoning_effort: Option<String>,

    /// Memory configuration for this agent (optional, overrides mesh default)
    #[serde(default)]
    pub memory: Option<MemoryConfig>,

    /// MCP tool configurations for this agent.
    /// Each entry defines an external MCP server that provides tools.
    #[serde(default)]
    pub mcp_tools: Vec<McpToolConfig>,
}

fn default_system_prompt() -> String {
    "You are a helpful AI assistant.".to_string()
}

impl Default for AgentConfig {
    fn default() -> Self {
        Self {
            name: String::new(),
            model: String::new(),
            system_prompt: default_system_prompt(),
            api_key: None,
            temperature: None,
            max_tokens: None,
            base_url: None,
            top_p: None,
            top_k: None,
            timeout_seconds: None,
            reasoning: None,
            reasoning_effort: None,
            memory: None,
            mcp_tools: Vec::new(),
        }
    }
}

impl AgentConfig {
    /// Create a new AgentConfig with required fields.
    pub fn new(name: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            model: model.into(),
            ..Default::default()
        }
    }

    /// Parse an AgentConfig from a TOML string.
    ///
    /// # Example
    ///
    /// ```rust
    /// use ceylon_runtime::config::AgentConfig;
    ///
    /// let toml = r#"
    /// name = "my_agent"
    /// model = "ollama::llama2"
    /// "#;
    ///
    /// let config = AgentConfig::from_toml(toml).unwrap();
    /// assert_eq!(config.name, "my_agent");
    /// ```
    pub fn from_toml(toml_str: &str) -> Result<Self> {
        toml::from_str(toml_str)
            .map_err(|e| Error::ConfigError(format!("Failed to parse TOML: {}", e)))
    }

    /// Load an AgentConfig from a TOML file.
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use ceylon_runtime::config::AgentConfig;
    ///
    /// let config = AgentConfig::from_file("agent.toml").unwrap();
    /// ```
    pub fn from_file(path: impl AsRef<Path>) -> Result<Self> {
        let content = std::fs::read_to_string(path.as_ref()).map_err(|e| {
            Error::ConfigError(format!(
                "Failed to read file '{}': {}",
                path.as_ref().display(),
                e
            ))
        })?;
        Self::from_toml(&content)
    }

    /// Set the system prompt.
    pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
        self.system_prompt = prompt.into();
        self
    }

    /// Set the API key.
    pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
        self.api_key = Some(api_key.into());
        self
    }

    /// Set the temperature.
    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature);
        self
    }

    /// Set max tokens.
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }

    /// Set memory configuration for this agent.
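    ///
    /// # Example
    ///
    /// A brief sketch reusing `MemoryConfig::in_memory` as shown elsewhere in
    /// this module's examples:
    ///
    /// ```rust
    /// use ceylon_runtime::config::{AgentConfig, MemoryConfig};
    ///
    /// let config = AgentConfig::new("agent", "ollama::llama2")
    ///     .with_memory(MemoryConfig::in_memory().with_max_entries(500));
    /// assert_eq!(config.memory.unwrap().max_entries, Some(500));
    /// ```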
    pub fn with_memory(mut self, memory: MemoryConfig) -> Self {
        self.memory = Some(memory);
        self
    }

    /// Get the effective memory configuration for this agent.
    ///
    /// Returns the agent's own memory config if set, otherwise returns the fallback
    /// (typically the mesh-level memory config).
    ///
    /// # Arguments
    ///
    /// * `fallback` - The mesh-level memory config to use if the agent has no memory config
    ///
    /// # Example
    ///
    /// ```rust
    /// use ceylon_runtime::config::{AgentConfig, MemoryConfig};
    ///
    /// let mesh_memory = MemoryConfig::in_memory().with_max_entries(1000);
    /// let agent = AgentConfig::new("agent1", "ollama::llama2");
    ///
    /// // Agent without memory uses mesh fallback
    /// let effective = agent.get_effective_memory_config(Some(&mesh_memory));
    /// assert!(effective.is_some());
    ///
    /// // Agent with its own memory uses that
    /// let agent_with_memory = agent.with_memory(MemoryConfig::in_memory().with_max_entries(500));
    /// let effective = agent_with_memory.get_effective_memory_config(Some(&mesh_memory));
    /// assert_eq!(effective.unwrap().max_entries, Some(500));
    /// ```
    pub fn get_effective_memory_config<'a>(
        &'a self,
        fallback: Option<&'a MemoryConfig>,
    ) -> Option<&'a MemoryConfig> {
        self.memory.as_ref().or(fallback)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_basic_config() {
        let toml = r#"
            name = "test_agent"
            model = "ollama::llama2"
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.name, "test_agent");
        assert_eq!(config.model, "ollama::llama2");
        assert_eq!(config.system_prompt, "You are a helpful AI assistant.");
    }

    #[test]
    fn test_parse_full_config() {
        let toml = r#"
            name = "researcher"
            model = "openai::gpt-4"
            system_prompt = "You are a research assistant."
            api_key = "sk-test"
            temperature = 0.7
            max_tokens = 2048
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.name, "researcher");
        assert_eq!(config.model, "openai::gpt-4");
        assert_eq!(config.system_prompt, "You are a research assistant.");
        assert_eq!(config.api_key, Some("sk-test".to_string()));
        assert_eq!(config.temperature, Some(0.7));
        assert_eq!(config.max_tokens, Some(2048));
    }

    #[test]
    fn test_invalid_toml() {
        let toml = "this is not valid toml [[[";
        let result = AgentConfig::from_toml(toml);
        assert!(result.is_err());
    }

    #[test]
    fn test_builder_methods() {
        let config = AgentConfig::new("agent", "ollama::llama2")
            .with_system_prompt("Custom prompt")
            .with_temperature(0.5)
            .with_max_tokens(1024);

        assert_eq!(config.name, "agent");
        assert_eq!(config.system_prompt, "Custom prompt");
        assert_eq!(config.temperature, Some(0.5));
        assert_eq!(config.max_tokens, Some(1024));
    }

    #[test]
    fn test_with_memory() {
        let memory = MemoryConfig::in_memory().with_max_entries(500);
        let config = AgentConfig::new("agent", "ollama::llama2").with_memory(memory);

        assert!(config.memory.is_some());
        assert_eq!(config.memory.as_ref().unwrap().max_entries, Some(500));
    }

    #[test]
    fn test_get_effective_memory_config_with_agent_memory() {
        let mesh_memory = MemoryConfig::in_memory().with_max_entries(1000);
        let agent_memory = MemoryConfig::in_memory().with_max_entries(500);
        let config = AgentConfig::new("agent", "ollama::llama2").with_memory(agent_memory);

        // Agent's own memory takes precedence
        let effective = config.get_effective_memory_config(Some(&mesh_memory));
        assert!(effective.is_some());
        assert_eq!(effective.unwrap().max_entries, Some(500));
    }

    #[test]
    fn test_get_effective_memory_config_fallback_to_mesh() {
        let mesh_memory = MemoryConfig::in_memory().with_max_entries(1000);
        let config = AgentConfig::new("agent", "ollama::llama2");

        // Falls back to mesh memory when agent has no memory config
        let effective = config.get_effective_memory_config(Some(&mesh_memory));
        assert!(effective.is_some());
        assert_eq!(effective.unwrap().max_entries, Some(1000));
    }

    #[test]
    fn test_get_effective_memory_config_no_memory() {
        let config = AgentConfig::new("agent", "ollama::llama2");

        // Returns None when no memory config anywhere
        let effective = config.get_effective_memory_config(None);
        assert!(effective.is_none());
    }
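
    // A minimal round-trip sketch for `from_file`; assumes the system temp
    // directory is writable and uses an arbitrary file name for illustration.
    #[test]
    fn test_from_file_round_trip() {
        let toml = r#"
            name = "file_agent"
            model = "ollama::llama2"
        "#;
        let path = std::env::temp_dir().join("ceylon_agent_config_test.toml");
        std::fs::write(&path, toml).unwrap();

        let config = AgentConfig::from_file(&path).unwrap();
        assert_eq!(config.name, "file_agent");
        assert_eq!(config.model, "ollama::llama2");

        // Best-effort cleanup of the temporary file.
        let _ = std::fs::remove_file(&path);
    }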
}