// ceylon_runtime/config/agent_config.rs

use super::{McpToolConfig, MemoryConfig};
use crate::core::error::{Error, Result};
use serde::{Deserialize, Serialize};
use std::path::Path;

/// Configuration for a single agent: identity, model selection, LLM
/// sampling parameters, and optional memory / MCP tool integrations.
///
/// Typically deserialized from TOML via [`AgentConfig::from_toml`] or
/// [`AgentConfig::from_file`]; only `name` and `model` are required.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
    /// Agent name (required in the TOML source).
    pub name: String,

    /// Model identifier, e.g. `"ollama::llama2"` or `"openai::gpt-4"`.
    pub model: String,

    /// System prompt; defaults to a generic assistant prompt when absent.
    #[serde(default = "default_system_prompt")]
    pub system_prompt: String,

    /// API key for the model provider, if one is required.
    #[serde(default)]
    pub api_key: Option<String>,

    /// Sampling temperature, if set.
    #[serde(default)]
    pub temperature: Option<f32>,

    /// Maximum number of tokens to generate, if set.
    #[serde(default)]
    pub max_tokens: Option<u32>,

    /// Override for the provider's API base URL, if set.
    #[serde(default)]
    pub base_url: Option<String>,

    /// Nucleus (top-p) sampling parameter, if set.
    #[serde(default)]
    pub top_p: Option<f32>,

    /// Top-k sampling parameter, if set.
    #[serde(default)]
    pub top_k: Option<u32>,

    /// Request timeout in seconds, if set.
    #[serde(default)]
    pub timeout_seconds: Option<u64>,

    /// Whether to enable reasoning mode, if set.
    // NOTE(review): provider-specific semantics — confirm against the
    // model backends that consume this flag.
    #[serde(default)]
    pub reasoning: Option<bool>,

    /// Reasoning effort hint (free-form string), if set.
    #[serde(default)]
    pub reasoning_effort: Option<String>,

    /// Per-agent memory configuration; when `None`, callers may fall
    /// back to a shared config via [`AgentConfig::get_effective_memory_config`].
    #[serde(default)]
    pub memory: Option<MemoryConfig>,

    /// MCP tool integrations available to this agent (empty by default).
    #[serde(default)]
    pub mcp_tools: Vec<McpToolConfig>,
}
78
/// Fallback system prompt used when the TOML source omits `system_prompt`.
fn default_system_prompt() -> String {
    String::from("You are a helpful AI assistant.")
}
82
83impl Default for AgentConfig {
84 fn default() -> Self {
85 Self {
86 name: String::new(),
87 model: String::new(),
88 system_prompt: default_system_prompt(),
89 api_key: None,
90 temperature: None,
91 max_tokens: None,
92 base_url: None,
93 top_p: None,
94 top_k: None,
95 timeout_seconds: None,
96 reasoning: None,
97 reasoning_effort: None,
98 memory: None,
99 mcp_tools: Vec::new(),
100 }
101 }
102}
103
104impl AgentConfig {
105 pub fn new(name: impl Into<String>, model: impl Into<String>) -> Self {
107 Self {
108 name: name.into(),
109 model: model.into(),
110 ..Default::default()
111 }
112 }
113
114 pub fn from_toml(toml_str: &str) -> Result<Self> {
130 toml::from_str(toml_str)
131 .map_err(|e| Error::ConfigError(format!("Failed to parse TOML: {}", e)))
132 }
133
134 pub fn from_file(path: impl AsRef<Path>) -> Result<Self> {
144 let content = std::fs::read_to_string(path.as_ref()).map_err(|e| {
145 Error::ConfigError(format!(
146 "Failed to read file '{}': {}",
147 path.as_ref().display(),
148 e
149 ))
150 })?;
151 Self::from_toml(&content)
152 }
153
154 pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
156 self.system_prompt = prompt.into();
157 self
158 }
159
160 pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
162 self.api_key = Some(api_key.into());
163 self
164 }
165
166 pub fn with_temperature(mut self, temperature: f32) -> Self {
168 self.temperature = Some(temperature);
169 self
170 }
171
172 pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
174 self.max_tokens = Some(max_tokens);
175 self
176 }
177
178 pub fn with_memory(mut self, memory: MemoryConfig) -> Self {
180 self.memory = Some(memory);
181 self
182 }
183
184 pub fn get_effective_memory_config<'a>(
211 &'a self,
212 fallback: Option<&'a MemoryConfig>,
213 ) -> Option<&'a MemoryConfig> {
214 self.memory.as_ref().or(fallback)
215 }
216}
217
#[cfg(test)]
mod tests {
    use super::*;

    /// Minimal TOML: required fields only; system prompt must default.
    #[test]
    fn test_parse_basic_config() {
        let src = r#"
            name = "test_agent"
            model = "ollama::llama2"
        "#;

        let cfg = AgentConfig::from_toml(src).unwrap();
        assert_eq!(cfg.name, "test_agent");
        assert_eq!(cfg.model, "ollama::llama2");
        assert_eq!(cfg.system_prompt, "You are a helpful AI assistant.");
    }

    /// All common optional fields populated from TOML.
    #[test]
    fn test_parse_full_config() {
        let src = r#"
            name = "researcher"
            model = "openai::gpt-4"
            system_prompt = "You are a research assistant."
            api_key = "sk-test"
            temperature = 0.7
            max_tokens = 2048
        "#;

        let cfg = AgentConfig::from_toml(src).unwrap();
        assert_eq!(cfg.name, "researcher");
        assert_eq!(cfg.model, "openai::gpt-4");
        assert_eq!(cfg.system_prompt, "You are a research assistant.");
        assert_eq!(cfg.api_key, Some("sk-test".to_string()));
        assert_eq!(cfg.temperature, Some(0.7));
        assert_eq!(cfg.max_tokens, Some(2048));
    }

    /// Malformed TOML must surface as an error, not a panic.
    #[test]
    fn test_invalid_toml() {
        let outcome = AgentConfig::from_toml("this is not valid toml [[[");
        assert!(outcome.is_err());
    }

    /// Builder methods chain and each sets its target field.
    #[test]
    fn test_builder_methods() {
        let cfg = AgentConfig::new("agent", "ollama::llama2")
            .with_system_prompt("Custom prompt")
            .with_temperature(0.5)
            .with_max_tokens(1024);

        assert_eq!(cfg.name, "agent");
        assert_eq!(cfg.system_prompt, "Custom prompt");
        assert_eq!(cfg.temperature, Some(0.5));
        assert_eq!(cfg.max_tokens, Some(1024));
    }

    /// `with_memory` stores the supplied memory configuration.
    #[test]
    fn test_with_memory() {
        let mem = MemoryConfig::in_memory().with_max_entries(500);
        let cfg = AgentConfig::new("agent", "ollama::llama2").with_memory(mem);

        assert!(cfg.memory.is_some());
        assert_eq!(cfg.memory.as_ref().unwrap().max_entries, Some(500));
    }

    /// An agent-level memory config wins over the mesh fallback.
    #[test]
    fn test_get_effective_memory_config_with_agent_memory() {
        let mesh_mem = MemoryConfig::in_memory().with_max_entries(1000);
        let agent_mem = MemoryConfig::in_memory().with_max_entries(500);
        let cfg = AgentConfig::new("agent", "ollama::llama2").with_memory(agent_mem);

        let effective = cfg.get_effective_memory_config(Some(&mesh_mem));
        assert!(effective.is_some());
        assert_eq!(effective.unwrap().max_entries, Some(500));
    }

    /// Without an agent-level config, the mesh fallback is used.
    #[test]
    fn test_get_effective_memory_config_fallback_to_mesh() {
        let mesh_mem = MemoryConfig::in_memory().with_max_entries(1000);
        let cfg = AgentConfig::new("agent", "ollama::llama2");

        let effective = cfg.get_effective_memory_config(Some(&mesh_mem));
        assert!(effective.is_some());
        assert_eq!(effective.unwrap().max_entries, Some(1000));
    }

    /// No agent config and no fallback yields `None`.
    #[test]
    fn test_get_effective_memory_config_no_memory() {
        let cfg = AgentConfig::new("agent", "ollama::llama2");

        let effective = cfg.get_effective_memory_config(None);
        assert!(effective.is_none());
    }
}