ceylon_runtime/config/agent_config.rs

//! Agent configuration from TOML files.

use crate::core::error::{Error, Result};
use serde::{Deserialize, Serialize};
use std::path::Path;

/// Configuration for a single LLM agent.
///
/// This struct can be loaded from a TOML file and used to create an `LlmAgent`.
///
/// # Example
///
/// ```toml
/// name = "my_agent"
/// model = "ollama::gemma3:latest"
/// system_prompt = "You are a helpful assistant."
/// temperature = 0.7
/// max_tokens = 1024
/// ```
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
    /// Unique name for the agent
    pub name: String,

    /// Model in "provider::model" format (e.g., "ollama::gemma3:latest")
    pub model: String,

    /// System prompt for the agent
    #[serde(default = "default_system_prompt")]
    pub system_prompt: String,

    /// API key for the LLM provider (optional, can use env vars)
    #[serde(default)]
    pub api_key: Option<String>,

    /// Temperature for generation (0.0 - 2.0)
    #[serde(default)]
    pub temperature: Option<f32>,

    /// Maximum tokens to generate
    #[serde(default)]
    pub max_tokens: Option<u32>,

    /// Base URL override for the LLM provider
    #[serde(default)]
    pub base_url: Option<String>,

    /// Top-p (nucleus) sampling parameter
    #[serde(default)]
    pub top_p: Option<f32>,

    /// Top-k sampling parameter
    #[serde(default)]
    pub top_k: Option<u32>,

    /// Request timeout in seconds
    #[serde(default)]
    pub timeout_seconds: Option<u64>,

    /// Enable reasoning mode (for supported providers)
    #[serde(default)]
    pub reasoning: Option<bool>,

    /// Reasoning effort level
    #[serde(default)]
    pub reasoning_effort: Option<String>,
}

fn default_system_prompt() -> String {
    "You are a helpful AI assistant.".to_string()
}

impl Default for AgentConfig {
    fn default() -> Self {
        Self {
            name: String::new(),
            model: String::new(),
            system_prompt: default_system_prompt(),
            api_key: None,
            temperature: None,
            max_tokens: None,
            base_url: None,
            top_p: None,
            top_k: None,
            timeout_seconds: None,
            reasoning: None,
            reasoning_effort: None,
        }
    }
}

impl AgentConfig {
    /// Create a new `AgentConfig` with the required fields.
    pub fn new(name: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            model: model.into(),
            ..Default::default()
        }
    }

    /// Parse an `AgentConfig` from a TOML string.
    ///
    /// # Example
    ///
    /// ```rust
    /// use ceylon_runtime::config::AgentConfig;
    ///
    /// let toml = r#"
    /// name = "my_agent"
    /// model = "ollama::llama2"
    /// "#;
    ///
    /// let config = AgentConfig::from_toml(toml).unwrap();
    /// assert_eq!(config.name, "my_agent");
    /// ```
    pub fn from_toml(toml_str: &str) -> Result<Self> {
        toml::from_str(toml_str)
            .map_err(|e| Error::ConfigError(format!("Failed to parse TOML: {}", e)))
    }

    /// Load an `AgentConfig` from a TOML file.
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use ceylon_runtime::config::AgentConfig;
    ///
    /// let config = AgentConfig::from_file("agent.toml").unwrap();
    /// ```
    pub fn from_file(path: impl AsRef<Path>) -> Result<Self> {
        let content = std::fs::read_to_string(path.as_ref()).map_err(|e| {
            Error::ConfigError(format!(
                "Failed to read file '{}': {}",
                path.as_ref().display(),
                e
            ))
        })?;
        Self::from_toml(&content)
    }

    /// Set the system prompt.
    pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
        self.system_prompt = prompt.into();
        self
    }

    /// Set the API key.
    pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
        self.api_key = Some(api_key.into());
        self
    }

    /// Set the temperature.
    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature);
        self
    }

    /// Set the maximum number of tokens to generate.
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_basic_config() {
        let toml = r#"
            name = "test_agent"
            model = "ollama::llama2"
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.name, "test_agent");
        assert_eq!(config.model, "ollama::llama2");
        assert_eq!(config.system_prompt, "You are a helpful AI assistant.");
    }

    #[test]
    fn test_parse_full_config() {
        let toml = r#"
            name = "researcher"
            model = "openai::gpt-4"
            system_prompt = "You are a research assistant."
            api_key = "sk-test"
            temperature = 0.7
            max_tokens = 2048
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.name, "researcher");
        assert_eq!(config.model, "openai::gpt-4");
        assert_eq!(config.system_prompt, "You are a research assistant.");
        assert_eq!(config.api_key, Some("sk-test".to_string()));
        assert_eq!(config.temperature, Some(0.7));
        assert_eq!(config.max_tokens, Some(2048));
    }
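
    // A sketch of coverage for the remaining optional knobs (top_p, top_k,
    // timeout_seconds, reasoning, reasoning_effort); all of these are
    // `#[serde(default)]` options on the struct above, so they should
    // deserialize straight from TOML. The concrete values here are
    // arbitrary samples, not validated ranges.
    #[test]
    fn test_parse_sampling_and_reasoning_fields() {
        let toml = r#"
            name = "sampler"
            model = "ollama::llama2"
            top_p = 0.9
            top_k = 40
            timeout_seconds = 30
            reasoning = true
            reasoning_effort = "high"
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.top_p, Some(0.9));
        assert_eq!(config.top_k, Some(40));
        assert_eq!(config.timeout_seconds, Some(30));
        assert_eq!(config.reasoning, Some(true));
        assert_eq!(config.reasoning_effort, Some("high".to_string()));
    }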

    #[test]
    fn test_invalid_toml() {
        let toml = "this is not valid toml [[[";
        let result = AgentConfig::from_toml(toml);
        assert!(result.is_err());
    }
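
    // A sketch of the failure path for missing required fields: `name` and
    // `model` carry no serde defaults, so a document that omits one of them
    // should fail to deserialize rather than produce an empty string.
    #[test]
    fn test_missing_required_field() {
        let toml = r#"model = "ollama::llama2""#;
        let result = AgentConfig::from_toml(toml);
        assert!(result.is_err());
    }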

    #[test]
    fn test_builder_methods() {
        let config = AgentConfig::new("agent", "ollama::llama2")
            .with_system_prompt("Custom prompt")
            .with_temperature(0.5)
            .with_max_tokens(1024);

        assert_eq!(config.name, "agent");
        assert_eq!(config.system_prompt, "Custom prompt");
        assert_eq!(config.temperature, Some(0.5));
        assert_eq!(config.max_tokens, Some(1024));
    }
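
    // A sketch of `from_file` behavior, assuming the test environment has a
    // writable temp dir. `from_file` maps both I/O and parse failures into
    // `Error::ConfigError`, so the missing-path case asserts only `is_err`.
    #[test]
    fn test_from_file_round_trip() {
        let path = std::env::temp_dir().join("ceylon_agent_config_test.toml");
        std::fs::write(&path, "name = \"file_agent\"\nmodel = \"ollama::llama2\"\n").unwrap();

        let config = AgentConfig::from_file(&path).unwrap();
        assert_eq!(config.name, "file_agent");
        assert_eq!(config.model, "ollama::llama2");

        let _ = std::fs::remove_file(&path);
    }

    #[test]
    fn test_from_file_missing_path() {
        // A nonexistent path should surface as an error, not a panic.
        let result = AgentConfig::from_file("definitely_missing_config.toml");
        assert!(result.is_err());
    }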
}