ceylon_runtime/config/agent_config.rs

use crate::core::error::{Error, Result};
use serde::{Deserialize, Serialize};
use std::path::Path;

/// Configuration for a single agent: the model to use, the system prompt, and
/// optional provider and sampling settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
    /// Name used to identify this agent.
    pub name: String,

    /// Model identifier, e.g. "ollama::llama2" or "openai::gpt-4".
    pub model: String,

    /// System prompt; defaults to a generic assistant prompt.
    #[serde(default = "default_system_prompt")]
    pub system_prompt: String,

    /// Optional API key for the model provider.
    #[serde(default)]
    pub api_key: Option<String>,

    /// Optional sampling temperature.
    #[serde(default)]
    pub temperature: Option<f32>,

    /// Optional maximum number of tokens to generate.
    #[serde(default)]
    pub max_tokens: Option<u32>,

    /// Optional override for the provider's base URL.
    #[serde(default)]
    pub base_url: Option<String>,

    /// Optional nucleus-sampling (top-p) value.
    #[serde(default)]
    pub top_p: Option<f32>,

    /// Optional top-k sampling value.
    #[serde(default)]
    pub top_k: Option<u32>,

    /// Optional request timeout in seconds.
    #[serde(default)]
    pub timeout_seconds: Option<u64>,

    /// Optional flag to enable reasoning mode.
    #[serde(default)]
    pub reasoning: Option<bool>,

    /// Optional reasoning effort level.
    #[serde(default)]
    pub reasoning_effort: Option<String>,
}
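
// Example of the TOML shape accepted by `AgentConfig::from_toml` / `from_file`.
// Only `name` and `model` are required; every other field falls back to its
// serde default. The model string and key below are placeholder values taken
// from the tests, not a real provider configuration.
//
//     name = "researcher"
//     model = "openai::gpt-4"
//     system_prompt = "You are a research assistant."
//     api_key = "sk-test"
//     temperature = 0.7
//     max_tokens = 2048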

fn default_system_prompt() -> String {
    "You are a helpful AI assistant.".to_string()
}

impl Default for AgentConfig {
    fn default() -> Self {
        Self {
            name: String::new(),
            model: String::new(),
            system_prompt: default_system_prompt(),
            api_key: None,
            temperature: None,
            max_tokens: None,
            base_url: None,
            top_p: None,
            top_k: None,
            timeout_seconds: None,
            reasoning: None,
            reasoning_effort: None,
        }
    }
}

impl AgentConfig {
    /// Create a config with the given name and model; all other fields take
    /// their defaults.
    pub fn new(name: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            model: model.into(),
            ..Default::default()
        }
    }

    /// Parse a config from a TOML string.
    pub fn from_toml(toml_str: &str) -> Result<Self> {
        toml::from_str(toml_str)
            .map_err(|e| Error::ConfigError(format!("Failed to parse TOML: {}", e)))
    }

    /// Read and parse a config from a TOML file on disk.
    pub fn from_file(path: impl AsRef<Path>) -> Result<Self> {
        let content = std::fs::read_to_string(path.as_ref()).map_err(|e| {
            Error::ConfigError(format!(
                "Failed to read file '{}': {}",
                path.as_ref().display(),
                e
            ))
        })?;
        Self::from_toml(&content)
    }

    /// Set the system prompt.
    pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
        self.system_prompt = prompt.into();
        self
    }

    /// Set the API key.
    pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
        self.api_key = Some(api_key.into());
        self
    }

    /// Set the sampling temperature.
    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature);
        self
    }

    /// Set the maximum number of tokens to generate.
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }
}
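
// Usage sketch: load a config from disk or build one programmatically. The
// path and values here are placeholders; any readable TOML file with the
// shape shown above works.
//
//     let from_disk = AgentConfig::from_file("config/researcher.toml")?;
//
//     let built = AgentConfig::new("researcher", "openai::gpt-4")
//         .with_system_prompt("You are a research assistant.")
//         .with_temperature(0.7)
//         .with_max_tokens(2048);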

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_basic_config() {
        let toml = r#"
            name = "test_agent"
            model = "ollama::llama2"
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.name, "test_agent");
        assert_eq!(config.model, "ollama::llama2");
        assert_eq!(config.system_prompt, "You are a helpful AI assistant.");
    }

    #[test]
    fn test_parse_full_config() {
        let toml = r#"
            name = "researcher"
            model = "openai::gpt-4"
            system_prompt = "You are a research assistant."
            api_key = "sk-test"
            temperature = 0.7
            max_tokens = 2048
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.name, "researcher");
        assert_eq!(config.model, "openai::gpt-4");
        assert_eq!(config.system_prompt, "You are a research assistant.");
        assert_eq!(config.api_key, Some("sk-test".to_string()));
        assert_eq!(config.temperature, Some(0.7));
        assert_eq!(config.max_tokens, Some(2048));
    }

    #[test]
    fn test_invalid_toml() {
        let toml = "this is not valid toml [[[";
        let result = AgentConfig::from_toml(toml);
        assert!(result.is_err());
    }

    #[test]
    fn test_builder_methods() {
        let config = AgentConfig::new("agent", "ollama::llama2")
            .with_system_prompt("Custom prompt")
            .with_temperature(0.5)
            .with_max_tokens(1024);

        assert_eq!(config.name, "agent");
        assert_eq!(config.system_prompt, "Custom prompt");
        assert_eq!(config.temperature, Some(0.5));
        assert_eq!(config.max_tokens, Some(1024));
    }
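
    // Additional illustrative checks: optional fields omitted from the TOML
    // should deserialize to `None`, and a nonexistent path (a placeholder
    // assumed not to exist on the test machine) should surface an error
    // rather than panic.
    #[test]
    fn test_optional_fields_default_to_none() {
        let toml = r#"
            name = "minimal"
            model = "ollama::llama2"
        "#;

        let config = AgentConfig::from_toml(toml).unwrap();
        assert_eq!(config.api_key, None);
        assert_eq!(config.temperature, None);
        assert_eq!(config.max_tokens, None);
        assert_eq!(config.base_url, None);
        assert_eq!(config.timeout_seconds, None);
    }

    #[test]
    fn test_from_file_missing_path_is_error() {
        let result = AgentConfig::from_file("no/such/dir/agent_config.toml");
        assert!(result.is_err());
    }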
}