use crate::client::{LLMClient, LLMConfig, LLMResponse, UniversalLLMClient};
use crate::react::{ReActConfig, ReActEngine, ReActResult};
use crate::types::{Message as LlmMessage, ToolSpec};
use async_trait::async_trait;
use ceylon_core::action::{ActionInvoker, ActionMetadata, ToolInvoker};
use ceylon_core::agent::{Agent, AgentContext};
use ceylon_core::error::Result;
use ceylon_core::memory::{Memory, MemoryEntry, MemoryQuery};
use ceylon_core::message::Message as CeylonMessage;
use serde_json::{json, Value};
use std::sync::Arc;
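
/// Chat-oriented agent that drives an LLM through a `UniversalLLMClient`,
/// keeps the running conversation history, and can execute registered tools
/// (including built-in memory tools when a `Memory` backend is attached).
///
/// A minimal construction sketch; the `ollama::llama2` model id mirrors the
/// test at the bottom of this file, so substitute your own provider/model:
///
/// ```ignore
/// let agent = LlmAgent::builder("assistant", "ollama::llama2")
///     .with_system_prompt("You are a helpful AI assistant.")
///     .with_temperature(0.7)
///     .build()?;
/// ```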
pub struct LlmAgent {
    name: String,
    llm_client: UniversalLLMClient,
    llm_config: LLMConfig,
    system_prompt: String,
    conversation_history: Vec<LlmMessage>,
    tool_invoker: ToolInvoker,
    memory: Option<Arc<dyn Memory>>,
    react_config: Option<ReActConfig>,
}

impl LlmAgent {
    pub fn builder(name: impl Into<String>, model: impl Into<String>) -> LlmAgentBuilder {
        LlmAgentBuilder::new(name, model)
    }
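
    /// Builds an agent directly from a prepared `LLMConfig`. If a memory
    /// backend is supplied, the built-in memory tools are registered
    /// immediately.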
    pub fn new_with_config(
        name: impl Into<String>,
        config: LLMConfig,
        system_prompt: impl Into<String>,
        memory: Option<Arc<dyn Memory>>,
    ) -> Result<Self> {
        let client = UniversalLLMClient::new_with_config(config.clone())
            .map_err(ceylon_core::error::Error::MeshError)?;

        let mut agent = Self {
            name: name.into(),
            llm_client: client,
            llm_config: config,
            system_prompt: system_prompt.into(),
            conversation_history: Vec::new(),
            tool_invoker: ToolInvoker::default(),
            memory,
            react_config: None,
        };

        if agent.memory.is_some() {
            agent.register_memory_tools();
        }

        Ok(agent)
    }
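
    /// Registers the `save_memory` and `search_memory` actions so the model
    /// can persist and recall information through tool calls.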
    fn register_memory_tools(&mut self) {
        if let Some(memory) = &self.memory {
            self.tool_invoker
                .register(Box::new(SaveMemoryAction::new(memory.clone())));

            self.tool_invoker
                .register(Box::new(SearchMemoryAction::new(memory.clone())));
        }
    }

    fn action_to_tool_spec(action: &ActionMetadata) -> ToolSpec {
        ToolSpec {
            name: action.name.clone(),
            description: action.description.clone(),
            input_schema: action.input_schema.clone(),
        }
    }
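
    /// Enables ReAct-style reasoning; must be called before
    /// `send_message_react`.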
    pub fn with_react(&mut self, config: ReActConfig) {
        self.react_config = Some(config);
    }
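
    /// Runs one full ReAct loop over `message` and returns the engine's
    /// result. Fails if `with_react` has not been called.
    ///
    /// Call-sequence sketch; `react_config` and `ctx` are assumed to already
    /// be in scope:
    ///
    /// ```ignore
    /// agent.with_react(react_config);
    /// let result = agent.send_message_react("summarize the logs", &mut ctx).await?;
    /// ```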
    pub async fn send_message_react(
        &mut self,
        message: impl Into<String>,
        ctx: &mut AgentContext,
    ) -> Result<ReActResult> {
        let content = message.into();

        let react_config = self.react_config.clone().ok_or_else(|| {
            ceylon_core::error::Error::MeshError(
                "ReAct mode not enabled. Call with_react() first".to_string(),
            )
        })?;

        let engine = ReActEngine::new(react_config, None);

        let result = engine
            .execute(
                content,
                &self.llm_client,
                &self.llm_config,
                self.memory.as_ref(),
                ctx,
            )
            .await?;

        Ok(result)
    }
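
    /// Records `message` as a user turn, runs the completion loop (resolving
    /// any intermediate tool calls), and returns the assistant's final reply.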
    pub async fn send_message_and_get_response(
        &mut self,
        message: impl Into<String>,
        ctx: &mut AgentContext,
    ) -> Result<String> {
        let content = message.into();

        self.conversation_history.push(LlmMessage {
            role: "user".to_string(),
            content,
        });

        self.process_with_llm(ctx).await
    }
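
    /// Returns the content of the most recent assistant message, if any.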
    pub fn last_response(&self) -> Option<String> {
        self.conversation_history
            .iter()
            .rev()
            .find(|m| m.role == "assistant")
            .map(|m| m.content.clone())
    }
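
    /// Core completion loop: sends the history (plus a tool spec for every
    /// registered action) to the model, executes any requested tool calls,
    /// and recurses on the enriched history until the model produces a final
    /// answer, which is appended as an assistant turn.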
    async fn process_with_llm(&mut self, ctx: &mut AgentContext) -> Result<String> {
        // On the first turn, prepend the system prompt so it leads the history.
        if self.conversation_history.len() == 1 {
            self.conversation_history.insert(
                0,
                LlmMessage {
                    role: "system".to_string(),
                    content: self.system_prompt.clone(),
                },
            );
        }

        // Advertise every registered action to the model as a tool spec.
        let actions = self.tool_invoker.list_actions();
        let tools: Vec<ToolSpec> = actions.iter().map(Self::action_to_tool_spec).collect();

        let response: LLMResponse<String> = self
            .llm_client
            .complete::<LLMResponse<String>, String>(&self.conversation_history, &tools)
            .await
            .map_err(ceylon_core::error::Error::MeshError)?;

        // When the model requests tools, run them, append the combined results
        // as a tool turn, and recurse so the model can use the output.
        if !response.is_complete && !response.tool_calls.is_empty() {
            let mut tool_results = Vec::new();

            for tool_call in response.tool_calls {
                let result = self
                    .tool_invoker
                    .invoke(&tool_call.name, ctx, tool_call.input)
                    .await?;

                tool_results.push(format!("Tool {}: {}", tool_call.name, result));
            }

            let tool_result_message = LlmMessage {
                role: "tool".to_string(),
                content: tool_results.join("\n"),
            };
            self.conversation_history.push(tool_result_message);

            // Boxed recursion: a recursive async fn needs an indirected future.
            return Box::pin(self.process_with_llm(ctx)).await;
        }

        self.conversation_history.push(LlmMessage {
            role: "assistant".to_string(),
            content: response.content.clone(),
        });

        Ok(response.content)
    }
}

#[async_trait]
impl Agent for LlmAgent {
    fn name(&self) -> String {
        self.name.clone()
    }

    async fn on_message(&mut self, msg: CeylonMessage, ctx: &mut AgentContext) -> Result<()> {
        let content = String::from_utf8(msg.payload).map_err(|e| {
            ceylon_core::error::Error::MeshError(format!(
                "Invalid UTF-8 in message payload: {}",
                e
            ))
        })?;

        self.conversation_history.push(LlmMessage {
            role: "user".to_string(),
            content,
        });

        let _response = self.process_with_llm(ctx).await?;
        Ok(())
    }

    async fn on_generic_message(
        &mut self,
        msg: ceylon_core::message::GenericMessage,
        ctx: &mut AgentContext,
    ) -> Result<ceylon_core::message::GenericResponse> {
        let response_text = self.send_message_and_get_response(msg.content, ctx).await?;
        Ok(ceylon_core::message::GenericResponse::new(response_text))
    }

    fn tool_invoker(&self) -> Option<&ToolInvoker> {
        Some(&self.tool_invoker)
    }

    fn tool_invoker_mut(&mut self) -> Option<&mut ToolInvoker> {
        Some(&mut self.tool_invoker)
    }
}
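
/// Fluent builder for [`LlmAgent`]; see the chained example on that type.
/// Defaults to the system prompt "You are a helpful AI assistant." with no
/// temperature or token limit set.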
pub struct LlmAgentBuilder {
    name: String,
    model: String,
    api_key: Option<String>,
    system_prompt: String,
    temperature: Option<f32>,
    max_tokens: Option<u32>,
    memory: Option<Arc<dyn Memory>>,
}

impl LlmAgentBuilder {
    pub fn new(name: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            model: model.into(),
            api_key: None,
            system_prompt: "You are a helpful AI assistant.".to_string(),
            temperature: None,
            max_tokens: None,
            memory: None,
        }
    }

    pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
        self.api_key = Some(api_key.into());
        self
    }

    pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
        self.system_prompt = prompt.into();
        self
    }

    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature);
        self
    }

    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }

    pub fn with_memory(mut self, memory: Arc<dyn Memory>) -> Self {
        self.memory = Some(memory);
        self
    }

    pub fn build(self) -> Result<LlmAgent> {
        let mut config = LLMConfig::new(self.model);

        if let Some(api_key) = self.api_key {
            config = config.with_api_key(api_key);
        }

        if let Some(temperature) = self.temperature {
            config = config.with_temperature(temperature);
        }

        if let Some(max_tokens) = self.max_tokens {
            config = config.with_max_tokens(max_tokens);
        }

        LlmAgent::new_with_config(self.name, config, self.system_prompt, self.memory)
    }
}
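
/// Built-in `save_memory` tool: stores a string in the attached memory
/// backend and reports the new entry's id.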
struct SaveMemoryAction {
    memory: Arc<dyn Memory>,
    metadata: ActionMetadata,
}

impl SaveMemoryAction {
    fn new(memory: Arc<dyn Memory>) -> Self {
        Self {
            memory,
            metadata: ActionMetadata {
                name: "save_memory".to_string(),
                description: "Save information to memory for later retrieval.".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "content": {
                            "type": "string",
                            "description": "The information to save."
                        }
                    },
                    "required": ["content"]
                }),
                output_schema: Some(json!({
                    "type": "object",
                    "properties": {
                        "status": { "type": "string" },
                        "id": { "type": "string" }
                    }
                })),
            },
        }
    }
}

#[async_trait]
impl ActionInvoker for SaveMemoryAction {
    async fn execute(&self, _ctx: &mut AgentContext, inputs: Value) -> Result<Value> {
        let content = inputs
            .get("content")
            .and_then(|v| v.as_str())
            .ok_or_else(|| {
                ceylon_core::error::Error::ActionExecutionError(
                    "Missing 'content' in inputs".to_string(),
                )
            })?;

        let entry = MemoryEntry::new(content);
        let id = self.memory.store(entry).await?;

        Ok(json!({ "status": "success", "id": id }))
    }

    fn metadata(&self) -> &ActionMetadata {
        &self.metadata
    }
}
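
/// Built-in `search_memory` tool: runs a semantic query against the memory
/// backend and returns up to `limit` (default 5) matching entries.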
struct SearchMemoryAction {
    memory: Arc<dyn Memory>,
    metadata: ActionMetadata,
}

impl SearchMemoryAction {
    fn new(memory: Arc<dyn Memory>) -> Self {
        Self {
            memory,
            metadata: ActionMetadata {
                name: "search_memory".to_string(),
                description: "Search memory for relevant information.".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": "The query to search for."
                        },
                        "limit": {
                            "type": "integer",
                            "description": "Max number of results (default 5)."
                        }
                    },
                    "required": ["query"]
                }),
                output_schema: Some(json!({
                    "type": "object",
                    "properties": {
                        "results": {
                            "type": "array",
                            "items": { "type": "string" }
                        }
                    }
                })),
            },
        }
    }
}

#[async_trait]
impl ActionInvoker for SearchMemoryAction {
    async fn execute(&self, _ctx: &mut AgentContext, inputs: Value) -> Result<Value> {
        let query_str = inputs
            .get("query")
            .and_then(|v| v.as_str())
            .ok_or_else(|| {
                ceylon_core::error::Error::ActionExecutionError(
                    "Missing 'query' in inputs".to_string(),
                )
            })?;

        let limit = inputs
            .get("limit")
            .and_then(|v| v.as_u64())
            .map(|v| v as usize)
            .unwrap_or(5);

        let mut query = MemoryQuery::new().with_limit(limit);
        query.semantic_query = Some(query_str.to_string());

        let results = self.memory.search(query).await?;
        let result_strings: Vec<String> = results.into_iter().map(|e| e.content).collect();

        Ok(json!({ "results": result_strings }))
    }

    fn metadata(&self) -> &ActionMetadata {
        &self.metadata
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_llm_agent_builder() {
        let builder = LlmAgent::builder("test", "ollama::llama2")
            .with_system_prompt("Custom prompt")
            .with_temperature(0.7)
            .with_max_tokens(1000);

        assert_eq!(builder.name, "test");
        assert_eq!(builder.model, "ollama::llama2");
        assert_eq!(builder.system_prompt, "Custom prompt");
        assert_eq!(builder.temperature, Some(0.7));
        assert_eq!(builder.max_tokens, Some(1000));
    }
}