// Example: llm_multi_agent_mesh/llm_multi_agent_mesh.rs

1//! LLM Multi-Agent Mesh Example - Two LLM agents communicating via LocalMesh
2//!
3//! This example demonstrates how to create two LLM agents that communicate
4//! with each other through Ceylon's LocalMesh infrastructure.
5//!
6//! # Architecture
7//!
8//! - **Researcher Agent**: Uses Ollama gemma3:latest to research topics
9//! - **Summarizer Agent**: Uses Ollama gemma3:latest to summarize research
10//!
11//! The Researcher sends its findings to the Summarizer, which then creates
12//! a concise summary.
13//!
14//! # Prerequisites
15//!
16//! 1. Install Ollama from https://ollama.ai
17//! 2. Pull the gemma3 model: `ollama pull gemma3:latest`
18//! 3. Ensure Ollama is running (default: http://127.0.0.1:11434)
19//!
20//! # Running
21//!
22//! ```bash
23//! cargo run --example llm_multi_agent_mesh
24//! ```
25
26use async_trait::async_trait;
27use ceylon_runtime::core::agent::{Agent, AgentContext};
28use ceylon_runtime::core::error::Result;
29use ceylon_runtime::core::mesh::Mesh;
30use ceylon_runtime::core::message::Message;
31use ceylon_runtime::llm::LlmAgent;
32use ceylon_runtime::local::LocalMesh;
33use std::sync::Arc;
34use std::time::Duration;
35use tokio::sync::Notify;
36
// --- Researcher Agent Wrapper ---
/// Wraps an [`LlmAgent`] so that research requests arriving over the mesh can
/// be answered with an LLM call and forwarded to the "summarizer" agent.
struct ResearcherAgent {
    // LLM backend ("ollama::gemma3:latest", see `new`) that performs the research.
    llm_agent: LlmAgent,
    // Shared mesh handle used to send results (or error reports) onward.
    mesh: Arc<LocalMesh>,
}
43
44impl ResearcherAgent {
45    fn new(mesh: Arc<LocalMesh>) -> Result<Self> {
46        let llm_agent = LlmAgent::builder("researcher", "ollama::gemma3:latest")
47            .with_system_prompt(
48                "You are a research assistant. When given a topic, provide detailed \
49                 information and key facts about it. Be thorough but focused. \
50                 Limit your response to 3-4 paragraphs.",
51            )
52            .with_temperature(0.7)
53            .with_max_tokens(1024)
54            .build()?;
55
56        Ok(Self { llm_agent, mesh })
57    }
58}
59
60#[async_trait]
61impl Agent for ResearcherAgent {
62    fn name(&self) -> String {
63        "researcher".to_string()
64    }
65
66    async fn on_start(&mut self, _ctx: &mut AgentContext) -> Result<()> {
67        println!("[Researcher] Started and ready for research tasks.");
68        Ok(())
69    }
70
71    async fn on_message(&mut self, msg: Message, ctx: &mut AgentContext) -> Result<()> {
72        let topic = String::from_utf8_lossy(&msg.payload);
73        println!("\n[Researcher] Received research request: {}", topic);
74
75        // Use LLM to research the topic
76        let research_prompt = format!(
77            "Research the following topic and provide key information: {}",
78            topic
79        );
80
81        match self
82            .llm_agent
83            .send_message_and_get_response(&research_prompt, ctx)
84            .await
85        {
86            Ok(research_result) => {
87                println!("[Researcher] Research complete. Sending to summarizer...");
88
89                // Send research results to the summarizer
90                let research_msg =
91                    Message::new("research_result", research_result.into_bytes(), self.name());
92                self.mesh.send(research_msg, "summarizer").await?;
93            }
94            Err(e) => {
95                eprintln!("[Researcher] Error during research: {}", e);
96                // Send error message to summarizer
97                let error_msg = Message::new(
98                    "error",
99                    format!("Research failed: {}", e).into_bytes(),
100                    self.name(),
101                );
102                self.mesh.send(error_msg, "summarizer").await?;
103            }
104        }
105
106        Ok(())
107    }
108}
109
// --- Summarizer Agent Wrapper ---
/// Wraps an [`LlmAgent`] that condenses research content into bullet points
/// and signals the waiting `main` task when the workflow finishes.
struct SummarizerAgent {
    // LLM backend ("ollama::gemma3:latest", see `new`) used for summarization.
    llm_agent: LlmAgent,
    // Notified once a summary (or a forwarded error) has been handled, so the
    // select! in `main` can stop waiting.
    completion_notify: Arc<Notify>,
}
116
117impl SummarizerAgent {
118    fn new(completion_notify: Arc<Notify>) -> Result<Self> {
119        let llm_agent = LlmAgent::builder("summarizer", "ollama::gemma3:latest")
120            .with_system_prompt(
121                "You are a summarization expert. When given research content, \
122                 create a clear and concise summary with bullet points highlighting \
123                 the most important facts. Keep the summary to 5-7 bullet points.",
124            )
125            .with_temperature(0.5)
126            .with_max_tokens(512)
127            .build()?;
128
129        Ok(Self {
130            llm_agent,
131            completion_notify,
132        })
133    }
134}
135
136#[async_trait]
137impl Agent for SummarizerAgent {
138    fn name(&self) -> String {
139        "summarizer".to_string()
140    }
141
142    async fn on_start(&mut self, _ctx: &mut AgentContext) -> Result<()> {
143        println!("[Summarizer] Started and ready to summarize.");
144        Ok(())
145    }
146
147    async fn on_message(&mut self, msg: Message, ctx: &mut AgentContext) -> Result<()> {
148        let content = String::from_utf8_lossy(&msg.payload);
149        println!("\n[Summarizer] Received content from {}", msg.sender);
150
151        if msg.topic == "error" {
152            println!("[Summarizer] Received error: {}", content);
153            self.completion_notify.notify_one();
154            return Ok(());
155        }
156
157        // Use LLM to summarize the research
158        let summary_prompt = format!(
159            "Please summarize the following research content into clear bullet points:\n\n{}",
160            content
161        );
162
163        match self
164            .llm_agent
165            .send_message_and_get_response(&summary_prompt, ctx)
166            .await
167        {
168            Ok(summary) => {
169                println!("\n========================================");
170                println!("           FINAL SUMMARY");
171                println!("========================================\n");
172                println!("{}", summary);
173                println!("\n========================================\n");
174            }
175            Err(e) => {
176                eprintln!("[Summarizer] Error during summarization: {}", e);
177            }
178        }
179
180        // Signal completion
181        self.completion_notify.notify_one();
182        Ok(())
183    }
184}
185
// --- Coordinator Agent ---
/// Kicks off the workflow from `on_start` by sending the research topic to
/// the "researcher" agent; it holds no LLM of its own.
struct CoordinatorAgent {
    // Shared mesh handle used to deliver the initial task message.
    mesh: Arc<LocalMesh>,
    // Topic text forwarded verbatim (as bytes) to the researcher.
    topic: String,
}
192
193#[async_trait]
194impl Agent for CoordinatorAgent {
195    fn name(&self) -> String {
196        "coordinator".to_string()
197    }
198
199    async fn on_start(&mut self, _ctx: &mut AgentContext) -> Result<()> {
200        println!("[Coordinator] Starting multi-agent workflow...");
201        println!("[Coordinator] Topic: {}\n", self.topic);
202
203        // Small delay to ensure all agents are ready
204        tokio::time::sleep(Duration::from_millis(100)).await;
205
206        // Send topic to researcher
207        let task_msg = Message::new(
208            "research_topic",
209            self.topic.as_bytes().to_vec(),
210            self.name(),
211        );
212        self.mesh.send(task_msg, "researcher").await?;
213
214        Ok(())
215    }
216}
217
218#[tokio::main]
219async fn main() -> Result<()> {
220    println!("=== Ceylon Runtime - LLM Multi-Agent Mesh Example ===\n");
221    println!("This example demonstrates two LLM agents communicating via LocalMesh:");
222    println!("  1. Researcher Agent - Researches a given topic");
223    println!("  2. Summarizer Agent - Summarizes the research findings\n");
224
225    // 1. Create the Mesh
226    let mesh = Arc::new(LocalMesh::new("llm_mesh"));
227
228    // 2. Create completion notifier
229    let completion = Arc::new(Notify::new());
230
231    // 3. Create LLM-powered agents
232    let researcher = match ResearcherAgent::new(mesh.clone()) {
233        Ok(agent) => agent,
234        Err(e) => {
235            eprintln!("Failed to create Researcher agent: {}", e);
236            eprintln!("Make sure Ollama is running and gemma3:latest is available.");
237            return Err(e);
238        }
239    };
240
241    let summarizer = match SummarizerAgent::new(completion.clone()) {
242        Ok(agent) => agent,
243        Err(e) => {
244            eprintln!("Failed to create Summarizer agent: {}", e);
245            eprintln!("Make sure Ollama is running and gemma3:latest is available.");
246            return Err(e);
247        }
248    };
249
250    let coordinator = CoordinatorAgent {
251        mesh: mesh.clone(),
252        topic: "The benefits of Rust programming language for system development".to_string(),
253    };
254
255    // 4. Register agents with the mesh
256    mesh.add_agent(Box::new(researcher)).await?;
257    mesh.add_agent(Box::new(summarizer)).await?;
258    mesh.add_agent(Box::new(coordinator)).await?;
259
260    println!("✓ All agents registered successfully\n");
261
262    // 5. Start the mesh (this triggers on_start for all agents)
263    mesh.start().await?;
264
265    // 6. Wait for the workflow to complete
266    println!("Waiting for multi-agent workflow to complete...\n");
267    tokio::select! {
268        _ = completion.notified() => {
269            println!("Multi-agent workflow completed successfully!");
270        }
271        _ = tokio::time::sleep(Duration::from_secs(120)) => {
272            println!("Timeout waiting for workflow completion.");
273        }
274    }
275
276    // 7. Stop the mesh
277    mesh.stop().await?;
278
279    println!("\n=== Example finished ===");
280    Ok(())
281}