// Example: 01_basic_agent/01_basic_agent.rs

//! Basic Agent Example
//!
//! This example demonstrates how to create and use a simple agent
//! that can answer questions and process tasks.
//!
//! Run with: cargo run --example 01_basic_agent --manifest-path ceylon/Cargo.toml
use ceylon_next::agent::{Agent, AgentConfig};
use ceylon_next::tasks::{OutputData, TaskRequest};
10
11#[tokio::main]
12async fn main() {
13    println!("🤖 Ceylon Agent - Basic Example\n");
14
15    // Step 1: Create a new agent with a name and model
16    // The model should be one supported by the LLM library (openai, anthropic, ollama, etc.)
17    let mut agent = Agent::new("Assistant", "ollama::gemma3:latest");
18
19    // Step 2: (Optional) Configure the agent with custom settings
20    let config = AgentConfig::new(
21        3,   // max_retries
22        60,  // timeout in seconds
23    );
24    agent.with_config(config);
25
26    // Step 3: (Optional) Customize the system prompt
27    agent.with_system_prompt(
28        "You are a helpful assistant that answers questions accurately and concisely. \
29         Always provide well-structured responses."
30    );
31
32    // Step 4: Create a task request
33    let mut task = TaskRequest::new("What is the capital of France?");
34    task.with_name("Geography Question")
35        .with_description("A simple geography question")
36        .with_priority(5);
37
38    // Step 5: Run the agent with the task
39    println!("📋 Task: {}\n", "What is the capital of France?");
40    let response = agent.run(task).await;
41
42    // Step 6: Handle the response
43    match response.result() {
44        OutputData::Text(answer) => {
45            println!("📝 Agent Response:\n{}\n", answer);
46        }
47        _ => {
48            println!("❌ Unexpected response type");
49        }
50    }
51
52    println!("✅ Example completed successfully!");
53}