llm_ollama/
llm_ollama.rs

//! LLM Ollama Example - Demonstrates using Ollama with the gemma3:latest model
//!
//! This example shows how to create an LLM agent that uses Ollama
//! with the gemma3:latest model for local AI inference.
//!
//! # Prerequisites
//!
//! 1. Install Ollama from https://ollama.ai
//! 2. Pull the gemma3 model: `ollama pull gemma3:latest`
//! 3. Ensure Ollama is running (default: http://127.0.0.1:11434); a quick check is shown below
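//!
//! To verify that the server is reachable and that the model has been pulled, you can
//! query Ollama's model-listing endpoint (optional sanity check):
//!
//! ```bash
//! curl http://127.0.0.1:11434/api/tags
//! ```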
//!
//! # Running
//!
//! ```bash
//! cargo run --example llm_ollama
//! ```

use ceylon_runtime::core::agent::AgentContext;
use ceylon_runtime::core::error::Result;
use ceylon_runtime::llm::LlmAgent;

#[tokio::main]
async fn main() -> Result<()> {
    println!("=== Ceylon Runtime - LLM Ollama Example ===\n");

    // Create an LLM agent using Ollama with the gemma3:latest model
    // No API key is required for Ollama (local inference)
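    // Note: the model string "ollama::gemma3:latest" appears to pair a provider prefix
    // ("ollama::", selecting the local Ollama backend) with the Ollama model tag ("gemma3:latest")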
    let mut agent = LlmAgent::builder("gemma_agent", "ollama::gemma3:latest")
        .with_system_prompt(
            "You are a helpful AI assistant. Be concise and informative in your responses.",
        )
        .with_temperature(0.7)
        .with_max_tokens(1024)
        .build()?;

    println!("✓ LLM Agent created successfully with Ollama gemma3:latest\n");

    // Create an agent context for the conversation
    let mut ctx = AgentContext::new("gemma_demo_mesh".to_string(), None);

    // Example 1: Simple greeting
    println!("--- Example 1: Simple Greeting ---");
    let prompt1 = "Hello! What are you capable of?";
    println!("User: {}", prompt1);

    match agent.send_message_and_get_response(prompt1, &mut ctx).await {
        Ok(response) => {
            println!("Assistant: {}\n", response);
        }
        Err(e) => {
            eprintln!("Error: {}\n", e);
            eprintln!("Make sure Ollama is running and the gemma3:latest model is available.");
            eprintln!("Pull the model with: ollama pull gemma3:latest");
            return Err(e);
        }
    }

    // Example 2: Technical question
    println!("--- Example 2: Technical Question ---");
    let prompt2 = "Explain what an AI agent is in 2-3 sentences.";
    println!("User: {}", prompt2);

    match agent.send_message_and_get_response(prompt2, &mut ctx).await {
        Ok(response) => {
            println!("Assistant: {}\n", response);
        }
        Err(e) => {
            eprintln!("Error: {}\n", e);
            return Err(e);
        }
    }

    // Example 3: Creative task
    println!("--- Example 3: Creative Task ---");
    let prompt3 = "Write a haiku about programming.";
    println!("User: {}", prompt3);

    match agent.send_message_and_get_response(prompt3, &mut ctx).await {
        Ok(response) => {
            println!("Assistant: {}\n", response);
        }
        Err(e) => {
            eprintln!("Error: {}\n", e);
            return Err(e);
        }
    }

    println!("=== Example completed successfully! ===");
    Ok(())
}