// 08_llm_providers/08_llm_providers.rs
use ceylon_next::agent::Agent;
use ceylon_next::llm::LLMConfig;
use ceylon_next::tasks::TaskRequest;

24#[tokio::main]
25async fn main() -> Result<(), Box<dyn std::error::Error>> {
26 println!("š Ceylon LLM Provider Examples\n");
27
28 println!("{}", "=".repeat(60));
30 println!("Example 0: API Key Handling");
31 println!("{}", "=".repeat(60));
32
33 api_key_handling().await?;
34
35 println!("\n{}", "=".repeat(60));
37 println!("Example 1: Basic Provider Usage");
38 println!("{}", "=".repeat(60));
39
40 basic_provider_usage().await?;
41
42 println!("\n{}", "=".repeat(60));
44 println!("Example 2: Advanced Configuration with LLMConfig");
45 println!("{}", "=".repeat(60));
46
47 advanced_configuration().await?;
48
49 println!("\n{}", "=".repeat(60));
51 println!("Example 3: Provider-Specific Features");
52 println!("{}", "=".repeat(60));
53
54 provider_specific_features().await?;
55
56 println!("\n{}", "=".repeat(60));
58 println!("Example 4: Comparing Providers");
59 println!("{}", "=".repeat(60));
60
61 compare_providers().await?;
62
63 Ok(())
64}
65
/// Documents how API keys are discovered and supplied to agents.
///
/// Purely informational: prints the supported environment variables and the
/// four ways to provide a key. The only runtime check is whether
/// `OPENAI_API_KEY` is currently set, which selects which closing note is
/// printed. Always returns `Ok(())`.
async fn api_key_handling() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n✨ API keys are automatically loaded from environment variables:\n");

    println!("📋 Supported Environment Variables:");
    println!("  • OPENAI_API_KEY - For OpenAI/GPT providers");
    println!("  • ANTHROPIC_API_KEY - For Anthropic/Claude providers");
    println!("  • GROQ_API_KEY - For Groq provider");
    println!("  • GOOGLE_API_KEY - For Google/Gemini providers");
    println!("  • DEEPSEEK_API_KEY - For DeepSeek provider");
    println!("  • XAI_API_KEY - For xAI provider");
    println!("  • AZURE_OPENAI_API_KEY - For Azure OpenAI provider");
    println!("  • MISTRAL_API_KEY - For Mistral provider");
    println!("  • COHERE_API_KEY - For Cohere provider");
    println!("  • OPENROUTER_API_KEY - For OpenRouter provider");
    println!("  • ELEVENLABS_API_KEY - For ElevenLabs provider");
    println!("  • PHIND_API_KEY - For Phind provider");
    println!("  • OLLAMA_URL - For Ollama base URL (optional)\n");

    println!("🔑 Four ways to provide API keys:\n");

    println!("1. Environment Variable with Agent::new() (Simple):");
    println!("   let agent = Agent::new(\"Assistant\", \"openai::gpt-4\");");
    println!("   // Automatically uses OPENAI_API_KEY from environment\n");

    println!("2. Explicit API Key with Agent::new_with_config() (Recommended):");
    println!("   let config = LLMConfig::new(\"openai::gpt-4\")");
    println!("       .with_api_key(\"your-api-key\")");
    println!("       .with_temperature(0.7);");
    println!("   let agent = Agent::new_with_config(\"Assistant\", config)?;\n");

    println!("3. Explicit API Key with Agent::with_llm_config() (Alternative):");
    println!("   let config = LLMConfig::new(\"openai::gpt-4\")");
    println!("       .with_api_key(\"your-api-key\");");
    println!("   let mut agent = Agent::new(\"Assistant\", \"openai::gpt-4\");");
    println!("   agent.with_llm_config(config)?;\n");

    println!("4. No API Key (for local providers like Ollama):");
    println!("   let agent = Agent::new(\"Assistant\", \"ollama::llama3.2\");");
    println!("   // No API key required!\n");

    println!("⚠️ Error Handling:");
    println!("   If you try to use a provider without setting its API key,");
    println!("   you'll get a helpful error message:\n");

    println!("   Example error message format:");
    // Only show the sample error when the key is actually absent, so the
    // output reflects the reader's real environment.
    if std::env::var("OPENAI_API_KEY").is_err() {
        println!("   ❌ 'API key required for provider 'openai'.");
        println!("      Please set the OPENAI_API_KEY environment variable");
        println!("      or pass the API key explicitly.'\n");
    } else {
        println!("   ✅ OPENAI_API_KEY is set in environment\n");
    }

    Ok(())
}
123
124async fn basic_provider_usage() -> Result<(), Box<dyn std::error::Error>> {
126 println!("\n⨠Using different providers with simple syntax:\n");
127
128 println!("š OpenAI GPT-4:");
130 let mut openai_agent = Agent::new("OpenAI Assistant", "openai::gpt-4");
131 let task1 = TaskRequest::new("What is 2 + 2? Answer briefly.");
132 let response = openai_agent.run(task1).await;
133 println!(" Response: {:?}\n", response.result());
134
135 println!("š Anthropic Claude:");
137 let mut anthropic_agent = Agent::new("Claude Assistant", "anthropic::claude-3-sonnet-20240229");
138 let task2 = TaskRequest::new("What is 2 + 2? Answer briefly.");
139 let response = anthropic_agent.run(task2).await;
140 println!(" Response: {:?}\n", response.result());
141
142 println!("š Ollama (Local):");
144 let mut ollama_agent = Agent::new("Ollama Assistant", "ollama::llama3.2");
145 let task3 = TaskRequest::new("What is 2 + 2? Answer briefly.");
146 let response = ollama_agent.run(task3).await;
147 println!(" Response: {:?}\n", response.result());
148
149 println!("š Google Gemini:");
151 let mut gemini_agent = Agent::new("Gemini Assistant", "google::gemini-pro");
152 let task4 = TaskRequest::new("What is 2 + 2? Answer briefly.");
153 let response = gemini_agent.run(task4).await;
154 println!(" Response: {:?}\n", response.result());
155
156 println!("š Groq:");
158 let mut groq_agent = Agent::new("Groq Assistant", "groq::mixtral-8x7b-32768");
159 let task5 = TaskRequest::new("What is 2 + 2? Answer briefly.");
160 let response = groq_agent.run(task5).await;
161 println!(" Response: {:?}\n", response.result());
162
163 Ok(())
164}
165
166async fn advanced_configuration() -> Result<(), Box<dyn std::error::Error>> {
168 println!("\n⨠Using LLMConfig for advanced configuration:\n");
169
170 println!("š Using Agent::new_with_config() with explicit API key:");
172 if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
173 let config = LLMConfig::new("openai::gpt-4")
174 .with_api_key(api_key)
175 .with_temperature(0.7)
176 .with_max_tokens(150)
177 .with_top_p(0.9);
178
179 if let Ok(mut agent) = Agent::new_with_config("Configured Assistant", config) {
180 let task1 = TaskRequest::new("Explain quantum computing in one sentence.");
181 let response = agent.run(task1).await;
182 println!(" Response: {:?}\n", response.result());
183 }
184 } else {
185 println!(" Skipped (OPENAI_API_KEY not set)\n");
186 }
187
188 println!("š Using Agent::new() then with_llm_config():");
190 let mut agent = Agent::new("Configured Assistant", "openai::gpt-4");
191
192 let config = LLMConfig::new("openai::gpt-4")
193 .with_temperature(0.8)
194 .with_max_tokens(200);
195
196 if let Ok(_) = agent.with_llm_config(config) {
197 let task2 = TaskRequest::new("What is machine learning in one sentence?");
198 let response = agent.run(task2).await;
199 println!(" Response: {:?}\n", response.result());
200 }
201
202 println!("š Anthropic with extended thinking:");
204 let mut claude_agent = Agent::new("Thinking Claude", "anthropic::claude-3-opus-20240229");
205
206 let claude_config = LLMConfig::new("anthropic::claude-3-opus-20240229")
207 .with_api_key(std::env::var("ANTHROPIC_API_KEY").unwrap_or_default())
208 .with_temperature(0.5)
209 .with_max_tokens(200)
210 .with_reasoning(true);
211
212 if let Ok(_) = claude_agent.with_llm_config(claude_config) {
213 let task2 = TaskRequest::new("Explain quantum computing in one sentence.");
214 let response = claude_agent.run(task2).await;
215 println!(" Response: {:?}\n", response.result());
216 }
217
218 println!("š Ollama with custom system prompt:");
220 let mut ollama_agent = Agent::new("Custom Ollama", "ollama::llama3.2");
221
222 let ollama_config = LLMConfig::new("ollama::llama3.2")
223 .with_system("You are a concise and technical AI assistant.")
224 .with_max_tokens(100)
225 .with_temperature(0.3);
226
227 if let Ok(_) = ollama_agent.with_llm_config(ollama_config) {
228 let task3 = TaskRequest::new("Explain quantum computing in one sentence.");
229 let response = ollama_agent.run(task3).await;
230 println!(" Response: {:?}\n", response.result());
231 }
232
233 Ok(())
234}
235
236async fn provider_specific_features() -> Result<(), Box<dyn std::error::Error>> {
238 println!("\n⨠Provider-specific features:\n");
239
240 println!("āļø Azure OpenAI with deployment configuration:");
242 let mut azure_agent = Agent::new("Azure Assistant", "azure::gpt-4");
243
244 let azure_config = LLMConfig::new("azure::gpt-4")
245 .with_api_key(std::env::var("AZURE_OPENAI_API_KEY").unwrap_or_default())
246 .with_deployment_id("your-deployment-id")
247 .with_api_version("2024-02-01")
248 .with_base_url("https://your-resource.openai.azure.com");
249
250 if let Ok(_) = azure_agent.with_llm_config(azure_config) {
251 println!(" ā Azure agent configured with deployment settings");
252 }
253
254 println!("\nš OpenAI with web search enabled:");
256 let mut search_agent = Agent::new("Search Assistant", "openai::gpt-4");
257
258 let search_config = LLMConfig::new("openai::gpt-4")
259 .with_api_key(std::env::var("OPENAI_API_KEY").unwrap_or_default())
260 .with_openai_web_search(true);
261
262 if let Ok(_) = search_agent.with_llm_config(search_config) {
263 let task1 = TaskRequest::new("What are the latest developments in AI?");
264 let response = search_agent.run(task1).await;
265 println!(" Response (with web search): {:?}\n", response.result());
266 }
267
268 println!("š DeepSeek for code-focused tasks:");
270 let mut deepseek_agent = Agent::new("DeepSeek Assistant", "deepseek::deepseek-coder");
271
272 let deepseek_config = LLMConfig::new("deepseek::deepseek-coder")
273 .with_api_key(std::env::var("DEEPSEEK_API_KEY").unwrap_or_default())
274 .with_temperature(0.2);
275
276 if let Ok(_) = deepseek_agent.with_llm_config(deepseek_config) {
277 let code_task = TaskRequest::new("Write a Rust function to calculate fibonacci numbers");
278 let response = deepseek_agent.run(code_task).await;
279 println!(" Response: {:?}\n", response.result());
280 }
281
282 println!("š Mistral for European AI:");
284 let mut mistral_agent = Agent::new("Mistral Assistant", "mistral::mistral-large-latest");
285
286 let mistral_config = LLMConfig::new("mistral::mistral-large-latest")
287 .with_api_key(std::env::var("MISTRAL_API_KEY").unwrap_or_default())
288 .with_temperature(0.7)
289 .with_max_tokens(500);
290
291 if let Ok(_) = mistral_agent.with_llm_config(mistral_config) {
292 println!(" ā Mistral agent configured");
293 }
294
295 Ok(())
296}
297
298async fn compare_providers() -> Result<(), Box<dyn std::error::Error>> {
300 println!("\n⨠Comparing responses across providers:\n");
301
302 let providers = vec![
303 ("OpenAI GPT-4", "openai::gpt-4", std::env::var("OPENAI_API_KEY").ok()),
304 ("Anthropic Claude", "anthropic::claude-3-sonnet-20240229", std::env::var("ANTHROPIC_API_KEY").ok()),
305 ("Ollama Llama", "ollama::llama3.2", None),
306 ("Google Gemini", "google::gemini-pro", std::env::var("GOOGLE_API_KEY").ok()),
307 ("Groq Mixtral", "groq::mixtral-8x7b-32768", std::env::var("GROQ_API_KEY").ok()),
308 ];
309
310 for (name, model, api_key) in providers {
311 println!("š¤ {}:", name);
312
313 let mut agent = Agent::new(name, model);
314
315 if let Some(key) = api_key {
317 let config = LLMConfig::new(model)
318 .with_api_key(key)
319 .with_temperature(0.7)
320 .with_max_tokens(100);
321
322 if let Ok(_) = agent.with_llm_config(config) {
323 let task = TaskRequest::new("Explain the concept of 'ownership' in Rust in one sentence.");
324 let response = agent.run(task).await;
325 println!(" ā {:?}", response.result());
326 }
327 } else {
328 let task = TaskRequest::new("Explain the concept of 'ownership' in Rust in one sentence.");
330 let response = agent.run(task).await;
331 println!(" ā {:?}", response.result());
332 }
333
334 println!();
335 }
336
337 Ok(())
338}