Struct LLMConfig

pub struct LLMConfig {
    pub model: String,
    pub api_key: Option<String>,
    pub base_url: Option<String>,
    pub max_tokens: Option<u32>,
    pub temperature: Option<f32>,
    pub top_p: Option<f32>,
    pub top_k: Option<u32>,
    pub system: Option<String>,
    pub timeout_seconds: Option<u64>,
    pub embedding_encoding_format: Option<String>,
    pub embedding_dimensions: Option<u32>,
    pub enable_parallel_tool_use: Option<bool>,
    pub reasoning: Option<bool>,
    pub reasoning_effort: Option<String>,
    pub reasoning_budget_tokens: Option<u32>,
    pub api_version: Option<String>,
    pub deployment_id: Option<String>,
    pub voice: Option<String>,
    pub xai_search_mode: Option<String>,
    pub xai_search_source_type: Option<String>,
    pub xai_search_excluded_websites: Option<Vec<String>>,
    pub xai_search_max_results: Option<u32>,
    pub xai_search_from_date: Option<String>,
    pub xai_search_to_date: Option<String>,
    pub openai_enable_web_search: Option<bool>,
    pub openai_web_search_context_size: Option<String>,
    pub openai_web_search_user_location_type: Option<String>,
    pub openai_web_search_user_location_approximate_country: Option<String>,
    pub openai_web_search_user_location_approximate_city: Option<String>,
    pub openai_web_search_user_location_approximate_region: Option<String>,
    pub resilient_enable: Option<bool>,
    pub resilient_attempts: Option<usize>,
    pub resilient_base_delay_ms: Option<u64>,
    pub resilient_max_delay_ms: Option<u64>,
    pub resilient_jitter: Option<bool>,
}

Configuration for LLM providers with all builder options.

This struct provides comprehensive configuration for any LLM provider, matching all options available in the LLMBuilder.
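A minimal construction sketch (the "provider::model" string format follows the repository examples below; the option values are illustrative, not recommendations):

let config = LLMConfig::new("openai::gpt-4")
    .with_temperature(0.7)          // sampling temperature
    .with_max_tokens(256)           // cap on generated tokens
    .with_system("You are a helpful assistant.");

Options that are never set remain None and fall back to the provider's defaults.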

Fields

model: String
api_key: Option<String>
base_url: Option<String>
max_tokens: Option<u32>
temperature: Option<f32>
top_p: Option<f32>
top_k: Option<u32>
system: Option<String>
timeout_seconds: Option<u64>
embedding_encoding_format: Option<String>
embedding_dimensions: Option<u32>
enable_parallel_tool_use: Option<bool>
reasoning: Option<bool>
reasoning_effort: Option<String>
reasoning_budget_tokens: Option<u32>
api_version: Option<String>
deployment_id: Option<String>
voice: Option<String>
xai_search_mode: Option<String>
xai_search_source_type: Option<String>
xai_search_excluded_websites: Option<Vec<String>>
xai_search_max_results: Option<u32>
xai_search_from_date: Option<String>
xai_search_to_date: Option<String>
openai_enable_web_search: Option<bool>
openai_web_search_context_size: Option<String>
openai_web_search_user_location_type: Option<String>
openai_web_search_user_location_approximate_country: Option<String>
openai_web_search_user_location_approximate_city: Option<String>
openai_web_search_user_location_approximate_region: Option<String>
resilient_enable: Option<bool>
resilient_attempts: Option<usize>
resilient_base_delay_ms: Option<u64>
resilient_max_delay_ms: Option<u64>
resilient_jitter: Option<bool>
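Because every field is public, options without a dedicated builder method in this listing (for example the resilient_* retry settings) can still be set with struct-update syntax. A sketch, assuming the delay fields are retry backoff timings in milliseconds, as their names suggest:

let config = LLMConfig {
    resilient_enable: Some(true),        // turn on resilient retries
    resilient_attempts: Some(3),         // retry up to 3 times
    resilient_base_delay_ms: Some(250),  // initial backoff delay
    resilient_max_delay_ms: Some(4_000), // backoff ceiling
    resilient_jitter: Some(true),        // randomize delays
    ..LLMConfig::new("openai::gpt-4")    // everything else from defaults
};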

Implementations

impl LLMConfig


pub fn new(model: impl Into<String>) -> Self

Create a new LLMConfig with just the model name

Examples found in repository:
examples/08_llm_providers.rs (line 173)
async fn advanced_configuration() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n✨ Using LLMConfig for advanced configuration:\n");

    // Method 1: Using Agent::new_with_config() (Recommended for explicit API keys)
    println!("📘 Using Agent::new_with_config() with explicit API key:");
    if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
        let config = LLMConfig::new("openai::gpt-4")
            .with_api_key(api_key)
            .with_temperature(0.7)
            .with_max_tokens(150)
            .with_top_p(0.9);

        if let Ok(mut agent) = Agent::new_with_config("Configured Assistant", config) {
            let task1 = TaskRequest::new("Explain quantum computing in one sentence.");
            let response = agent.run(task1).await;
            println!("   Response: {:?}\n", response.result());
        }
    } else {
        println!("   Skipped (OPENAI_API_KEY not set)\n");
    }

    // Method 2: Using Agent::new() then with_llm_config() (Alternative approach)
    println!("📗 Using Agent::new() then with_llm_config():");
    let mut agent = Agent::new("Configured Assistant", "openai::gpt-4");

    let config = LLMConfig::new("openai::gpt-4")
        .with_temperature(0.8)
        .with_max_tokens(200);

    if let Ok(_) = agent.with_llm_config(config) {
        let task2 = TaskRequest::new("What is machine learning in one sentence?");
        let response = agent.run(task2).await;
        println!("   Response: {:?}\n", response.result());
    }

    // Anthropic with reasoning
    println!("📗 Anthropic with extended thinking:");
    let mut claude_agent = Agent::new("Thinking Claude", "anthropic::claude-3-opus-20240229");

    let claude_config = LLMConfig::new("anthropic::claude-3-opus-20240229")
        .with_api_key(std::env::var("ANTHROPIC_API_KEY").unwrap_or_default())
        .with_temperature(0.5)
        .with_max_tokens(200)
        .with_reasoning(true);

    if let Ok(_) = claude_agent.with_llm_config(claude_config) {
        let task2 = TaskRequest::new("Explain quantum computing in one sentence.");
        let response = claude_agent.run(task2).await;
        println!("   Response: {:?}\n", response.result());
    }

    // Ollama with custom system prompt
    println!("📙 Ollama with custom system prompt:");
    let mut ollama_agent = Agent::new("Custom Ollama", "ollama::llama3.2");

    let ollama_config = LLMConfig::new("ollama::llama3.2")
        .with_system("You are a concise and technical AI assistant.")
        .with_max_tokens(100)
        .with_temperature(0.3);

    if let Ok(_) = ollama_agent.with_llm_config(ollama_config) {
        let task3 = TaskRequest::new("Explain quantum computing in one sentence.");
        let response = ollama_agent.run(task3).await;
        println!("   Response: {:?}\n", response.result());
    }

    Ok(())
}

/// Example 3: Provider-specific features
async fn provider_specific_features() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n✨ Provider-specific features:\n");

    // Azure OpenAI
    println!("☁️  Azure OpenAI with deployment configuration:");
    let mut azure_agent = Agent::new("Azure Assistant", "azure::gpt-4");

    let azure_config = LLMConfig::new("azure::gpt-4")
        .with_api_key(std::env::var("AZURE_OPENAI_API_KEY").unwrap_or_default())
        .with_deployment_id("your-deployment-id")
        .with_api_version("2024-02-01")
        .with_base_url("https://your-resource.openai.azure.com");

    if let Ok(_) = azure_agent.with_llm_config(azure_config) {
        println!("   ✓ Azure agent configured with deployment settings");
    }

    // OpenAI with web search
    println!("\n🌐 OpenAI with web search enabled:");
    let mut search_agent = Agent::new("Search Assistant", "openai::gpt-4");

    let search_config = LLMConfig::new("openai::gpt-4")
        .with_api_key(std::env::var("OPENAI_API_KEY").unwrap_or_default())
        .with_openai_web_search(true);

    if let Ok(_) = search_agent.with_llm_config(search_config) {
        let task1 = TaskRequest::new("What are the latest developments in AI?");
        let response = search_agent.run(task1).await;
        println!("   Response (with web search): {:?}\n", response.result());
    }

    // DeepSeek
    println!("🔍 DeepSeek for code-focused tasks:");
    let mut deepseek_agent = Agent::new("DeepSeek Assistant", "deepseek::deepseek-coder");

    let deepseek_config = LLMConfig::new("deepseek::deepseek-coder")
        .with_api_key(std::env::var("DEEPSEEK_API_KEY").unwrap_or_default())
        .with_temperature(0.2);

    if let Ok(_) = deepseek_agent.with_llm_config(deepseek_config) {
        let code_task = TaskRequest::new("Write a Rust function to calculate fibonacci numbers");
        let response = deepseek_agent.run(code_task).await;
        println!("   Response: {:?}\n", response.result());
    }

    // Mistral
    println!("🌟 Mistral for European AI:");
    let mut mistral_agent = Agent::new("Mistral Assistant", "mistral::mistral-large-latest");

    let mistral_config = LLMConfig::new("mistral::mistral-large-latest")
        .with_api_key(std::env::var("MISTRAL_API_KEY").unwrap_or_default())
        .with_temperature(0.7)
        .with_max_tokens(500);

    if let Ok(_) = mistral_agent.with_llm_config(mistral_config) {
        println!("   ✓ Mistral agent configured");
    }

    Ok(())
}

/// Example 4: Compare providers
async fn compare_providers() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n✨ Comparing responses across providers:\n");

    let providers = vec![
        ("OpenAI GPT-4", "openai::gpt-4", std::env::var("OPENAI_API_KEY").ok()),
        ("Anthropic Claude", "anthropic::claude-3-sonnet-20240229", std::env::var("ANTHROPIC_API_KEY").ok()),
        ("Ollama Llama", "ollama::llama3.2", None),
        ("Google Gemini", "google::gemini-pro", std::env::var("GOOGLE_API_KEY").ok()),
        ("Groq Mixtral", "groq::mixtral-8x7b-32768", std::env::var("GROQ_API_KEY").ok()),
    ];

    for (name, model, api_key) in providers {
        println!("🤖 {}:", name);

        let mut agent = Agent::new(name, model);

        // Configure with LLMConfig if API key is available
        if let Some(key) = api_key {
            let config = LLMConfig::new(model)
                .with_api_key(key)
                .with_temperature(0.7)
                .with_max_tokens(100);

            if let Ok(_) = agent.with_llm_config(config) {
                let task = TaskRequest::new("Explain the concept of 'ownership' in Rust in one sentence.");
                let response = agent.run(task).await;
                println!("   ✓ {:?}", response.result());
            }
        } else {
            // Try without explicit API key (for Ollama or env vars)
            let task = TaskRequest::new("Explain the concept of 'ownership' in Rust in one sentence.");
            let response = agent.run(task).await;
            println!("   ✓ {:?}", response.result());
        }

        println!();
    }

    Ok(())
}
More examples:
examples/09_test_groq.rs (line 18)
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing Groq LLM Provider with API Key\n");

    // Test 1: Using Agent::new_with_config() - Recommended approach
    println!("{}", "=".repeat(60));
    println!("Test 1: Using Agent::new_with_config() with explicit API key");
    println!("{}", "=".repeat(60));

    match std::env::var("GROQ_API_KEY") {
        Ok(api_key) => {
            println!("✓ GROQ_API_KEY found in environment\n");

            let groq_config = LLMConfig::new("groq::llama-3.3-70b-versatile")
                .with_api_key(api_key)
                .with_temperature(0.7)
                .with_max_tokens(100);

            match Agent::new_with_config("Groq Agent", groq_config) {
                Ok(mut agent) => {
                    println!("✓ Groq agent created successfully!\n");

                    println!("Running task: 'What is 2 + 2?'");
                    let task = TaskRequest::new("What is 2 + 2? Answer in one short sentence.");
                    println!("Sending request to Groq API...\n");

                    let response = agent.run(task).await;

                    println!("Response received:");
                    println!("================");
                    println!("{:?}", response.result());
                    println!("================\n");

                    println!("🎉 Test 1 completed successfully!");
                }
                Err(e) => {
                    println!("✗ Failed to create Groq agent: {}\n", e);
                }
            }
        }
        Err(_) => {
            println!("✗ GROQ_API_KEY not found in environment");
            println!("  Please set GROQ_API_KEY to run this test\n");
        }
    }

    // Test 2: Using Agent::new() with environment variable - Simpler approach
    println!("\n{}", "=".repeat(60));
    println!("Test 2: Using Agent::new() with environment variable");
    println!("{}", "=".repeat(60));

    if std::env::var("GROQ_API_KEY").is_ok() {
        println!("✓ GROQ_API_KEY is set, agent will use it automatically\n");

        match std::panic::catch_unwind(|| {
            Agent::new("Simple Groq Agent", "groq::llama-3.3-70b-versatile")
        }) {
            Ok(mut agent) => {
                println!("✓ Groq agent created successfully with auto API key!\n");

                println!("Running task: 'What is the capital of France?'");
                let task = TaskRequest::new("What is the capital of France? Answer in one word.");

                let response = agent.run(task).await;

                println!("Response received:");
                println!("================");
                println!("{:?}", response.result());
                println!("================\n");

                println!("🎉 Test 2 completed successfully!");
            }
            Err(_) => {
                println!("✗ Failed to create agent (unexpected panic)\n");
            }
        }
    } else {
        println!("✗ GROQ_API_KEY not set, skipping this test\n");
    }

    println!("\n{}", "=".repeat(60));
    println!("All tests completed!");
    println!("{}", "=".repeat(60));

    Ok(())
}

pub fn with_api_key(self, api_key: impl Into<String>) -> Self

Set API key

Examples found in repository: examples/08_llm_providers.rs (line 174) and examples/09_test_groq.rs (line 19); see the listings under new() above.

pub fn with_base_url(self, base_url: impl Into<String>) -> Self

Set base URL

Examples found in repository: examples/08_llm_providers.rs (line 248); see the provider_specific_features listing under new() above.

pub fn with_max_tokens(self, max_tokens: u32) -> Self

Set max tokens

Examples found in repository: examples/08_llm_providers.rs (line 176) and examples/09_test_groq.rs (line 21); see the listings under new() above.

pub fn with_temperature(self, temperature: f32) -> Self

Set temperature

Examples found in repository: examples/08_llm_providers.rs (line 175) and examples/09_test_groq.rs (line 20); see the listings under new() above.

pub fn with_top_p(self, top_p: f32) -> Self

Set top_p (nucleus sampling: restrict sampling to the smallest set of tokens whose cumulative probability reaches top_p)

Examples found in repository: examples/08_llm_providers.rs (line 177); see the advanced_configuration listing under new() above.

pub fn with_top_k(self, top_k: u32) -> Self

Set top_k (restrict sampling to the k most likely tokens)
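
No example was scraped from the repository for this method. A minimal sketch; the model string comes from the examples above, and the value 40 is illustrative:

let config = LLMConfig::new("ollama::llama3.2")
    .with_top_k(40)          // consider only the 40 most likely tokens per step
    .with_temperature(0.7);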


pub fn with_system(self, system: impl Into<String>) -> Self

Set system prompt

Examples found in repository: examples/08_llm_providers.rs (line 223); see the advanced_configuration listing under new() above.

pub fn with_timeout_seconds(self, timeout: u64) -> Self

Set timeout in seconds
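
No example was scraped from the repository for this method. A minimal sketch; the 30-second value is illustrative:

let config = LLMConfig::new("openai::gpt-4")
    .with_timeout_seconds(30); // give up on a request after 30 seconds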


pub fn with_reasoning(self, enabled: bool) -> Self

Enable reasoning (for supported providers)

Examples found in repository?
examples/08_llm_providers.rs (line 210) — the same advanced_configuration example reproduced in full above; the relevant call is .with_reasoning(true) on the Anthropic config.
Source

pub fn with_reasoning_effort(self, effort: impl Into<String>) -> Self

Set the reasoning effort level (for supported providers)
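
Not exercised by the repository examples; a minimal sketch pairing it with with_reasoning. The model identifier is hypothetical and the accepted effort strings depend on the provider:

let config = LLMConfig::new("openai::o1") // hypothetical model identifier
    .with_reasoning(true)
    .with_reasoning_effort("high"); // accepted values depend on the provider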

Source

pub fn with_deployment_id(self, deployment_id: impl Into<String>) -> Self

Set Azure deployment ID

Examples found in repository?
examples/08_llm_providers.rs (line 246)
237async fn provider_specific_features() -> Result<(), Box<dyn std::error::Error>> {
238    println!("\n✨ Provider-specific features:\n");
239
240    // Azure OpenAI
241    println!("☁️  Azure OpenAI with deployment configuration:");
242    let mut azure_agent = Agent::new("Azure Assistant", "azure::gpt-4");
243
244    let azure_config = LLMConfig::new("azure::gpt-4")
245        .with_api_key(std::env::var("AZURE_OPENAI_API_KEY").unwrap_or_default())
246        .with_deployment_id("your-deployment-id")
247        .with_api_version("2024-02-01")
248        .with_base_url("https://your-resource.openai.azure.com");
249
250    if let Ok(_) = azure_agent.with_llm_config(azure_config) {
251        println!("   ✓ Azure agent configured with deployment settings");
252    }
253
254    // OpenAI with web search
255    println!("\n🌐 OpenAI with web search enabled:");
256    let mut search_agent = Agent::new("Search Assistant", "openai::gpt-4");
257
258    let search_config = LLMConfig::new("openai::gpt-4")
259        .with_api_key(std::env::var("OPENAI_API_KEY").unwrap_or_default())
260        .with_openai_web_search(true);
261
262    if let Ok(_) = search_agent.with_llm_config(search_config) {
263        let task1 = TaskRequest::new("What are the latest developments in AI?");
264        let response = search_agent.run(task1).await;
265        println!("   Response (with web search): {:?}\n", response.result());
266    }
267
268    // DeepSeek
269    println!("🔍 DeepSeek for code-focused tasks:");
270    let mut deepseek_agent = Agent::new("DeepSeek Assistant", "deepseek::deepseek-coder");
271
272    let deepseek_config = LLMConfig::new("deepseek::deepseek-coder")
273        .with_api_key(std::env::var("DEEPSEEK_API_KEY").unwrap_or_default())
274        .with_temperature(0.2);
275
276    if let Ok(_) = deepseek_agent.with_llm_config(deepseek_config) {
277        let code_task = TaskRequest::new("Write a Rust function to calculate fibonacci numbers");
278        let response = deepseek_agent.run(code_task).await;
279        println!("   Response: {:?}\n", response.result());
280    }
281
282    // Mistral
283    println!("🌟 Mistral for European AI:");
284    let mut mistral_agent = Agent::new("Mistral Assistant", "mistral::mistral-large-latest");
285
286    let mistral_config = LLMConfig::new("mistral::mistral-large-latest")
287        .with_api_key(std::env::var("MISTRAL_API_KEY").unwrap_or_default())
288        .with_temperature(0.7)
289        .with_max_tokens(500);
290
291    if let Ok(_) = mistral_agent.with_llm_config(mistral_config) {
292        println!("   ✓ Mistral agent configured");
293    }
294
295    Ok(())
296}
Source

pub fn with_api_version(self, api_version: impl Into<String>) -> Self

Set Azure API version

Examples found in repository?
examples/08_llm_providers.rs (line 247) — the same provider_specific_features example reproduced under with_deployment_id above; the relevant call is .with_api_version("2024-02-01") on the Azure config.

Source

pub fn with_openai_web_search(self, enabled: bool) -> Self

Enable OpenAI web search

Examples found in repository?
examples/08_llm_providers.rs (line 260) — the same provider_specific_features example reproduced under with_deployment_id above; the relevant call is .with_openai_web_search(true) on the search config.
Source

pub fn with_resilience(self, enabled: bool, attempts: usize) -> Self

Enable resilient requests with automatic retry and backoff; attempts sets the maximum number of attempts
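
Not exercised by the repository examples; a minimal sketch, assuming the resilient_* delay fields keep their defaults:

// Retry transient failures up to 3 times with backoff between attempts.
let config = LLMConfig::new("openai::gpt-4")
    .with_resilience(true, 3);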

Trait Implementations§

Source§

impl Clone for LLMConfig

Source§

fn clone(&self) -> LLMConfig

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl Debug for LLMConfig

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl Default for LLMConfig

Source§

fn default() -> Self

Returns the “default value” for a type. Read more
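
Since every field other than model is an Option, the default value is a mostly-unset config; a minimal sketch using struct-update syntax to fill in only what is needed (model string illustrative):

let config = LLMConfig {
    model: "openai::gpt-4".to_string(),
    temperature: Some(0.7),
    ..LLMConfig::default() // leave all other options unset
};
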
Source§

impl<'de> Deserialize<'de> for LLMConfig

Source§

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer. Read more
Source§

impl From<LLMConfig> for LLMProviderConfig

Source§

fn from(config: LLMConfig) -> Self

Converts to this type from the input type.
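
A sketch of the conversion; LLMProviderConfig's shape is not shown on this page, so this only demonstrates the call:

let config = LLMConfig::new("anthropic::claude-3-opus-20240229")
    .with_max_tokens(200);
let provider_config: LLMProviderConfig = config.into();
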
Source§

impl Serialize for LLMConfig

Source§

fn serialize<__S>(&self, __serializer: __S) -> Result<__S::Ok, __S::Error>
where __S: Serializer,

Serialize this value into the given Serde serializer. Read more
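
Because LLMConfig implements both Serialize and Deserialize, it can round-trip through a serde format; a minimal sketch, assuming serde_json is available as a dependency:

// Serialize a config to JSON and read it back.
let config = LLMConfig::new("openai::gpt-4").with_temperature(0.7);
let json = serde_json::to_string(&config).expect("serialize");
let restored: LLMConfig = serde_json::from_str(&json).expect("deserialize");
assert_eq!(restored.model, "openai::gpt-4");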

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> FromRef<T> for T
where T: Clone,

Source§

fn from_ref(input: &T) -> T

Converts to this type from a reference to the input type.
Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> IntoEither for T

Source§

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,

Source§

impl<A, B, T> HttpServerConnExec<A, B> for T
where B: Body,