use babel::{ChatMessage, Groq, GroqModel, LLMBuilder};

#[tokio::main]
async fn main() -> Result<(), String> {
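    // Configure a Groq-backed client through the builder.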
    let groq_llm = LLMBuilder::<Groq>::new()
        .model(GroqModel::QwenQwq32bPreview)
        .temperature(0.7)
        .max_tokens(2048)
        .system_prompt("You are a helpful assistant.".to_string())
        .build()?;

    println!("Using Groq model: {}", groq_llm.get_model_id());
    let messages = vec![ChatMessage {
        role: "user".to_string(),
        content: "What is machine learning?".to_string(),
    }];

    println!("Groq response:");
    let response = groq_llm.chat(messages).await?;
    println!("{}\n", response);
println!("\nMulti-turn conversation example:");
let mut conversation = vec![
ChatMessage {
role: "user".to_string(),
content: "What are the key features of Rust?".to_string(),
}
];
let response = groq_llm.chat(conversation.clone()).await?;
println!("AI: {}\n", response);
    conversation.push(ChatMessage {
        role: "assistant".to_string(),
        content: response,
    });
    conversation.push(ChatMessage {
        role: "user".to_string(),
        content: "What advantages does Rust have over C++?".to_string(),
    });

    let response = groq_llm.chat(conversation.clone()).await?;
    println!("AI: {}\n", response);
    conversation.push(ChatMessage {
        role: "assistant".to_string(),
        content: response,
    });
    conversation.push(ChatMessage {
        role: "user".to_string(),
        content: "Give me a simple example of Rust's ownership system.".to_string(),
    });

    let response = groq_llm.chat(conversation).await?;
    println!("AI: {}", response);

    Ok(())
}