use anyhow::Result;
use brainwires::prelude::*;
use brainwires::chat::ChatProviderFactory;
use brainwires::providers::{ProviderConfig, ProviderType};
/// Sends a single user `prompt` to `provider` and prints the model's reply
/// together with the prompt/completion token counts.
///
/// # Errors
/// Propagates any error returned by the provider's `chat` call.
async fn chat_with_provider(provider: &dyn Provider, prompt: &str) -> Result<()> {
    // One-turn conversation with default chat settings.
    let conversation = vec![Message::user(prompt)];
    let opts = ChatOptions::default();
    let resp = provider.chat(&conversation, None, &opts).await?;

    // An empty reply is printed as an empty string rather than treated as an error.
    println!(" Model reply : {}", resp.message.text().unwrap_or_default());
    let usage = &resp.usage;
    println!(
        " Token usage : {} prompt + {} completion",
        usage.prompt_tokens, usage.completion_tokens,
    );
    Ok(())
}
/// Demo entry point: constructs one chat provider per supported backend,
/// prints each provider's name, then attempts a live chat round-trip against
/// the local Ollama instance — tolerating the case where it is offline.
#[tokio::main]
async fn main() -> Result<()> {
    // Local Ollama backend — the only one that needs no API key.
    println!("=== Ollama (local) ===");
    let local_cfg = ProviderConfig::new(ProviderType::Ollama, "llama3.1".to_string());
    let local = ChatProviderFactory::create(&local_cfg)?;
    println!(" Provider name: {}", local.name());

    // Hosted OpenAI backend (demo key — not a real credential).
    println!("\n=== OpenAI ===");
    let openai_cfg = ProviderConfig::new(ProviderType::OpenAI, "gpt-4o-mini".to_string())
        .with_api_key("sk-demo-key");
    let openai = ChatProviderFactory::create(&openai_cfg)?;
    println!(" Provider name: {}", openai.name());

    // Hosted Anthropic backend.
    println!("\n=== Anthropic ===");
    let anthropic_cfg = ProviderConfig::new(
        ProviderType::Anthropic,
        "claude-sonnet-4-20250514".to_string(),
    )
    .with_api_key("sk-ant-demo-key");
    let anthropic = ChatProviderFactory::create(&anthropic_cfg)?;
    println!(" Provider name: {}", anthropic.name());

    // Hosted Groq backend.
    println!("\n=== Groq ===");
    let groq_cfg = ProviderConfig::new(ProviderType::Groq, "llama-3.3-70b-versatile".to_string())
        .with_api_key("gsk_demo");
    let groq = ChatProviderFactory::create(&groq_cfg)?;
    println!(" Provider name: {}", groq.name());

    // Live round-trip against the local backend; an unreachable server is
    // reported rather than aborting the rest of the demo.
    println!("\n=== Chat demo (Ollama) ===");
    if let Err(e) = chat_with_provider(local.as_ref(), "Say hello in one sentence.").await {
        println!(" (Ollama not reachable: {e})");
    }

    println!("\nAll providers created successfully. Swap ProviderType to switch backends!");
    Ok(())
}