#[cfg(any(feature = "openai", feature = "anthropic", feature = "google"))]
use std::sync::Arc;
#[cfg(any(feature = "openai", feature = "anthropic", feature = "google"))]
use agent_io::Agent;
#[cfg(feature = "openai")]
use agent_io::llm::ChatOpenAI;
#[cfg(feature = "anthropic")]
use agent_io::llm::ChatAnthropic;
#[cfg(feature = "google")]
use agent_io::llm::ChatGoogle;
/// Demo entry point: sends one short arithmetic prompt to every LLM
/// provider whose Cargo feature was enabled at compile time, printing
/// each provider's banner and response. With no provider feature
/// enabled, prints a usage hint instead.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The prompt is only referenced when at least one provider block
    // is compiled in, so gate it to avoid an unused-variable warning.
    #[cfg(any(feature = "openai", feature = "anthropic", feature = "google"))]
    let prompt = "What is 2 + 2? Answer briefly.";

    #[cfg(feature = "openai")]
    {
        println!("=== OpenAI ===");
        let model = ChatOpenAI::new("gpt-5.4-mini")?;
        let openai_agent = Agent::builder().with_llm(Arc::new(model)).build()?;
        let answer = openai_agent.query(prompt).await?;
        println!("Response: {}\n", answer);
    }

    #[cfg(feature = "anthropic")]
    {
        println!("=== Anthropic ===");
        let model = ChatAnthropic::new("claude-sonnet-4-6")?;
        let anthropic_agent = Agent::builder().with_llm(Arc::new(model)).build()?;
        let answer = anthropic_agent.query(prompt).await?;
        println!("Response: {}\n", answer);
    }

    #[cfg(feature = "google")]
    {
        println!("=== Google ===");
        let model = ChatGoogle::new("gemini-2.0-flash")?;
        let google_agent = Agent::builder().with_llm(Arc::new(model)).build()?;
        let answer = google_agent.query(prompt).await?;
        println!("Response: {}\n", answer);
    }

    // Fallback path: compiled only when every provider feature is off.
    #[cfg(not(any(feature = "openai", feature = "anthropic", feature = "google")))]
    println!("No provider feature enabled. Run with --features openai|anthropic|google|full");

    Ok(())
}