//! agent-io 0.3.2
//!
//! A Rust SDK for building AI agents with multi-provider LLM support.
//!
//! Multi-provider example
//!
//! Run with:
//!   cargo run --example multi_provider --features full
//!
//! Or with specific provider:
//!   cargo run --example multi_provider --features openai

#[cfg(any(feature = "openai", feature = "anthropic", feature = "google"))]
use std::sync::Arc;

#[cfg(any(feature = "openai", feature = "anthropic", feature = "google"))]
use agent_io::Agent;

#[cfg(feature = "openai")]
use agent_io::llm::ChatOpenAI;

#[cfg(feature = "anthropic")]
use agent_io::llm::ChatAnthropic;

#[cfg(feature = "google")]
use agent_io::llm::ChatGoogle;

/// Entry point: sends the same short arithmetic prompt to every LLM
/// provider whose Cargo feature is enabled, printing each provider's
/// response in turn. With no provider feature enabled, prints a hint
/// instead.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The shared prompt only exists when at least one provider section
    // below will be compiled in, so it is gated the same way.
    #[cfg(any(feature = "openai", feature = "anthropic", feature = "google"))]
    let prompt = "What is 2 + 2? Answer briefly.";

    #[cfg(feature = "openai")]
    {
        println!("=== OpenAI ===");
        // Wrap the chat model in an Arc so the agent can share ownership.
        let agent = Agent::builder()
            .with_llm(Arc::new(ChatOpenAI::new("gpt-5.4-mini")?))
            .build()?;
        let answer = agent.query(prompt).await?;
        println!("Response: {}\n", answer);
    }

    #[cfg(feature = "anthropic")]
    {
        println!("=== Anthropic ===");
        let agent = Agent::builder()
            .with_llm(Arc::new(ChatAnthropic::new("claude-sonnet-4-6")?))
            .build()?;
        let answer = agent.query(prompt).await?;
        println!("Response: {}\n", answer);
    }

    #[cfg(feature = "google")]
    {
        println!("=== Google ===");
        let agent = Agent::builder()
            .with_llm(Arc::new(ChatGoogle::new("gemini-2.0-flash")?))
            .build()?;
        let answer = agent.query(prompt).await?;
        println!("Response: {}\n", answer);
    }

    // Compiled only when every provider feature is off: tell the user
    // how to enable one.
    #[cfg(not(any(feature = "openai", feature = "anthropic", feature = "google")))]
    println!("No provider feature enabled. Run with --features openai|anthropic|google|full");

    Ok(())
}