// agent_io/lib.rs
1//! # BU Agent SDK
2//!
3//! A Rust SDK for building AI agents with multi-provider LLM support.
4//!
5//! ## Features
6//!
7//! - Multi-provider LLM support (OpenAI, Anthropic, Google Gemini)
8//! - Tool/function calling with dependency injection
9//! - Streaming responses with event-based architecture
10//! - Context compaction for long-running conversations
11//! - Token usage tracking and cost calculation
12//!
13//! ## Quick Start
14//!
15//! ```rust,no_run
16//! use std::sync::Arc;
17//! use agent_io::{Agent, llm::ChatOpenAI, tools::FunctionTool};
18//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let llm = ChatOpenAI::new("gpt-4o")?;
//!     let agent = Agent::builder()
//!         .with_llm(Arc::new(llm))
//!         .build()?;
//!
//!     let response = agent.query("Hello!").await?;
//!     println!("{}", response);
//!     Ok(())
//! }
//! ```
31
32pub mod agent;
33pub mod llm;
34pub mod observability;
35pub mod tokens;
36pub mod tools;
37
38pub use agent::{Agent, AgentEvent};
39pub use llm::BaseChatModel;
40pub use observability::*;
41pub use tokens::TokenCost;
42pub use tools::Tool;
43
/// Crate-wide result alias: [`std::result::Result`] fixed to the SDK's [`Error`] type.
///
/// Lets fallible SDK functions be written as `-> Result<T>` without
/// repeating the error type at every signature.
pub type Result<T> = std::result::Result<T, Error>;
46
/// Error type covering every failure mode surfaced by the SDK.
///
/// Variants wrapping foreign errors (`llm::LlmError`, `serde_json::Error`,
/// `reqwest::Error`) carry `#[from]`, so `?` converts them into [`Error`]
/// automatically; the remaining variants hold a human-readable message.
/// `Display` output comes from the `#[error(...)]` attributes via `thiserror`.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// Failure reported by an LLM provider backend (wraps [`llm::LlmError`]).
    #[error("LLM error: {0}")]
    Llm(#[from] llm::LlmError),

    /// A tool invocation failed; the string describes what went wrong.
    #[error("Tool error: {0}")]
    Tool(String),

    /// JSON (de)serialization failed (wraps [`serde_json::Error`]).
    #[error("Serialization error: {0}")]
    Serialization(#[from] serde_json::Error),

    /// Transport-level HTTP failure (wraps [`reqwest::Error`]).
    #[error("HTTP error: {0}")]
    Http(#[from] reqwest::Error),

    /// Invalid or missing configuration (e.g. bad builder input).
    // NOTE(review): exact triggers live in the modules that construct this
    // variant — confirm against `agent`/`llm` call sites.
    #[error("Configuration error: {0}")]
    Config(String),

    /// Agent-level failure not covered by the more specific variants.
    #[error("Agent error: {0}")]
    Agent(String),

    /// The agent's tool-call loop reached its iteration limit without
    /// producing a final answer.
    #[error("Max iterations exceeded")]
    MaxIterationsExceeded,
}