// openai_ergonomic/lib.rs
1#![doc = include_str!("../README.md")]
2#![forbid(unsafe_code)]
3#![warn(missing_docs)]
4#![allow(clippy::missing_const_for_fn)]
5#![allow(clippy::use_self)]
6#![allow(clippy::io_other_error)]
7#![allow(async_fn_in_trait)]
8
9//! # openai-ergonomic
10//!
11//! An ergonomic Rust wrapper for the `OpenAI` API, providing type-safe builder patterns
12//! and async/await support for all `OpenAI` endpoints.
13//!
14//! ## Features
15//!
16//! - **Type-safe builders** - Use builder patterns with compile-time validation
17//! - **Async/await support** - Built on tokio and reqwest for modern async Rust
18//! - **Streaming responses** - First-class support for real-time streaming
19//! - **Comprehensive coverage** - Support for all `OpenAI` API endpoints
20//! - **Error handling** - Structured error types for robust applications
21//! - **Testing support** - Mock-friendly design for unit testing
22//!
23//! ## Quick Start
24//!
25//! ```rust,ignore
26//! use openai_ergonomic::{Client, Config};
27//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Create a client from environment variables
//!     let client = Client::from_env()?;
//!
//!     // Simple chat completion
//!     let response = client
//!         .chat_simple("Hello, how are you?")
//!         .await?;
//!
//!     println!("{}", response);
//!     Ok(())
//! }
//! ```
42//!
43//! ## Streaming Example
44//!
45//! ```rust,ignore
46//! use openai_ergonomic::Client;
47//! use futures::StreamExt;
48//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = Client::from_env()?.build();
//!
//!     // Stream chat completions
//!     let builder = client
//!         .chat()
//!         .user("Tell me a story");
//!
//!     let mut stream = client.send_chat_stream(builder).await?;
//!
//!     while let Some(chunk) = stream.next().await {
//!         let chunk = chunk?;
//!         if let Some(content) = chunk.content() {
//!             print!("{}", content);
//!         }
//!     }
//!     Ok(())
//! }
//! ```
69//!
70//! ## Error Handling
71//!
72//! ```rust,ignore
73//! use openai_ergonomic::{Client, Error};
74//!
//! #[tokio::main]
//! async fn main() {
//!     let client = Client::from_env().expect("API key required");
//!
//!     match client.chat_simple("Hello").await {
//!         Ok(response) => println!("{}", response),
//!         Err(Error::RateLimit { .. }) => {
//!             println!("Rate limited, please retry later");
//!         }
//!         Err(e) => eprintln!("Error: {}", e),
//!     }
//! }
//! ```
88//!
89//! ## Custom Configuration
90//!
91//! ```rust,ignore
92//! use openai_ergonomic::{Client, Config};
93//! use std::time::Duration;
94//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let config = Config::builder()
//!         .api_key("your-api-key")
//!         .organization_id("org-123")
//!         .timeout(Duration::from_secs(30))
//!         .max_retries(5)
//!         .build();
//!
//!     let client = Client::builder(config)?.build();
//!     Ok(())
//! }
//! ```
108//!
109//! ## Testing with Mocks
110//!
//! ```rust,ignore
//! #[cfg(test)]
//! mod tests {
//!     use openai_ergonomic::test_utils::MockOpenAIServer;
//!
//!     #[tokio::test]
//!     async fn test_chat_completion() {
//!         let mock = MockOpenAIServer::new();
//!         mock.mock_chat_completion("Hello!", "Hi there!");
//!
//!         let client = mock.client();
//!         let response = client.chat_simple("Hello!").await.unwrap();
//!         assert_eq!(response, "Hi there!");
//!     }
//! }
//! ```
127//!
//! # Modules
//!
//! - [`builders`] - Builder pattern implementations for API requests
//! - [`responses`] - Response type wrappers with ergonomic helpers
//! - [`client`] - Main client for API interactions
//! - [`config`] - Configuration management
//! - [`errors`] - Error types and handling
//! - [`streaming`] - Streaming response types ([`ChatCompletionStream`] and friends)
//! - [`interceptor`] - Request/response/stream interception hooks
//! - [`langfuse_interceptor`] - Langfuse-based [`Interceptor`] implementation
//! - [`azure_middleware`] - Azure-specific middleware support
//! - [`semantic_conventions`] - Semantic convention definitions
135
136// Re-export bon for builder macros
137pub use bon;
138
139// Core modules
140pub mod azure_middleware;
141pub mod builders;
142pub mod client;
143pub mod config;
144pub mod errors;
145pub mod interceptor;
146pub mod langfuse_interceptor;
147pub mod responses;
148pub mod semantic_conventions;
149pub mod streaming;
150
151// Re-export commonly used types
152pub use client::Client;
153pub use config::{Config, ConfigBuilder};
154pub use errors::{Error, Result};
155pub use interceptor::{
156 AfterResponseContext, BeforeRequestContext, ErrorContext, Interceptor, StreamChunkContext,
157 StreamEndContext,
158};
159pub use langfuse_interceptor::{LangfuseConfig, LangfuseInterceptor, LangfuseState};
160
161// Re-export specific builder and response types for convenience
162// NOTE: We avoid wildcard re-exports to prevent naming conflicts between modules
163pub use builders::audio::{
164 SpeechBuilder, TimestampGranularity, TranscriptionBuilder, TranscriptionRequest,
165 TranslationBuilder, TranslationRequest,
166};
167pub use builders::chat::{
168 image_base64_part, image_base64_part_with_detail, image_url_part, image_url_part_with_detail,
169 system_user, text_part, user_message,
170};
171pub use builders::embeddings::{EmbeddingInput, EmbeddingsBuilder};
172pub use builders::images::{
173 Background, ImageEditBuilder, ImageEditRequest, ImageGenerationBuilder, ImageInputFidelity,
174 ImageInputFidelityTextVariantEnum, ImageVariationBuilder, ImageVariationRequest, Moderation,
175 OutputFormat, Quality, ResponseFormat, Size, Style,
176};
177pub use builders::threads::{
178 AttachmentTool, MessageAttachment, ThreadMessageBuilder, ThreadRequestBuilder,
179};
180pub use builders::uploads::UploadBuilder;
181// Re-export vision types for convenience
182pub use builders::responses::{responses_simple, responses_system_user, ResponsesBuilder};
183pub use builders::{Builder, ChatCompletionBuilder, Sendable};
184pub use openai_client_base::models::chat_completion_request_message_content_part_image_image_url::Detail;
185pub use openai_client_base::models::create_upload_request::Purpose as UploadPurpose;
186pub use responses::chat::{
187 ChatChoice, ChatCompletionResponse, ChatCompletionResponseExt,
188 ChatMessage as ResponseChatMessage, FunctionCall, ToolCall, ToolCallExt,
189};
190pub use responses::{tool_function, tool_web_search, ChatCompletionResponseWrapper};
191pub use responses::{Response, Tool, ToolChoice, Usage};
192pub use streaming::{ChatCompletionChunk, ChatCompletionStream, InterceptedStream};
193
194// Test utilities (feature-gated)
195#[cfg(feature = "test-utils")]
196pub mod test_utils;
197
#[cfg(test)]
mod tests {
    use super::*;

    /// The config builder stores the supplied API key verbatim.
    #[test]
    fn test_config_creation() {
        let cfg = Config::builder().api_key("test-key").build();
        assert_eq!(cfg.api_key(), "test-key");
    }

    /// A client can be constructed from a minimal config without error.
    #[test]
    fn test_client_creation_with_config() {
        let cfg = Config::builder().api_key("test-key").build();
        assert!(Client::builder(cfg).is_ok());
    }
}
214}