// openai_ergonomic/lib.rs

#![doc = include_str!("../README.md")]
#![forbid(unsafe_code)]
#![warn(missing_docs)]
#![allow(clippy::missing_const_for_fn)]
#![allow(clippy::use_self)]
#![allow(clippy::io_other_error)]
#![allow(async_fn_in_trait)]

//! # openai-ergonomic
//!
//! An ergonomic Rust wrapper for the `OpenAI` API, providing type-safe builder patterns
//! and async/await support for all `OpenAI` endpoints.
//!
//! ## Features
//!
//! - **Type-safe builders** - Use builder patterns with compile-time validation
//! - **Async/await support** - Built on tokio and reqwest for modern async Rust
//! - **Streaming responses** - First-class support for real-time streaming
//! - **Comprehensive coverage** - Support for all `OpenAI` API endpoints
//! - **Error handling** - Structured error types for robust applications
//! - **Testing support** - Mock-friendly design for unit testing
//!
//! ## Quick Start
//!
//! ```rust,ignore
//! use openai_ergonomic::Client;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Create a client from environment variables
//!     let client = Client::from_env()?;
//!
//!     // Simple chat completion
//!     let response = client
//!         .chat_simple("Hello, how are you?")
//!         .await?;
//!
//!     println!("{}", response);
//!     Ok(())
//! }
//! ```
//!
//! ## Streaming Example
//!
//! ```rust,ignore
//! use openai_ergonomic::Client;
//! use futures::StreamExt;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = Client::from_env()?;
//!
//!     // Stream chat completions
//!     let mut stream = client
//!         .chat()
//!         .user("Tell me a story")
//!         .stream()
//!         .await?;
//!
//!     while let Some(chunk) = stream.next().await {
//!         print!("{}", chunk?.content());
//!     }
//!     Ok(())
//! }
//! ```
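//!
//! The streamed chunks can also be collected into a single reply. The fragment
//! below is a hedged sketch that continues the example above; it assumes only
//! that each chunk's `content()` is printable, exactly as it is used with
//! `print!` in the loop.
//!
//! ```rust,ignore
//! // Continuing from the `stream` created above.
//! let mut full_reply = String::new();
//! while let Some(chunk) = stream.next().await {
//!     // `content()` is assumed to yield a displayable text fragment.
//!     full_reply += &format!("{}", chunk?.content());
//! }
//! println!("{}", full_reply);
//! ```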
//!
//! ## Error Handling
//!
//! ```rust,ignore
//! use openai_ergonomic::{Client, Error};
//!
//! #[tokio::main]
//! async fn main() {
//!     let client = Client::from_env().expect("API key required");
//!
//!     match client.chat_simple("Hello").await {
//!         Ok(response) => println!("{}", response),
//!         Err(Error::RateLimit { .. }) => {
//!             println!("Rate limited, please retry later");
//!         }
//!         Err(e) => eprintln!("Error: {}", e),
//!     }
//! }
//! ```
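//!
//! Because rate limits surface as a dedicated variant, a simple retry loop can
//! be layered on top. The following is a hedged sketch: it relies only on the
//! `chat_simple` call and the `Error::RateLimit` variant shown above, it
//! assumes the reply is displayable (as in the earlier examples), and the
//! backoff policy is purely illustrative.
//!
//! ```rust,ignore
//! use openai_ergonomic::{Client, Error};
//! use std::time::Duration;
//!
//! async fn ask_with_retry(client: &Client, prompt: &str) -> Result<String, Error> {
//!     let mut attempts: u64 = 0;
//!     loop {
//!         match client.chat_simple(prompt).await {
//!             Ok(response) => return Ok(response.to_string()),
//!             Err(Error::RateLimit { .. }) if attempts < 3 => {
//!                 attempts += 1;
//!                 // Naive linear backoff; tune for real workloads.
//!                 tokio::time::sleep(Duration::from_secs(2 * attempts)).await;
//!             }
//!             Err(e) => return Err(e),
//!         }
//!     }
//! }
//! ```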
//!
//! ## Custom Configuration
//!
//! ```rust,ignore
//! use openai_ergonomic::{Client, Config};
//! use std::time::Duration;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let config = Config::builder()
//!         .api_key("your-api-key")
//!         .organization_id("org-123")
//!         .timeout(Duration::from_secs(30))
//!         .max_retries(5)
//!         .build();
//!
//!     let client = Client::builder(config)?.build();
//!     Ok(())
//! }
//! ```
//!
//! ## Testing with Mocks
//!
//! ```rust,ignore
//! #[cfg(test)]
//! mod tests {
//!     use openai_ergonomic::test_utils::MockOpenAIServer;
//!
//!     #[tokio::test]
//!     async fn test_chat_completion() {
//!         let mock = MockOpenAIServer::new();
//!         mock.mock_chat_completion("Hello!", "Hi there!");
//!
//!         let client = mock.client();
//!         let response = client.chat_simple("Hello!").await.unwrap();
//!         assert_eq!(response, "Hi there!");
//!     }
//! }
//! ```
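//!
//! The `MockOpenAIServer` helper shown above lives in the `test_utils` module,
//! which is only compiled when the crate's `test-utils` feature is enabled.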
//!
//! ## Modules
//!
//! - [`builders`] - Builder pattern implementations for API requests
//! - [`responses`] - Response type wrappers with ergonomic helpers
//! - [`client`] - Main client for API interactions
//! - [`config`] - Configuration management
//! - [`errors`] - Error types and handling
//! - [`interceptor`] - Hooks for inspecting requests, responses, and stream events
//! - [`langfuse_interceptor`] - Langfuse-based observability interceptor
//! - [`semantic_conventions`] - Semantic convention names used for telemetry
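//!
//! As a closing sketch (hedged: it reuses only calls that already appear in the
//! examples above), the snippet below shows how these modules typically
//! compose: `config` builds the settings, `client` executes the request, and
//! the reply prints just as in the Quick Start example.
//!
//! ```rust,ignore
//! use openai_ergonomic::{Client, Config};
//!
//! async fn demo() -> Result<(), Box<dyn std::error::Error>> {
//!     let config = Config::builder().api_key("your-api-key").build();
//!     let client = Client::builder(config)?.build();
//!     let reply = client.chat_simple("Ping").await?;
//!     println!("{}", reply);
//!     Ok(())
//! }
//! ```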

// Re-export bon for builder macros
pub use bon;

// Core modules
pub mod builders;
pub mod client;
pub mod config;
pub mod errors;
pub mod interceptor;
pub mod langfuse_interceptor;
pub mod responses;
pub mod semantic_conventions;

// Re-export commonly used types
pub use client::Client;
pub use config::{Config, ConfigBuilder};
pub use errors::{Error, Result};
pub use interceptor::{
    AfterResponseContext, BeforeRequestContext, ErrorContext, Interceptor, StreamChunkContext,
    StreamEndContext,
};
pub use langfuse_interceptor::{LangfuseConfig, LangfuseInterceptor, LangfuseState};

// Re-export specific builder and response types for convenience
// NOTE: We avoid wildcard re-exports to prevent naming conflicts between modules
pub use builders::audio::{
    SpeechBuilder, TimestampGranularity, TranscriptionBuilder, TranscriptionRequest,
    TranslationBuilder, TranslationRequest,
};
pub use builders::chat::{
    image_base64_part, image_base64_part_with_detail, image_url_part, image_url_part_with_detail,
    system_user, text_part, user_message,
};
pub use builders::embeddings::{EmbeddingInput, EmbeddingsBuilder};
pub use builders::images::{
    Background, ImageEditBuilder, ImageEditRequest, ImageGenerationBuilder, ImageInputFidelity,
    ImageInputFidelityTextVariantEnum, ImageVariationBuilder, ImageVariationRequest, Moderation,
    OutputFormat, Quality, ResponseFormat, Size, Style,
};
pub use builders::threads::{
    AttachmentTool, MessageAttachment, ThreadMessageBuilder, ThreadRequestBuilder,
};
pub use builders::uploads::UploadBuilder;
pub use builders::responses::{responses_simple, responses_system_user, ResponsesBuilder};
pub use builders::{Builder, ChatCompletionBuilder, Sendable};
// Re-export vision types for convenience
pub use openai_client_base::models::chat_completion_request_message_content_part_image_image_url::Detail;
pub use openai_client_base::models::create_upload_request::Purpose as UploadPurpose;
pub use responses::chat::{
    ChatChoice, ChatCompletionResponse, ChatCompletionResponseExt,
    ChatMessage as ResponseChatMessage, FunctionCall, ToolCall, ToolCallExt,
};
pub use responses::{tool_function, tool_web_search, ChatCompletionResponseWrapper};
pub use responses::{Response, Tool, ToolChoice, Usage};

// Test utilities (feature-gated)
#[cfg(feature = "test-utils")]
pub mod test_utils;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_config_creation() {
        let config = Config::builder().api_key("test-key").build();
        assert_eq!(config.api_key(), "test-key");
    }

    #[test]
    fn test_client_creation_with_config() {
        let config = Config::builder().api_key("test-key").build();
        let result = Client::builder(config);
        assert!(result.is_ok());
    }
}