openai_interface/
lib.rs

//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, and low-level way to interact with OpenAI's API,
//! supporting both streaming and non-streaming responses. It leverages Rust's powerful type
//! system for safety and performance, while exposing the full flexibility of the API.
//!
//! # Features
//!
//! - **Chat Completions**: Full support for OpenAI's chat completions and completions APIs,
//!   including both streaming and non-streaming responses.
//! - **Files**: Support for OpenAI's file API (still under development; see below).
//! - **Strong Typing**: Complete type definitions for all API requests and responses,
//!   built on Rust's powerful type system.
//! - **Error Handling**: Comprehensive error handling with detailed error types defined in
//!   the [`errors`] module.
//! - **Async/Await**: Built with async/await support.
//! - **Musl Support**: Designed to work with musl libc out of the box.
//! - **Multiple Provider Support**: Expected to work with OpenAI, DeepSeek, Qwen, and other
//!   OpenAI-compatible API providers, as sketched below.
//!
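//! Only DeepSeek is exercised in the examples below, but switching providers should only
//! require a different URL, key, and model name. A minimal sketch (the OpenAI endpoint is
//! standard; the Qwen endpoint is an assumption based on Alibaba's OpenAI-compatible mode):
//!
//! ```rust,ignore
//! // Hypothetical alternative endpoints; everything else stays the same.
//! const OPENAI_CHAT_URL: &str = "https://api.openai.com/v1/chat/completions";
//! const QWEN_CHAT_URL: &str =
//!     "https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions";
//! ```
//!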
//! ## Implemented APIs
//!
//! - Chat Completions
//! - Completions
//!
//! ## Under Development
//!
//! - Files
//!
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! This example demonstrates how to make a non-streaming request to the chat completion API.
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::chat::response::no_streaming::ChatCompletion;
//! use openai_interface::rest::post::PostNoStream;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
//! // `static`, not `const`: a `const` `LazyLock` would be re-initialized at every use.
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and deserialize the full (non-streamed) response.
//!     let chat_completion: ChatCompletion = request
//!         .get_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!     let text = chat_completion.choices[0]
//!         .message
//!         .content
//!         .as_deref()
//!         .unwrap();
//!     println!("{}", text);
//!     Ok(())
//! }
//! ```
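//!
//! The `?` operator above bubbles the crate's failure cases up through
//! `Box<dyn std::error::Error>`. A minimal sketch of handling the failure explicitly
//! instead (only the error's `Display` impl is assumed here; the concrete variants
//! live in the [`errors`] module):
//!
//! ```rust,ignore
//! // Fragment: same `request` as above, but without `?`.
//! match request.get_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY).await {
//!     Ok(completion) => println!("{:?}", completion.choices[0].message.content),
//!     Err(e) => eprintln!("chat completion failed: {e}"), // formatted via Display
//! }
//! ```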
//!
//! ## Streaming Chat Completion
//!
//! This example demonstrates how to handle streaming responses from the API. As with the
//! non-streaming example, all API parameters can be adjusted directly through the request struct.
//!
//! ```rust,no_run
//! use openai_interface::chat::response::streaming::{CompletionContent, ChatCompletionChunk};
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::rest::post::PostStream;
//! use futures_util::StreamExt;
//!
//! use std::sync::LazyLock;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
//! // `static`, not `const`: a `const` `LazyLock` would be re-initialized at every use.
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Who are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request; the response arrives as a stream of chunks.
//!     let mut response_stream = request
//!         .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     let mut message = String::new();
//!
//!     while let Some(chunk_result) = response_stream.next().await {
//!         let chunk: ChatCompletionChunk = chunk_result?;
//!         // Some chunks (e.g. a role-only first delta or the final chunk)
//!         // carry no content, so avoid unwrapping blindly.
//!         let Some(content) = chunk.choices[0].delta.content.as_ref() else {
//!             continue;
//!         };
//!         let content = match content {
//!             CompletionContent::Content(s) => s,
//!             CompletionContent::ReasoningContent(s) => s,
//!         };
//!         print!("{}", content);
//!         message.push_str(content);
//!     }
//!
//!     println!();
//!     println!("full message: {}", message);
//!     Ok(())
//! }
//! ```
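//!
//! For reasoning-capable models, the two content kinds can be routed into separate
//! buffers instead of one string. A minimal variation of the loop body above (same
//! types as in the example; `answer` and `reasoning` are hypothetical `String` buffers):
//!
//! ```rust,ignore
//! // Fragment: replaces the match inside the `while let` loop above.
//! match chunk.choices[0].delta.content.as_ref() {
//!     Some(CompletionContent::Content(s)) => answer.push_str(s),
//!     Some(CompletionContent::ReasoningContent(s)) => reasoning.push_str(s),
//!     None => {} // chunks without content are skipped
//! }
//! ```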
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for
//! lightweight deployments in containerized environments. Expect longer
//! compile times, as OpenSSL is built from source.
//!
//! To build for musl:
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```
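//!
//! Because OpenSSL is compiled from source for this target, a musl-capable C
//! toolchain must be present. On Debian-based systems, for example (an assumption
//! about your environment; package names vary by distribution):
//!
//! ```bash
//! sudo apt-get install musl-tools
//! ```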

/// Chat completion request and response types.
pub mod chat;
/// Completions (non-chat) API types.
pub mod completions;
/// Error types returned by this crate.
pub mod errors;
/// File API types (under development).
pub mod files;
/// HTTP layer providing `PostNoStream` and `PostStream`.
pub mod rest;

#[cfg(test)]
mod tests {
    use crate::chat::request::{Message, RequestBody};
    use crate::chat::response::no_streaming::ChatCompletion;
    use crate::chat::response::streaming::{ChatCompletionChunk, CompletionContent};
    use crate::rest::post::{PostNoStream, PostStream};
    use futures_util::StreamExt;
    use std::sync::LazyLock;

    // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
    // `static`, not `const`: a `const` `LazyLock` would be re-initialized at every use.
    static DEEPSEEK_API_KEY: LazyLock<&str> =
        LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
    const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
    const DEEPSEEK_MODEL: &str = "deepseek-chat";

    #[tokio::test]
    async fn test_no_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Hello, how are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: false,
            ..Default::default()
        };

        // Send the request and deserialize the full response.
        let chat_completion: ChatCompletion = request
            .get_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;
        let text = chat_completion.choices[0]
            .message
            .content
            .as_deref()
            .unwrap();
        println!("lib::test_no_streaming message: {}", text);
        Ok(())
    }

    #[tokio::test]
    async fn test_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Who are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: true,
            ..Default::default()
        };

        // Send the request; the response arrives as a stream of chunks.
        let mut response_stream = request
            .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;

        let mut message = String::new();

        while let Some(chunk_result) = response_stream.next().await {
            let chunk: ChatCompletionChunk = chunk_result?;
            // Skip chunks without content (e.g. role-only or final chunks).
            let Some(content) = chunk.choices[0].delta.content.as_ref() else {
                continue;
            };
            let content = match content {
                CompletionContent::Content(s) => s,
                CompletionContent::ReasoningContent(s) => s,
            };
            println!("lib::test_streaming chunk: {}", content);
            message.push_str(content);
        }

        println!("lib::test_streaming message: {}", message);
        Ok(())
    }
}