openai_interface/
lib.rs

//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, and low-level way to interact with OpenAI's API,
//! supporting both streaming and non-streaming responses. It leverages Rust's powerful type
//! system for safety and performance, while exposing the full flexibility of the API.
//!
//! # Features
//!
//! - **Chat and Completions**: Support for OpenAI's chat completions and legacy
//!   completions APIs, including both streaming and non-streaming responses.
//! - **Files**: Support for OpenAI's files API.
//! - **Strong Typing**: Complete type definitions for all API requests and responses,
//!   utilizing Rust's powerful type system.
//! - **Error Handling**: Comprehensive error handling with detailed error types defined in
//!   the [`errors`] module (a sketch follows this list).
//! - **Async/Await**: Built with async/await support.
//! - **Musl Support**: Designed to work with musl libc out of the box.
//! - **Multiple Provider Support**: Expected to work with OpenAI, DeepSeek, Qwen, and other
//!   OpenAI-compatible API providers.
//!
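//! The sketch below shows how a caller might branch on the crate's error type. It is a
//! hedged illustration only: the error name `OApiError` and its variants are assumptions
//! made for this example, not the crate's confirmed definitions; see the [`errors`]
//! module for the real types.
//!
//! ```rust,ignore
//! use openai_interface::errors::OApiError; // name assumed for illustration
//!
//! fn report(err: OApiError) {
//!     match err {
//!         // Variant names here are hypothetical.
//!         OApiError::Http(e) => eprintln!("transport failure: {e}"),
//!         OApiError::Api { code, message } => eprintln!("API error {code}: {message}"),
//!         other => eprintln!("unexpected error: {other}"),
//!     }
//! }
//! ```
//!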
//! ## Implemented APIs
//!
//! - Chat (only the `create` method)
//! - Completions (a rough sketch follows this list)
//! - Files
//!
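//! As a rough sketch of the completions API, this example assumes the `completions`
//! module mirrors the chat module's `RequestBody`/`PostNoStream` pattern. The module
//! path and field names below are assumptions for illustration, not the crate's
//! confirmed API; consult the `completions` module for the real definitions.
//!
//! ```rust,ignore
//! // Hypothetical path and fields; check the `completions` module for the real ones.
//! use openai_interface::completions::request::RequestBody;
//! use openai_interface::rest::post::PostNoStream;
//!
//! async fn complete() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         model: "deepseek-chat".to_string(),
//!         prompt: "Write a haiku about Rust.".to_string(),
//!         ..Default::default()
//!     };
//!     let response = request
//!         .get_response("https://api.deepseek.com/v1", "your-api-key")
//!         .await?;
//!     println!("{:?}", response);
//!     Ok(())
//! }
//! ```
//!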
//! ## In Development
//!
//! - The remaining methods of the `Chat` API
//!
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! This example demonstrates how to make a non-streaming request to the chat completion API.
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//! use openai_interface::chat::create::request::{Message, RequestBody};
//! use openai_interface::chat::create::response::no_streaming::ChatCompletion;
//! use openai_interface::rest::post::PostNoStream;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
//! // Note: this must be a `static`, not a `const`, so the lazy value is initialized once.
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/v1";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and deserialize the complete response.
//!     let chat_completion: ChatCompletion = request
//!         .get_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!     let text = chat_completion.choices[0]
//!         .message
//!         .content
//!         .as_deref()
//!         .unwrap();
//!     println!("{}", text);
//!     Ok(())
//! }
//! ```
//!
//! ## Streaming Chat Completion
//!
//! This example demonstrates how to handle streaming responses from the API. As with the
//! non-streaming example, all API parameters can be adjusted directly through the request
//! struct; a further sketch of parameter tuning follows this example.
//!
//! ```rust,no_run
//! use openai_interface::chat::create::response::streaming::{ChatCompletionChunk, CompletionContent};
//! use openai_interface::chat::create::request::{Message, RequestBody};
//! use openai_interface::rest::post::PostStream;
//! use futures_util::StreamExt;
//!
//! use std::sync::LazyLock;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
//! // Note: this must be a `static`, not a `const`, so the lazy value is initialized once.
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/v1";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Who are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and obtain a stream of chunks.
//!     let mut response_stream = request
//!         .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     let mut message = String::new();
//!
//!     while let Some(chunk_result) = response_stream.next().await {
//!         let chunk: ChatCompletionChunk = chunk_result?;
//!         // The final chunk may carry no choices or content, so avoid unwrapping blindly.
//!         if let Some(choice) = chunk.choices.first() {
//!             if let Some(content) = choice.delta.content.as_ref() {
//!                 let text = match content {
//!                     CompletionContent::Content(s) => s,
//!                     CompletionContent::ReasoningContent(s) => s,
//!                 };
//!                 message.push_str(text);
//!             }
//!         }
//!     }
//!
//!     println!("{}", message);
//!     Ok(())
//! }
//! ```
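//!
//! Since every request parameter is a plain struct field, tuning a request only means
//! setting more fields before sending it. In the sketch below, the optional fields
//! `temperature` and `max_tokens` are assumptions made for illustration; check
//! `chat::create::request::RequestBody` for the fields the crate actually defines.
//!
//! ```rust,ignore
//! use openai_interface::chat::create::request::{Message, RequestBody};
//!
//! let request = RequestBody {
//!     messages: vec![Message::User {
//!         content: "Explain Rust lifetimes in one paragraph.".to_string(),
//!         name: None,
//!     }],
//!     model: "deepseek-chat".to_string(),
//!     stream: false,
//!     // Hypothetical optional fields; names assumed, not confirmed by this crate.
//!     temperature: Some(0.2),
//!     max_tokens: Some(256),
//!     ..Default::default()
//! };
//! ```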
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for lightweight
//! deployments in containerized environments. Expect longer compile times, as OpenSSL
//! is built from source.
//!
//! To build for musl:
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```

pub mod chat;
pub mod completions;
pub mod errors;
pub mod files;
pub mod rest;

#[cfg(test)]
mod tests {
    use crate::chat::create::request::{Message, RequestBody};
    use crate::chat::create::response::no_streaming::ChatCompletion;
    use crate::chat::create::response::streaming::{ChatCompletionChunk, CompletionContent};
    use crate::rest::post::{PostNoStream, PostStream};
    use futures_util::StreamExt;
    use std::sync::LazyLock;

    // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
    // Note: this must be a `static`, not a `const`, so the lazy value is initialized once.
    static DEEPSEEK_API_KEY: LazyLock<&str> =
        LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
    const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/v1";
    const DEEPSEEK_MODEL: &str = "deepseek-chat";

    #[tokio::test]
    async fn test_no_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Hello, how are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: false,
            ..Default::default()
        };

        // Send the request and deserialize the complete response.
        let chat_completion: ChatCompletion = request
            .get_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;
        let text = chat_completion.choices[0]
            .message
            .content
            .as_deref()
            .unwrap();
        println!("lib::test_no_streaming message: {}", text);
        Ok(())
    }

    #[tokio::test]
    async fn test_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Who are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: true,
            ..Default::default()
        };

        // Send the request and obtain a stream of chunks.
        let mut response_stream = request
            .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;

        let mut message = String::new();

        while let Some(chunk_result) = response_stream.next().await {
            let chunk: ChatCompletionChunk = chunk_result?;
            // The final chunk may carry no choices or content, so avoid unwrapping blindly.
            if let Some(choice) = chunk.choices.first() {
                if let Some(content) = choice.delta.content.as_ref() {
                    let text = match content {
                        CompletionContent::Content(s) => s,
                        CompletionContent::ReasoningContent(s) => s,
                    };
                    println!("lib::test_streaming message: {}", text);
                    message.push_str(text);
                }
            }
        }

        println!("lib::test_streaming message: {}", message);
        Ok(())
    }
}