openai_interface/
lib.rs

//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, low-level way to interact with
//! OpenAI's API, supporting both streaming and non-streaming responses, and
//! takes full advantage of Rust's type system.
//!
//! # Features
//!
//! - **Chat Completions**: Full support for OpenAI's chat completion API
//! - **Streaming and Non-streaming**: Support for both streaming and non-streaming responses
//! - **Strong Typing**: Complete type definitions for all API requests and responses
//! - **Error Handling**: Comprehensive error handling with detailed error types
//! - **Async/Await**: Built with async/await support for efficient asynchronous operations
//! - **Musl Support**: Designed to work with musl libc for lightweight deployments
//!
//!
//! # Modules
//!
//! - [`chat`]: Contains all chat-completion-related structs, enums, and methods.
//! - [`errors`]: Defines the error types used throughout the crate; see the
//!   sketch below.
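//!
//! Request methods return `Result`s, so errors can be propagated with `?` or
//! inspected directly. A minimal, hypothetical sketch (the concrete error type
//! lives in the [`errors`] module; the names below are illustrative, not the
//! crate's real API):
//!
//! ```rust,ignore
//! match request.get_response(url, api_key).await {
//!     Ok(body) => println!("{}", body),
//!     // Assumes the crate's error type implements `Display`.
//!     Err(e) => eprintln!("request failed: {}", e),
//! }
//! ```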
//!
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::chat::response::no_streaming::ChatCompletion;
//! use openai_interface::rest::post::NoStream;
//! use std::str::FromStr;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request
//!     let response: String = request
//!         .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
//!         .await?;
//!     let chat_completion = ChatCompletion::from_str(&response).unwrap();
//!     let text = chat_completion.choices[0]
//!         .message
//!         .content
//!         .as_deref()
//!         .unwrap();
//!     println!("{}", text);
//!     Ok(())
//! }
//! ```
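//!
//! Because `ChatCompletion` implements `FromStr`, the response body can
//! equivalently be parsed with `response.parse::<ChatCompletion>()`.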
//!
//! ## Streaming Chat Completion
//!
//! ```rust,no_run
//! use openai_interface::chat::response::streaming::{CompletionContent, ChatCompletionChunk};
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::rest::post::Stream;
//! use futures_util::StreamExt;
//!
//! use std::str::FromStr;
//! use std::sync::LazyLock;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Who are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request
//!     let mut response_stream = request
//!         .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     let mut message = String::new();
//!
//!     while let Some(chunk_result) = response_stream.next().await {
//!         let chunk_string = chunk_result?;
//!         if chunk_string == "[DONE]" {
//!             // The SSE stream has ended.
//!             break;
//!         }
//!         let chunk = ChatCompletionChunk::from_str(&chunk_string).unwrap();
//!         let content = match chunk.choices[0].delta.content.as_ref().unwrap() {
//!             CompletionContent::Content(s) => s,
//!             CompletionContent::ReasoningContent(s) => s,
//!         };
//!         println!("chunk: {}", content);
//!         message.push_str(content);
//!     }
//!
//!     println!("full message: {}", message);
//!     Ok(())
//! }
//! ```
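//!
//! Each item yielded by the stream is assumed to be an SSE payload with the
//! `data: ` prefix already stripped, which is why the loop above compares
//! against the bare `[DONE]` sentinel. For reference, a minimal sketch of how
//! a raw SSE line maps to such a payload (illustrative only, not this crate's
//! internals):
//!
//! ```rust
//! /// Extracts the payload from a raw `data: ...` SSE line, if present.
//! fn sse_payload(line: &str) -> Option<&str> {
//!     line.strip_prefix("data: ").map(str::trim)
//! }
//!
//! assert_eq!(sse_payload("data: [DONE]"), Some("[DONE]"));
//! assert_eq!(sse_payload(": keep-alive comment"), None);
//! ```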
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for
//! lightweight deployments in containerized environments. Expect longer
//! compile times, since OpenSSL has to be built from source.
//!
//! To build for musl:
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```
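//!
//! Depending on the host system, a musl-targeting C toolchain may also be
//! needed to compile OpenSSL, e.g. on Debian/Ubuntu (an assumption about your
//! distribution; adjust for your package manager):
//! ```bash
//! sudo apt-get install musl-tools
//! ```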

pub mod chat;
pub mod completions;
pub mod errors;
pub mod rest;

#[cfg(test)]
mod tests {
    use crate::chat::request::{Message, RequestBody};
    use crate::chat::response::no_streaming::ChatCompletion;
    use crate::chat::response::streaming::{ChatCompletionChunk, CompletionContent};
    use crate::rest::post::{NoStream, Stream};
    use futures_util::StreamExt;
    use std::str::FromStr;
    use std::sync::LazyLock;

    // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
    static DEEPSEEK_API_KEY: LazyLock<&str> =
        LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
    const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
    const DEEPSEEK_MODEL: &str = "deepseek-chat";

    #[tokio::test]
    async fn test_no_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Hello, how are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: false,
            ..Default::default()
        };

        // Send the request
        let response: String = request
            .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
            .await?;
        let chat_completion = ChatCompletion::from_str(&response).unwrap();
        let text = chat_completion.choices[0]
            .message
            .content
            .as_deref()
            .unwrap();
        println!("lib::test_no_streaming message: {}", text);
        Ok(())
    }

    #[tokio::test]
    async fn test_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Who are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: true,
            ..Default::default()
        };

        // Send the request
        let mut response_stream = request
            .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;

        let mut message = String::new();

        while let Some(chunk_result) = response_stream.next().await {
            let chunk_string = chunk_result?;
            if chunk_string == "[DONE]" {
                // The SSE stream has ended.
                break;
            }
            let chunk = ChatCompletionChunk::from_str(&chunk_string).unwrap();
            let content = match chunk.choices[0].delta.content.as_ref().unwrap() {
                CompletionContent::Content(s) => s,
                CompletionContent::ReasoningContent(s) => s,
            };
            println!("lib::test_streaming chunk: {}", content);
            message.push_str(content);
        }

        println!("lib::test_streaming message: {}", message);
        Ok(())
    }
}