openai_interface/
lib.rs

//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, but deliberately low-level way to interact
//! with OpenAI's API, supporting both streaming and non-streaming responses, and uses
//! Rust's type system to model every request and response precisely.
//!
//! # Features
//!
//! - **Chat Completions**: Full support for OpenAI's chat completion API
//! - **Streaming and Non-streaming**: Support for both streaming and non-streaming responses
//! - **Strong Typing**: Complete type definitions for all API requests and responses
//! - **Error Handling**: Comprehensive error handling with detailed error types
//! - **Async/Await**: Built with async/await support for efficient asynchronous operations
//! - **Musl Support**: Designed to work with musl libc for lightweight deployments
//!
//! # Modules
//!
//! - [`chat`]: Chat-completion request and response structs, enums, and methods
//! - [`errors`]: Error types used throughout the crate
//!
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! ```rust
//! use std::sync::LazyLock;
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::chat::response::no_streaming::ChatCompletion;
//! use std::str::FromStr;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request
//!     let response: String = request
//!         .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
//!         .await?;
//!     let chat_completion = ChatCompletion::from_str(&response).unwrap();
//!     let text = chat_completion.choices[0]
//!         .message
//!         .content
//!         .as_deref()
//!         .unwrap();
//!     println!("{:?}", text);
//!     Ok(())
//! }
//! ```
//!
//! ## Streaming Chat Completion
//!
//! ```rust
//! use openai_interface::chat::response::streaming::{CompletionContent, ChatCompletionChunk};
//! use openai_interface::chat::request::{Message, RequestBody};
//! use futures_util::StreamExt;
//!
//! use std::str::FromStr;
//! use std::sync::LazyLock;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Who are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request
//!     let mut response_stream = request
//!         .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     let mut message = String::new();
//!
//!     while let Some(chunk_result) = response_stream.next().await {
//!         let chunk_string = chunk_result?;
//!         if chunk_string == "[DONE]" {
//!             // The SSE stream signals completion with a final "[DONE]" sentinel.
//!             break;
//!         }
//!         let chunk = ChatCompletionChunk::from_str(&chunk_string).unwrap();
//!         let content = match chunk.choices[0].delta.content.as_ref().unwrap() {
//!             CompletionContent::Content(s) => s,
//!             CompletionContent::ReasoningContent(s) => s,
//!         };
//!         println!("{}", content);
//!         message.push_str(content);
//!     }
//!
//!     println!("{}", message);
//!     Ok(())
//! }
//! ```
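//!
//! ## Handling Errors
//!
//! Both `get_response` and `get_stream_response` return `Result`s, so failures can be
//! inspected instead of bubbled up with `?`. A minimal sketch, reusing the `request`
//! built above (it only prints the error via its `Display` impl; the concrete error
//! types are defined in the [`errors`] module):
//!
//! ```rust,ignore
//! match request.get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY).await {
//!     Ok(body) => println!("{}", body),
//!     // Match on the variants defined in `openai_interface::errors` here if
//!     // finer-grained recovery (e.g. retrying transport errors) is needed.
//!     Err(e) => eprintln!("request failed: {}", e),
//! }
//! ```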
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for
//! lightweight deployments in containerized environments. Expect longer compile
//! times, because OpenSSL has to be built from source for the musl target.
//!
//! To build for musl:
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```
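//!
//! If linking fails because no musl-compatible OpenSSL is available, a common
//! workaround is to vendor OpenSSL so that Cargo builds it from source automatically.
//! This assumes the `openssl` crate sits in your dependency tree (an illustrative
//! suggestion, not something this crate configures for you):
//!
//! ```toml
//! # Cargo.toml of the consuming application (illustrative).
//! [dependencies]
//! openssl = { version = "0.10", features = ["vendored"] }
//! ```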

pub mod chat;
pub mod errors;

#[cfg(test)]
mod tests {
    use crate::chat::request::{Message, RequestBody};
    use crate::chat::response::no_streaming::ChatCompletion;
    use crate::chat::response::streaming::{ChatCompletionChunk, CompletionContent};
    use futures_util::StreamExt;
    use std::str::FromStr;
    use std::sync::LazyLock;

    // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
    static DEEPSEEK_API_KEY: LazyLock<&str> =
        LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
    const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
    const DEEPSEEK_MODEL: &str = "deepseek-chat";

    #[tokio::test]
    async fn test_no_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Hello, how are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: false,
            ..Default::default()
        };

        // Send the request
        let response: String = request
            .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
            .await?;
        let chat_completion = ChatCompletion::from_str(&response).unwrap();
        let text = chat_completion.choices[0]
            .message
            .content
            .as_deref()
            .unwrap();
        println!("lib::test_no_streaming message: {}", text);
        Ok(())
    }

    #[tokio::test]
    async fn test_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Who are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: true,
            ..Default::default()
        };

        // Send the request
        let mut response_stream = request
            .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;

        let mut message = String::new();

        while let Some(chunk_result) = response_stream.next().await {
            let chunk_string = chunk_result?;
            if chunk_string == "[DONE]" {
                // The SSE stream signals completion with a final "[DONE]" sentinel.
                break;
            }
            let chunk = ChatCompletionChunk::from_str(&chunk_string).unwrap();
            let content = match chunk.choices[0].delta.content.as_ref().unwrap() {
                CompletionContent::Content(s) => s,
                CompletionContent::ReasoningContent(s) => s,
            };
            println!("lib::test_streaming message: {}", content);
            message.push_str(content);
        }

        println!("lib::test_streaming message: {}", message);
        Ok(())
    }
}