openai_interface/lib.rs
//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, but low-level way to interact with
//! OpenAI's API, supporting both streaming and non-streaming responses. Requests
//! and responses are modelled as strongly typed structs and enums, so payloads
//! are checked by the compiler rather than assembled from loose JSON.
//!
//! # Features
//!
//! - **Chat Completions**: Full support for OpenAI's chat completion API
//! - **Streaming and Non-streaming**: Support for both streaming and non-streaming responses
//! - **Strong Typing**: Complete type definitions for all API requests and responses
//! - **Error Handling**: Comprehensive error handling with detailed error types
//! - **Async/Await**: Built with async/await support for efficient asynchronous operations
//! - **Musl Support**: Designed to work with musl libc for lightweight deployments
//!
//! # Modules
//!
//! - [`chat`]: Contains all chat completion related structs, enums and methods.
//! - [`errors`]: Defines error types used throughout the crate.
//!
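//! Errors are returned as `Result`s whose error type comes from the [`errors`]
//! module. A minimal sketch of handling a failed request (illustrative only; the
//! exact type and variant names are defined in [`errors`]):
//!
//! ```ignore
//! match request.get_response(url, api_key).await {
//!     Ok(body) => println!("{}", body),
//!     // The error type converts into `Box<dyn std::error::Error>`, so it can
//!     // also be propagated with `?`, as the examples below do.
//!     Err(e) => eprintln!("request failed: {}", e),
//! }
//! ```
//!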
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::chat::response::no_streaming::ChatCompletion;
//! use std::str::FromStr;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and parse the JSON body into a typed response.
//!     let response: String = request
//!         .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
//!         .await?;
//!     let chat_completion = ChatCompletion::from_str(&response).unwrap();
//!     let text = chat_completion.choices[0]
//!         .message
//!         .content
//!         .as_deref()
//!         .unwrap();
//!     println!("{}", text);
//!     Ok(())
//! }
//! ```
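//!
//! Because [`ChatCompletion`] implements [`FromStr`], the final parse can
//! equivalently be written as `let chat_completion: ChatCompletion = response.parse().unwrap();`.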
//!
//! ## Streaming Chat Completion
//!
//! ```rust,no_run
//! use openai_interface::chat::response::streaming::{CompletionContent, ChatCompletionChunk};
//! use openai_interface::chat::request::{Message, RequestBody};
//! use futures_util::StreamExt;
//!
//! use std::str::FromStr;
//! use std::sync::LazyLock;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Who are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and consume the response as a stream of SSE payloads.
//!     let mut response_stream = request
//!         .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     let mut message = String::new();
//!
//!     while let Some(chunk_result) = response_stream.next().await {
//!         let chunk_string = chunk_result?;
//!         // The sentinel "[DONE]" marks the end of the SSE stream.
//!         if chunk_string == "[DONE]" {
//!             break;
//!         }
//!         let chunk = ChatCompletionChunk::from_str(&chunk_string).unwrap();
//!         let content = match chunk.choices[0].delta.content.as_ref().unwrap() {
//!             CompletionContent::Content(s) => s,
//!             CompletionContent::ReasoningContent(s) => s,
//!         };
//!         message.push_str(content);
//!     }
//!
//!     println!("{}", message);
//!     Ok(())
//! }
//! ```
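//!
//! For reasoning models that stream both a reasoning trace and an answer, the two
//! [`CompletionContent`] variants can be collected into separate buffers instead
//! of the single `message` string above. A sketch of the adapted loop body, with
//! `reasoning` and `answer` declared as empty `String`s beforehand:
//!
//! ```ignore
//! match chunk.choices[0].delta.content.as_ref().unwrap() {
//!     CompletionContent::Content(s) => answer.push_str(s),
//!     CompletionContent::ReasoningContent(s) => reasoning.push_str(s),
//! }
//! ```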
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for
//! lightweight deployments in containerized environments. Expect longer compile
//! times, since OpenSSL has to be built from source for musl targets.
//!
//! To build for musl:
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```
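//!
//! Building OpenSSL from source is usually done through the `openssl` crate's
//! `vendored` feature. If your dependency tree does not already enable it, a
//! `Cargo.toml` entry like the following should work (the version number is
//! illustrative):
//!
//! ```toml
//! [dependencies]
//! # Compile a bundled OpenSSL instead of linking the system library; this is
//! # what allows fully static musl builds.
//! openssl = { version = "0.10", features = ["vendored"] }
//! ```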

pub mod chat;
pub mod completions;
pub mod errors;
pub mod rest;

#[cfg(test)]
mod tests {
    use crate::chat::request::{Message, RequestBody};
    use crate::chat::response::no_streaming::ChatCompletion;
    use crate::chat::response::streaming::{ChatCompletionChunk, CompletionContent};
    use futures_util::StreamExt;
    use std::str::FromStr;
    use std::sync::LazyLock;

    // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
    static DEEPSEEK_API_KEY: LazyLock<&str> =
        LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
    const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
    const DEEPSEEK_MODEL: &str = "deepseek-chat";

    #[tokio::test]
    async fn test_no_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Hello, how are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: false,
            ..Default::default()
        };

        // Send the request
        let response: String = request
            .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
            .await?;
        let chat_completion = ChatCompletion::from_str(&response).unwrap();
        let text = chat_completion.choices[0]
            .message
            .content
            .as_deref()
            .unwrap();
        println!("lib::test_no_streaming message: {}", text);
        Ok(())
    }

    #[tokio::test]
    async fn test_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Who are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: true,
            ..Default::default()
        };

        // Send the request
        let mut response_stream = request
            .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;

        let mut message = String::new();

        while let Some(chunk_result) = response_stream.next().await {
            let chunk_string = chunk_result?;
            // The sentinel "[DONE]" marks the end of the SSE stream.
            if chunk_string == "[DONE]" {
                break;
            }
            let chunk = ChatCompletionChunk::from_str(&chunk_string).unwrap();
            let content = match chunk.choices[0].delta.content.as_ref().unwrap() {
                CompletionContent::Content(s) => s,
                CompletionContent::ReasoningContent(s) => s,
            };
            println!("lib::test_streaming message: {}", content);
            message.push_str(content);
        }

        println!("lib::test_streaming message: {}", message);
        Ok(())
    }
}