openai_interface/lib.rs
//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, and low-level way to interact with OpenAI's API,
//! supporting both streaming and non-streaming responses. It leverages Rust's powerful type
//! system for safety and performance, while exposing the full flexibility of the API.
//!
//! # Features
//!
//! - **Chat Completions**: Full support for OpenAI's chat completion API
//! - **Streaming and Non-streaming**: Support for both streaming and non-streaming responses
//! - **Strong Typing**: Complete type definitions for all API requests and responses
//! - **Error Handling**: Comprehensive error handling with detailed error types
//! - **Async/Await**: Built with async/await support for efficient asynchronous operations
//! - **Musl Support**: Designed to work with musl libc for lightweight deployments
//!
//! ## Implemented APIs
//!
//! - Chat Completions
//! - Completions
//!
//! ## In Development
//!
//! - Files
//!
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! This example demonstrates how to make a non-streaming request to the chat completion API.
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::chat::response::no_streaming::ChatCompletion;
//! use openai_interface::rest::post::PostNoStream;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
//! // Note the `static` (not `const`): a `const` LazyLock would be re-instantiated,
//! // and the key re-read, at every use site.
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and deserialize the complete response.
//!     let chat_completion: ChatCompletion = request
//!         .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
//!         .await?;
//!     let text = chat_completion.choices[0]
//!         .message
//!         .content
//!         .as_deref()
//!         .unwrap();
//!     println!("{}", text);
//!     Ok(())
//! }
//! ```
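//!
//! Because `RequestBody` implements `Default`, any optional API parameter can be set in
//! the struct literal alongside `model` and `stream`, as above. A minimal, hypothetical
//! sketch (the `temperature` field name is an assumption about `RequestBody`, not a
//! confirmed part of the crate's API):
//!
//! ```rust,ignore
//! let request = RequestBody {
//!     model: DEEPSEEK_MODEL.to_string(),
//!     temperature: Some(0.2), // assumed field; substitute any parameter the struct exposes
//!     ..Default::default()    // everything else keeps its default value
//! };
//! ```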
//!
//! ## Streaming Chat Completion
//!
//! This example demonstrates how to handle streaming responses from the API. As with the
//! non-streaming example, all API parameters can be adjusted directly through the request
//! struct.
//!
//! ```rust,no_run
//! use openai_interface::chat::request::{Message, RequestBody};
//! use openai_interface::chat::response::streaming::{ChatCompletionChunk, CompletionContent};
//! use openai_interface::rest::post::PostStream;
//! use futures_util::StreamExt;
//!
//! use std::sync::LazyLock;
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Who are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and obtain a stream of chunks.
//!     let mut response_stream = request
//!         .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     let mut message = String::new();
//!
//!     while let Some(chunk_result) = response_stream.next().await {
//!         let chunk: ChatCompletionChunk = chunk_result?;
//!         // The final chunk may carry no content (only a finish reason), so
//!         // skip empty deltas instead of unwrapping them.
//!         if let Some(content) = chunk.choices.first().and_then(|c| c.delta.content.as_ref()) {
//!             let text = match content {
//!                 CompletionContent::Content(s) => s,
//!                 CompletionContent::ReasoningContent(s) => s,
//!             };
//!             print!("{}", text); // print incrementally as chunks arrive
//!             message.push_str(text);
//!         }
//!     }
//!
//!     println!("\n{}", message);
//!     Ok(())
//! }
//! ```
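//!
//! ## Completions
//!
//! The plain completions endpoint (module `completions`) follows the same
//! request/response pattern as chat. The sketch below is a rough illustration only:
//! it assumes the `completions` module mirrors the chat module's design, and the
//! type path and `prompt` field shown are assumptions rather than the crate's
//! confirmed API.
//!
//! ```rust,ignore
//! use openai_interface::completions::request::RequestBody; // path assumed
//! use openai_interface::rest::post::PostNoStream;
//!
//! let request = RequestBody {
//!     model: "deepseek-chat".to_string(),
//!     prompt: "Once upon a time".to_string(), // field name assumed
//!     stream: false,
//!     ..Default::default()
//! };
//! let completion = request
//!     .get_response("https://api.openai.com/v1/completions", "sk-your-key")
//!     .await?;
//! ```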
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for
//! lightweight deployments in containerized environments. Expect longer compile
//! times for musl targets, since OpenSSL has to be built from source.
//!
//! To build for musl:
//!
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```
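//!
//! If the build script cannot find OpenSSL when cross-compiling, one common
//! workaround (a suggestion based on the `openssl` crate's `vendored` feature,
//! not a requirement of this crate) is to vendor OpenSSL in Cargo.toml:
//!
//! ```toml
//! [dependencies]
//! openssl = { version = "0.10", features = ["vendored"] }
//! ```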

pub mod chat;
pub mod completions;
pub mod errors;
pub mod files;
pub mod rest;

#[cfg(test)]
mod tests {
    use crate::chat::request::{Message, RequestBody};
    use crate::chat::response::no_streaming::ChatCompletion;
    use crate::chat::response::streaming::{ChatCompletionChunk, CompletionContent};
    use crate::rest::post::{PostNoStream, PostStream};
    use futures_util::StreamExt;
    use std::sync::LazyLock;

    // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key.
    // `static`, not `const`: a `const` LazyLock is re-instantiated at every use,
    // so the lazy initialization would never be shared.
    static DEEPSEEK_API_KEY: LazyLock<&str> =
        LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
    const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
    const DEEPSEEK_MODEL: &str = "deepseek-chat";

    #[tokio::test]
    async fn test_no_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Hello, how are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: false,
            ..Default::default()
        };

        // Send the request and deserialize the complete response.
        let chat_completion: ChatCompletion = request
            .get_response(DEEPSEEK_CHAT_URL, &*DEEPSEEK_API_KEY)
            .await?;
        let text = chat_completion.choices[0]
            .message
            .content
            .as_deref()
            .unwrap();
        println!("lib::test_no_streaming message: {}", text);
        Ok(())
    }

    #[tokio::test]
    async fn test_streaming() -> Result<(), Box<dyn std::error::Error>> {
        let request = RequestBody {
            messages: vec![
                Message::System {
                    content: "You are a helpful assistant.".to_string(),
                    name: None,
                },
                Message::User {
                    content: "Who are you?".to_string(),
                    name: None,
                },
            ],
            model: DEEPSEEK_MODEL.to_string(),
            stream: true,
            ..Default::default()
        };

        // Send the request and obtain a stream of chunks.
        let mut response_stream = request
            .get_stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
            .await?;

        let mut message = String::new();

        while let Some(chunk_result) = response_stream.next().await {
            let chunk: ChatCompletionChunk = chunk_result?;
            // The final chunk may carry no content (only a finish reason), so
            // skip empty deltas instead of unwrapping them.
            if let Some(content) = chunk.choices.first().and_then(|c| c.delta.content.as_ref()) {
                let text = match content {
                    CompletionContent::Content(s) => s,
                    CompletionContent::ReasoningContent(s) => s,
                };
                println!("lib::test_streaming chunk: {}", text);
                message.push_str(text);
            }
        }

        println!("lib::test_streaming message: {}", message);
        Ok(())
    }
}