openai_interface/lib.rs
//! A low-level Rust interface for interacting with OpenAI's API.
//!
//! This crate provides a simple, efficient, but deliberately low-level way to
//! interact with OpenAI's API, supporting both streaming and non-streaming
//! responses, and it leans on Rust's type system to keep requests and
//! responses well-formed.
//!
//! # Features
//!
//! - **Chat Completions**: Full support for OpenAI's chat completion API
//! - **Streaming and Non-streaming**: Support for both streaming and non-streaming responses
//! - **Strong Typing**: Complete type definitions for all API requests and responses
//! - **Error Handling**: Comprehensive error handling with detailed error types
//! - **Async/Await**: Built with async/await support for efficient asynchronous operations
//! - **Musl Support**: Designed to work with musl libc for lightweight deployments
//!
//! # Modules
//!
//! - [`chat`]: All chat-completion-related structs, enums, and methods
//! - [`errors`]: Error types used throughout the crate (see the sketch below)
//!
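//! As a quick illustration of how the [`errors`] module is meant to be used, a
//! fallible call can be matched on the crate's error type. This is a hedged
//! sketch only: the type name `OpenAIError` and its variants are assumptions
//! for illustration, not the crate's confirmed API, hence the `ignore` marker.
//!
//! ```rust,ignore
//! use openai_interface::chat::request::RequestBody;
//! use openai_interface::errors::OpenAIError; // assumed error type name
//!
//! async fn complete(request: RequestBody, url: &str, key: &str) {
//!     match request.get_response(url, key).await {
//!         Ok(response) => println!("{:?}", response),
//!         // Hypothetical variants: transport failures vs. errors the API reports.
//!         Err(OpenAIError::Transport(e)) => eprintln!("network error: {e}"),
//!         Err(OpenAIError::Api(e)) => eprintln!("API error: {e}"),
//!         Err(e) => eprintln!("other error: {e}"),
//!     }
//! }
//! ```
//!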
//! # Examples
//!
//! ## Non-streaming Chat Completion
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//!
//! use openai_interface::chat::request::{Message, RequestBody};
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Hello, how are you?".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: false,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and print the parsed response.
//!     let response = request
//!         .get_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!     println!("{:?}", response);
//!     Ok(())
//! }
//! ```
//!
//! ## Streaming Chat Completion
//!
//! ```rust,no_run
//! use std::sync::LazyLock;
//!
//! use futures_util::StreamExt;
//! use openai_interface::chat::request::{Message, RequestBody};
//!
//! // You need to provide your own DeepSeek API key at /keys/deepseek_domestic_key
//! static DEEPSEEK_API_KEY: LazyLock<&str> =
//!     LazyLock::new(|| include_str!("../keys/deepseek_domestic_key").trim());
//! const DEEPSEEK_CHAT_URL: &str = "https://api.deepseek.com/chat/completions";
//! const DEEPSEEK_MODEL: &str = "deepseek-chat";
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let request = RequestBody {
//!         messages: vec![
//!             Message::System {
//!                 content: "You are a helpful assistant.".to_string(),
//!                 name: None,
//!             },
//!             Message::User {
//!                 content: "Count from 1 to 10.".to_string(),
//!                 name: None,
//!             },
//!         ],
//!         model: DEEPSEEK_MODEL.to_string(),
//!         stream: true,
//!         ..Default::default()
//!     };
//!
//!     // Send the request and consume the response as a stream of chunks.
//!     let mut response_stream = request
//!         .stream_response(DEEPSEEK_CHAT_URL, *DEEPSEEK_API_KEY)
//!         .await?;
//!
//!     while let Some(chunk) = response_stream.next().await {
//!         println!("{}", chunk?);
//!     }
//!     Ok(())
//! }
//! ```
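//!
//! The loop above prints each chunk as it arrives. To reassemble the complete
//! reply instead, the chunks can be accumulated into a single `String`. A
//! minimal sketch, assuming (as in the loop above) that a chunk's `Display`
//! output is its text delta:
//!
//! ```rust,ignore
//! // Replaces the `while` loop in the streaming example above.
//! let mut full_reply = String::new();
//! while let Some(chunk) = response_stream.next().await {
//!     // Assumption: `Display` on a chunk yields only the delta text.
//!     full_reply.push_str(&chunk?.to_string());
//! }
//! println!("{full_reply}");
//! ```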
//!
//! # Musl Build
//!
//! This crate is designed to work with musl libc, making it suitable for
//! lightweight deployments in containerized environments. Expect longer
//! compile times, because OpenSSL has to be built from source.
//!
//! To build for musl:
//! ```bash
//! rustup target add x86_64-unknown-linux-musl
//! cargo build --target x86_64-unknown-linux-musl
//! ```
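//!
//! Linking against musl also requires a musl-capable C toolchain, both for the
//! linker and for the from-source OpenSSL build. A minimal sketch for a
//! Debian/Ubuntu host (the package name is distribution-specific, so treat it
//! as an assumption):
//!
//! ```bash
//! # Debian/Ubuntu: musl-gcc is provided by the musl-tools package
//! sudo apt-get install musl-tools
//! cargo build --release --target x86_64-unknown-linux-musl
//! ```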

pub mod chat;
pub mod errors;