async_openai/lib.rs

//! Rust library for OpenAI
//!
//! ## Creating client
//!
//! ```
//! use async_openai::{Client, config::OpenAIConfig};
//!
//! // Create an OpenAI client with the API key from the env var OPENAI_API_KEY and the default base URL.
//! let client = Client::new();
//!
//! // The above is a shortcut for
//! let config = OpenAIConfig::default();
//! let client = Client::with_config(config);
//!
//! // OR use an API key from a different source and a non-default organization
//! let api_key = "sk-..."; // This secret could come from a file or an environment variable.
//! let config = OpenAIConfig::new()
//!     .with_api_key(api_key)
//!     .with_org_id("the-continental");
//!
//! let client = Client::with_config(config);
//!
//! // Use a custom reqwest client
//! let http_client = reqwest::ClientBuilder::new().user_agent("async-openai").build().unwrap();
//! let client = Client::new().with_http_client(http_client);
//! ```
//!
//!
//! ## Making requests
//!
//!```
//!# tokio_test::block_on(async {
//!
//! use async_openai::{Client, types::CreateCompletionRequestArgs};
//!
//! // Create a client
//! let client = Client::new();
//!
//! // Create a request using the builder pattern
//! // Every request struct has a companion builder struct with the same name plus an "Args" suffix
//! let request = CreateCompletionRequestArgs::default()
//!     .model("gpt-3.5-turbo-instruct")
//!     .prompt("Tell me the recipe of alfredo pasta")
//!     .max_tokens(40_u32)
//!     .build()
//!     .unwrap();
//!
//! // Call the API
//! let response = client
//!     .completions()      // Get the API "group" (completions, images, etc.) from the client
//!     .create(request)    // Make the API call in that "group"
//!     .await
//!     .unwrap();
//!
//! println!("{}", response.choices.first().unwrap().text);
//! # });
//!```
//!
//! ## Bring Your Own Types
//!
//! To use custom types for inputs and outputs, enable the `byot` feature, which provides additional generic methods with the same name and a `_byot` suffix.
//! This feature is available on methods whose return type is not `Bytes`.
//!
//!```
//!# #[cfg(feature = "byot")]
//!# tokio_test::block_on(async {
//! use async_openai::Client;
//! use serde_json::{Value, json};
//!
//! let client = Client::new();
//!
//! let response: Value = client
//!     .chat()
//!     .create_byot(json!({
//!         "messages": [
//!             {
//!                 "role": "developer",
//!                 "content": "You are a helpful assistant"
//!             },
//!             {
//!                 "role": "user",
//!                 "content": "What do you think about life?"
//!             }
//!         ],
//!         "model": "gpt-4o",
//!         "store": false
//!     }))
//!     .await
//!     .unwrap();
//!
//! if let Some(content) = response["choices"][0]["message"]["content"].as_str() {
//!     println!("{}", content);
//! }
//! # });
//!```
//!
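//! The response can also be deserialized into your own type instead of `serde_json::Value`.
//! A minimal sketch (the structs below are illustrative and only model a small part of the response):
//!
//!```
//!# #[cfg(feature = "byot")]
//!# tokio_test::block_on(async {
//! use async_openai::Client;
//! use serde::Deserialize;
//! use serde_json::json;
//!
//! // Hypothetical output types: only the fields we care about.
//! #[derive(Debug, Deserialize)]
//! struct MyMessage { content: Option<String> }
//!
//! #[derive(Debug, Deserialize)]
//! struct MyChoice { message: MyMessage }
//!
//! #[derive(Debug, Deserialize)]
//! struct MyChatResponse { choices: Vec<MyChoice> }
//!
//! let client = Client::new();
//!
//! let response: MyChatResponse = client
//!     .chat()
//!     .create_byot(json!({
//!         "messages": [{"role": "user", "content": "Hello"}],
//!         "model": "gpt-4o"
//!     }))
//!     .await
//!     .unwrap();
//!
//! println!("{:?}", response.choices.first());
//! # });
//!```
//!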
//! ## Dynamic Dispatch for OpenAI-compatible Providers
//!
//! Any struct that implements the `Config` trait can be wrapped in a smart pointer and cast to a `dyn Config`
//! trait object; a client can then be created with the `Box`- or `Arc`-wrapped configuration.
//!
//! For example:
//! ```
//! use async_openai::{Client, config::{Config, OpenAIConfig}};
//!
//! // Use `Box` or `std::sync::Arc` to wrap the config
//! let config = Box::new(OpenAIConfig::default()) as Box<dyn Config>;
//! // A function can now accept a `&Client<Box<dyn Config>>` parameter
//! // which can invoke any OpenAI-compatible API
//! let client: Client<Box<dyn Config>> = Client::with_config(config);
//! ```
//!
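//! Such a client can then be passed to provider-agnostic code. A minimal sketch (the function name is illustrative):
//! ```
//! use async_openai::{Client, config::{Config, OpenAIConfig}};
//!
//! // Hypothetical helper that works with any OpenAI-compatible provider.
//! fn supports_any_provider(client: &Client<Box<dyn Config>>) {
//!     // e.g. client.chat(), client.completions(), ... can be called here
//!     let _ = client;
//! }
//!
//! let config = Box::new(OpenAIConfig::default()) as Box<dyn Config>;
//! let client: Client<Box<dyn Config>> = Client::with_config(config);
//! supports_any_provider(&client);
//! ```
//!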
//! ## Microsoft Azure
//!
//! ```
//! use async_openai::{Client, config::AzureConfig};
//!
//! let config = AzureConfig::new()
//!     .with_api_base("https://my-resource-name.openai.azure.com")
//!     .with_api_version("2023-03-15-preview")
//!     .with_deployment_id("deployment-id")
//!     .with_api_key("...");
//!
//! let client = Client::with_config(config);
//!
//! // Note that `async-openai` only implements the OpenAI spec
//! // and doesn't maintain parity with the spec of the Azure OpenAI service.
//!
//! ```
//!
//!
//! ## Examples
//! For full working examples of all supported features, see the [examples](https://github.com/64bit/async-openai/tree/main/examples) directory in the repository.
//!
#![cfg_attr(docsrs, feature(doc_cfg))]

#[cfg(feature = "byot")]
pub(crate) use async_openai_macros::byot;

#[cfg(not(feature = "byot"))]
pub(crate) use async_openai_macros::byot_passthrough as byot;

mod admin;
mod assistants;
mod audio;
mod batches;
mod chat;
mod chatkit;
mod client;
mod completion;
pub mod config;
mod containers;
mod download;
mod embedding;
pub mod error;
mod evals;
mod file;
mod fine_tuning;
mod image;
mod impls;
mod model;
mod moderation;
#[cfg(feature = "realtime")]
mod realtime;
mod request_options;
mod responses;
pub mod traits;
pub mod types;
mod uploads;
mod util;
mod vectorstores;
mod video;
#[cfg(feature = "webhook")]
pub mod webhooks;

pub use admin::*;
pub use assistants::*;
pub use audio::Audio;
pub use audio::*;
pub use batches::Batches;
pub use chat::Chat;
pub use chatkit::Chatkit;
pub use client::Client;
pub use completion::Completions;
pub use containers::*;
pub use embedding::Embeddings;
pub use evals::*;
pub use file::Files;
pub use fine_tuning::FineTuning;
pub use image::Images;
pub use model::Models;
pub use moderation::Moderations;
#[cfg(feature = "realtime")]
pub use realtime::Realtime;
pub use request_options::RequestOptions;
pub use responses::*;
pub use uploads::Uploads;
pub use vectorstores::*;
pub use video::Videos;