async_openai/lib.rs
//! Rust library for OpenAI
//!
//! ## Creating a client
//!
//! ```
//! use async_openai::{Client, config::OpenAIConfig};
//!
//! // Create an OpenAI client with the API key from the env var OPENAI_API_KEY and the default base URL.
//! let client = Client::new();
//!
//! // The above is a shortcut for
//! let config = OpenAIConfig::default();
//! let client = Client::with_config(config);
//!
//! // OR use an API key from a different source and a non-default organization
//! let api_key = "sk-..."; // This secret could come from a file or an environment variable.
//! let config = OpenAIConfig::new()
//!     .with_api_key(api_key)
//!     .with_org_id("the-continental");
//!
//! let client = Client::with_config(config);
//!
//! // Use a custom reqwest client
//! let http_client = reqwest::ClientBuilder::new().user_agent("async-openai").build().unwrap();
//! let client = Client::new().with_http_client(http_client);
//! ```
//!
//!
//! ## Making requests
//!
//!```
//!# tokio_test::block_on(async {
//!
//! use async_openai::{Client, types::{CreateCompletionRequestArgs}};
//!
//! // Create a client
//! let client = Client::new();
//!
//! // Create the request using the builder pattern
//! // Every request struct has a companion builder struct with the same name plus an `Args` suffix
//! let request = CreateCompletionRequestArgs::default()
//!     .model("gpt-3.5-turbo-instruct")
//!     .prompt("Tell me the recipe of alfredo pasta")
//!     .max_tokens(40_u32)
//!     .build()
//!     .unwrap();
//!
//! // Call the API
//! let response = client
//!     .completions()   // Get the API "group" (completions, images, etc.) from the client
//!     .create(request) // Make the API call in that "group"
//!     .await
//!     .unwrap();
//!
//! println!("{}", response.choices.first().unwrap().text);
//! # });
//!```
//!
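//! Other API groups are accessed from the client in the same way. For example, listing the
//! available models (a brief sketch along the same lines as the example above):
//!
//!```
//!# tokio_test::block_on(async {
//! use async_openai::Client;
//!
//! let client = Client::new();
//!
//! // `models()` is another API "group" on the client
//! let models = client.models().list().await.unwrap();
//!
//! for model in models.data {
//!     println!("{}", model.id);
//! }
//! # });
//!```
//!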
//! ## Bring Your Own Types
//!
//! To use custom types for inputs and outputs, enable the `byot` feature, which provides additional generic methods with the same name and a `_byot` suffix.
//! This feature is available on methods whose return type is not `Bytes`.
//!
//!```
//!# #[cfg(feature = "byot")]
//!# tokio_test::block_on(async {
//! use async_openai::Client;
//! use serde_json::{Value, json};
//!
//! let client = Client::new();
//!
//! let response: Value = client
//!     .chat()
//!     .create_byot(json!({
//!         "messages": [
//!             {
//!                 "role": "developer",
//!                 "content": "You are a helpful assistant"
//!             },
//!             {
//!                 "role": "user",
//!                 "content": "What do you think about life?"
//!             }
//!         ],
//!         "model": "gpt-4o",
//!         "store": false
//!     }))
//!     .await
//!     .unwrap();
//!
//! if let Some(content) = response["choices"][0]["message"]["content"].as_str() {
//!     println!("{}", content);
//! }
//! # });
//!```
//!
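//! The same call can also deserialize directly into your own response type. A minimal
//! sketch (the `Minimal*` structs below are illustrative and not part of this crate):
//!
//!```
//!# #[cfg(feature = "byot")]
//!# tokio_test::block_on(async {
//! use async_openai::Client;
//! use serde::Deserialize;
//! use serde_json::json;
//!
//! // Only the fields you care about need to be declared
//! #[derive(Deserialize)]
//! struct MinimalMessage { content: Option<String> }
//!
//! #[derive(Deserialize)]
//! struct MinimalChoice { message: MinimalMessage }
//!
//! #[derive(Deserialize)]
//! struct MinimalResponse { choices: Vec<MinimalChoice> }
//!
//! let client = Client::new();
//!
//! let response: MinimalResponse = client
//!     .chat()
//!     .create_byot(json!({
//!         "messages": [{ "role": "user", "content": "Say hello" }],
//!         "model": "gpt-4o"
//!     }))
//!     .await
//!     .unwrap();
//!
//! if let Some(content) = &response.choices[0].message.content {
//!     println!("{}", content);
//! }
//! # });
//!```
//!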
//! ## Dynamic Dispatch for Different Providers
//!
//! For any struct that implements the `Config` trait, you can wrap it in a smart pointer and cast the pointer to a `dyn Config`
//! trait object; your client can then accept any wrapped configuration type.
//!
//! For example,
//! ```
//! use async_openai::{Client, config::Config, config::OpenAIConfig};
//! unsafe { std::env::set_var("OPENAI_API_KEY", "only for doc test") }
//!
//! let openai_config = OpenAIConfig::default();
//! // You can use `std::sync::Arc` to wrap the config as well
//! let config = Box::new(openai_config) as Box<dyn Config>;
//! let client: Client<Box<dyn Config>> = Client::with_config(config);
//! ```
//!
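//! Because the concrete configuration type is erased, the provider can be chosen at runtime.
//! A minimal sketch (the `client_for` helper below is illustrative, not part of this crate):
//! ```
//! use async_openai::{Client, config::{AzureConfig, Config, OpenAIConfig}};
//!
//! // Pick a provider configuration at runtime and return a client over `Box<dyn Config>`
//! fn client_for(use_azure: bool) -> Client<Box<dyn Config>> {
//!     let config: Box<dyn Config> = if use_azure {
//!         Box::new(AzureConfig::new().with_api_key("..."))
//!     } else {
//!         Box::new(OpenAIConfig::new().with_api_key("..."))
//!     };
//!     Client::with_config(config)
//! }
//!
//! let _client = client_for(false);
//! ```
//!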
//! ## Microsoft Azure
//!
//! ```
//! use async_openai::{Client, config::AzureConfig};
//!
//! let config = AzureConfig::new()
//!     .with_api_base("https://my-resource-name.openai.azure.com")
//!     .with_api_version("2023-03-15-preview")
//!     .with_deployment_id("deployment-id")
//!     .with_api_key("...");
//!
//! let client = Client::with_config(config);
//!
//! // Note that `async-openai` only implements the OpenAI spec
//! // and doesn't maintain parity with the spec of the Azure OpenAI service.
//! ```
//!
//!
//! ## Examples
//! For full working examples of all supported features, see the [examples](https://github.com/64bit/async-openai/tree/main/examples) directory in the repository.
//!
#![cfg_attr(docsrs, feature(doc_cfg))]

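// The `byot` attribute macro generates the additional `*_byot` method variants described
// above; when the `byot` feature is disabled, `byot_passthrough` is used instead so the
// annotated methods are emitted unchanged.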
#[cfg(feature = "byot")]
pub(crate) use async_openai_macros::byot;

#[cfg(not(feature = "byot"))]
pub(crate) use async_openai_macros::byot_passthrough as byot;

mod assistants;
mod audio;
mod audit_logs;
mod batches;
mod chat;
mod client;
mod completion;
pub mod config;
mod download;
mod embedding;
pub mod error;
mod file;
mod fine_tuning;
mod image;
mod invites;
mod messages;
mod model;
mod moderation;
mod project_api_keys;
mod project_service_accounts;
mod project_users;
mod projects;
mod responses;
mod runs;
mod steps;
mod threads;
pub mod traits;
pub mod types;
mod uploads;
mod users;
mod util;
mod vector_store_file_batches;
mod vector_store_files;
mod vector_stores;

pub use assistants::Assistants;
pub use audio::Audio;
pub use audit_logs::AuditLogs;
pub use batches::Batches;
pub use chat::Chat;
pub use client::Client;
pub use completion::Completions;
pub use embedding::Embeddings;
pub use file::Files;
pub use fine_tuning::FineTuning;
pub use image::Images;
pub use invites::Invites;
pub use messages::Messages;
pub use model::Models;
pub use moderation::Moderations;
pub use project_api_keys::ProjectAPIKeys;
pub use project_service_accounts::ProjectServiceAccounts;
pub use project_users::ProjectUsers;
pub use projects::Projects;
pub use responses::Responses;
pub use runs::Runs;
pub use steps::Steps;
pub use threads::Threads;
pub use uploads::Uploads;
pub use users::Users;
pub use vector_store_file_batches::VectorStoreFileBatches;
pub use vector_store_files::VectorStoreFiles;
pub use vector_stores::VectorStores;