1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
//! Unofficial OpenAI API client for Rust.
//!
//! Fieri provides an asynchronous Rust interface for interacting with the OpenAI API,
//! allowing you to easily use OpenAI's state-of-the-art machine learning models in your Rust projects.
//!
//! Before you can use the Rust Client for OpenAI, you'll need to sign up for an API key at the OpenAI Developer Portal.
//! Once you've signed up, you'll be able to find your API key in the API Keys section of the developer portal.
//!
//! Each request requires a Client, initialized with your API key.
//! By default, the API key is read from the `OPENAI_API_KEY` environment variable.
//!
//! ## Examples
//!
//! ### Generate text based on a prompt
//! ```no_run
//! use fieri::{
//!     completion::{create, CompletionParamBuilder},
//!     Client, Error,
//! };
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Error> {
//!     let client = Client::new();
//!
//!     let param = CompletionParamBuilder::new("ada")
//!         .prompt("Generate a plot for an absurd interstellar parody.")
//!         .max_tokens(500)
//!         .temperature(0.9)
//!         .top_p(1.0)
//!         .frequency_penalty(0.0)
//!         .presence_penalty(0.0)
//!         .build()?;
//!
//!     let resp = create(&client, &param).await?;
//!     println!("Generated text: {:#?}", resp);
//!
//!     Ok(())
//! }
//! ```
//!
//! ### Generate and stream back text based on a prompt
//! ```no_run
//! use fieri::{
//!     completion::{create_with_stream, Completion, CompletionParamBuilder},
//!     Client, Error,
//! };
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Error> {
//!     let client = Client::new();
//!
//!     let param = CompletionParamBuilder::new("ada")
//!         .prompt("unnecessarily lo")
//!         .temperature(0.5)
//!         .build()?;
//!
//!     let mut resp = create_with_stream(&client, &param).await?;
//!
//!     while let Some(chunk) = resp.chunk().await? {
//!         if chunk.to_vec() == b"data: [DONE]\n\n" {
//!             break;
//!         }
//!
//!         let v: Completion = serde_json::from_slice(&chunk[5..])?;
//!         v.choices.iter().for_each(|c| println!("{:?}", c.text));
//!     }
//!
//!     Ok(())
//! }
//! ```
//!
//! ### Generate an image based on a prompt and save it locally.
//! ```no_run
//! use fieri::{
//!     image::{ImageSize, GenerateImageParamBuilder, generate},
//!     Client, Error,
//! };
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Error> {
//!     let client = Client::new();
//!
//!     let param = GenerateImageParamBuilder::new("A bunch of cats dancing tango on top of the highest mountain on Mars.")
//!         .size(ImageSize::S1024x1024)
//!         .n(1)
//!         .build()?;
//!
//!     generate(&client, &param)
//!         .await?
//!         .save("/tmp/")
//!         .await?;
//!
//!     Ok(())
//! }
//! ```
pub use ;
pub use Client;
pub use Error;
/// Result returned from each interaction with the OpenAI API.
pub type Result<T> = Result;