Struct Client

Source
pub struct Client { /* private fields */ }
Expand description

This is the main interface to interact with the API.

Implementations§

Source§

impl Client

Source

pub fn new(api_key: &str) -> Client

Create a new client. This will automatically build a reqwest::Client used internally.

Examples found in repository?
examples/chat_example.rs (line 6)
5 async fn main() {
6    let client = openai_rust::Client::new(&std::env::var("OPENAI_API_KEY").unwrap());
7    let args = openai_rust::chat::ChatArguments::new(
8        "gpt-3.5-turbo",
9        vec![openai_rust::chat::Message {
10            role: "user".to_owned(),
11            content: "Hello GPT!".to_owned(),
12        }],
13    );
14    let res = client.create_chat(args).await.unwrap();
15    println!("{}", res);
16 }
More examples
Hide additional examples
examples/chat_stream_example.rs (line 8)
7 async fn main() {
8    let client = openai_rust::Client::new(&std::env::var("OPENAI_API_KEY").unwrap());
9    let args = openai_rust::chat::ChatArguments::new(
10        "gpt-3.5-turbo",
11        vec![openai_rust::chat::Message {
12            role: "user".to_owned(),
13            content: "Hello GPT!".to_owned(),
14        }],
15    );
16    let mut res = client.create_chat_stream(args).await.unwrap();
17    while let Some(chunk) = res.next().await {
18        print!("{}", chunk.unwrap());
19        std::io::stdout().flush().unwrap();
20    }
21 }
Source

pub fn new_with_client(api_key: &str, req_client: reqwest::Client) -> Client

Build a client using your own reqwest::Client.

Source

pub async fn list_models(&self) -> Result<Vec<Model>, Error>

List and describe the various models available in the API. You can refer to the Models documentation to understand what models are available and the differences between them.

let client = openai_rust::Client::new(api_key);
let models = client.list_models().await.unwrap();

See https://platform.openai.com/docs/api-reference/models/list.

Source

pub async fn create_chat( &self, args: ChatArguments, ) -> Result<ChatCompletion, Error>

Given a list of messages comprising a conversation, the model will return a response.

See https://platform.openai.com/docs/api-reference/chat.

let client = openai_rust::Client::new(api_key);
let args = openai_rust::chat::ChatArguments::new("gpt-3.5-turbo", vec![
   openai_rust::chat::Message {
       role: "user".to_owned(),
       content: "Hello GPT!".to_owned(),
   }
]);
let res = client.create_chat(args).await.unwrap();
println!("{}", res.choices[0].message.content);
Examples found in repository?
examples/chat_example.rs (line 14)
5 async fn main() {
6    let client = openai_rust::Client::new(&std::env::var("OPENAI_API_KEY").unwrap());
7    let args = openai_rust::chat::ChatArguments::new(
8        "gpt-3.5-turbo",
9        vec![openai_rust::chat::Message {
10            role: "user".to_owned(),
11            content: "Hello GPT!".to_owned(),
12        }],
13    );
14    let res = client.create_chat(args).await.unwrap();
15    println!("{}", res);
16 }
Source

pub async fn create_chat_stream( &self, args: ChatArguments, ) -> Result<ChatCompletionChunkStream>

Like Client::create_chat but with streaming.

See https://platform.openai.com/docs/api-reference/chat.

This method will return a stream of chat::stream::ChatCompletionChunk values. Use with futures_util::StreamExt::next.

use openai_rust::futures_util::StreamExt;
let mut res = client.create_chat_stream(args).await.unwrap();
while let Some(chunk) = res.next().await {
    print!("{}", chunk.unwrap());
    std::io::stdout().flush().unwrap();
}
Examples found in repository?
examples/chat_stream_example.rs (line 16)
7 async fn main() {
8    let client = openai_rust::Client::new(&std::env::var("OPENAI_API_KEY").unwrap());
9    let args = openai_rust::chat::ChatArguments::new(
10        "gpt-3.5-turbo",
11        vec![openai_rust::chat::Message {
12            role: "user".to_owned(),
13            content: "Hello GPT!".to_owned(),
14        }],
15    );
16    let mut res = client.create_chat_stream(args).await.unwrap();
17    while let Some(chunk) = res.next().await {
18        print!("{}", chunk.unwrap());
19        std::io::stdout().flush().unwrap();
20    }
21 }
Source

pub async fn create_completion( &self, args: CompletionArguments, ) -> Result<CompletionResponse>

Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.

See https://platform.openai.com/docs/api-reference/completions

let c = openai_rust::Client::new(api_key);
let args = openai_rust::completions::CompletionArguments::new("text-davinci-003", "The quick brown fox".to_owned());
println!("{}", c.create_completion(args).await.unwrap().choices[0].text);
Source

pub async fn create_edit(&self, args: EditArguments) -> Result<EditResponse>

👎Deprecated: Use the chat api instead

Given a prompt and an instruction, the model will return an edited version of the prompt.

See https://platform.openai.com/docs/api-reference/edits

let c = openai_rust::Client::new(api_key);
let args = openai_rust::edits::EditArguments::new("text-davinci-edit-001", "The quick brown fox".to_owned(), "Complete this sentence.".to_owned());
println!("{}", c.create_edit(args).await.unwrap().to_string());
Source

pub async fn create_embeddings( &self, args: EmbeddingsArguments, ) -> Result<EmbeddingsResponse>

Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.

See https://platform.openai.com/docs/api-reference/embeddings

let c = openai_rust::Client::new(api_key);
let args = openai_rust::embeddings::EmbeddingsArguments::new("text-embedding-ada-002", "The food was delicious and the waiter...".to_owned());
println!("{:?}", c.create_embeddings(args).await.unwrap().data);
Source

pub async fn create_image(&self, args: ImageArguments) -> Result<Vec<String>>

Creates an image given a prompt.

Auto Trait Implementations§

§

impl Freeze for Client

§

impl !RefUnwindSafe for Client

§

impl Send for Client

§

impl Sync for Client

§

impl Unpin for Client

§

impl !UnwindSafe for Client

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

impl<T> ErasedDestructor for T
where T: 'static,

Source§

impl<T> MaybeSendSync for T