Struct openai_gpt_rs::client::Client
source · pub struct Client { /* private fields */ }
Implementations§
source§impl Client
impl Client
sourcepub fn new(key: &str) -> Client
pub fn new(key: &str) -> Client
Creates a new client with the given API key.
Examples found in repository?
examples/image.rs (line 16)
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26
async fn main() {
let mut prompt = String::new();
print!("Enter a prompt: ");
let _ = stdout().flush();
stdin().read_line(&mut prompt).unwrap();
println!("Generating image...\n");
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client
.create_image(|args| args.prompt(prompt).size(ImageSize::Medium).n(1))
.await
.unwrap();
let url = resp.get_content(0).unwrap();
println!("Url: {}", url);
}
More examples
examples/completion.rs (line 15)
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
async fn main() {
let mut prompt = String::new();
print!("Enter a prompt: ");
let _ = stdout().flush();
stdin().read_line(&mut prompt).unwrap();
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client
.create_completion(|args| {
args.prompt(prompt)
.model(CompletionModels::TextDavinci3)
.max_tokens(32)
.n(1)
.temperature(1.0)
})
.await
.unwrap();
let completion = resp.get_content(0).unwrap();
println!("{}", completion);
}
examples/edit.rs (line 21)
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37
async fn main() {
let mut prompt = String::new();
let mut instruction = String::new();
print!("Enter a prompt: ");
let _ = stdout().flush();
stdin().read_line(&mut prompt).unwrap();
print!("Enter the instruction: ");
let _ = stdout().flush();
stdin().read_line(&mut instruction).unwrap();
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client
.create_edit(|args| {
args.input(prompt)
.instruction(instruction)
.model(EditModels::TextDavinciEdit1)
.n(1)
.temperature(1.0)
})
.await
.unwrap();
let text = resp.get_content(0).unwrap();
println!("{}", text);
}
examples/chat.rs (line 8)
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51
async fn main() {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let mut role: String;
let mut message = String::new();
let mut index = String::new();
print!(" 1: system\n 2: assistant\n 3: user\nSelect a role: ");
let _ = stdout().flush();
stdin().read_line(&mut index).unwrap();
if index.trim() == "1" {
role = "system".to_string();
} else if index.trim() == "2" {
role = "assistant".to_string();
} else if index.trim() == "3" {
role = "user".to_string();
} else {
panic!("Invalid role!");
}
role = role.trim().to_string();
print!("Enter a message: ");
let _ = stdout().flush();
stdin().read_line(&mut message).unwrap();
let content = message.trim().to_string();
let message = Message { role, content };
let message = vec![message];
let resp = client
.create_chat_completion(|args| args.messages(message))
.await
.unwrap();
let content = resp.get_content(0).unwrap();
println!("Response: {}", content);
}
sourcepub async fn create_completion<T>(
&self,
f: T
) -> Result<CompletionResp, ResponseError> where
T: FnOnce(&mut CompletionArgs) -> &mut CompletionArgs,
pub async fn create_completion<T>( &self, f: T ) -> Result<CompletionResp, ResponseError> where T: FnOnce(&mut CompletionArgs) -> &mut CompletionArgs,
Makes an API call to the OpenAI Completion API and returns the response.
Arguments
f
- A closure that takes a mutable reference to CompletionArgs
and returns it.
Example
use openai_gpt_rs::{args::CompletionArgs, client::Client, response::{CompletionResp, Content}, models::CompletionModels};
use std::env;
#[tokio::main]
async fn main() {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client.create_completion(|args| {
args.prompt("This is a test")
.model(CompletionModels::TextDavinci3)
.max_tokens(32)
.n(5)
})
.await
.unwrap();
let text = resp.get_contents(0..5);
for val in text {
assert!(!val.is_empty());
}
}
Panics
This function will panic if the request to OpenAI fails.
Examples found in repository?
examples/completion.rs (lines 18-24)
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
async fn main() {
let mut prompt = String::new();
print!("Enter a prompt: ");
let _ = stdout().flush();
stdin().read_line(&mut prompt).unwrap();
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client
.create_completion(|args| {
args.prompt(prompt)
.model(CompletionModels::TextDavinci3)
.max_tokens(32)
.n(1)
.temperature(1.0)
})
.await
.unwrap();
let completion = resp.get_content(0).unwrap();
println!("{}", completion);
}
sourcepub async fn create_edit<T>(&self, f: T) -> Result<EditResp, ResponseError> where
T: FnOnce(&mut EditArgs) -> &mut EditArgs,
pub async fn create_edit<T>(&self, f: T) -> Result<EditResp, ResponseError> where T: FnOnce(&mut EditArgs) -> &mut EditArgs,
Makes an API call to the OpenAI Edit API and returns the response.
Arguments
f
- A closure that takes a mutable reference to EditArgs
and returns it.
Example
use openai_gpt_rs::{args::EditArgs, client::Client, response::{EditResp, Content}, models::EditModels};
use std::env;
#[tokio::main]
async fn main() {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client.create_edit(|args| {
args.model(EditModels::TextDavinciEdit1)
.input("How is you dae")
.instruction("Fix the spelling mistakes")
.n(5)
})
.await
.unwrap();
let text = resp.get_contents(0..5);
for val in text {
assert!(!val.is_empty());
}
}
Panics
This function will panic if the request to OpenAI fails.
Examples found in repository?
examples/edit.rs (lines 24-30)
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37
async fn main() {
let mut prompt = String::new();
let mut instruction = String::new();
print!("Enter a prompt: ");
let _ = stdout().flush();
stdin().read_line(&mut prompt).unwrap();
print!("Enter the instruction: ");
let _ = stdout().flush();
stdin().read_line(&mut instruction).unwrap();
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client
.create_edit(|args| {
args.input(prompt)
.instruction(instruction)
.model(EditModels::TextDavinciEdit1)
.n(1)
.temperature(1.0)
})
.await
.unwrap();
let text = resp.get_content(0).unwrap();
println!("{}", text);
}
sourcepub async fn create_image<T>(&self, f: T) -> Result<ImageResp, ResponseError> where
T: FnOnce(&mut ImageArgs) -> &mut ImageArgs,
pub async fn create_image<T>(&self, f: T) -> Result<ImageResp, ResponseError> where T: FnOnce(&mut ImageArgs) -> &mut ImageArgs,
Makes an API call to the OpenAI Image API and returns the response.
Arguments
f
- A closure that takes a mutable reference to ImageArgs
and returns it.
Example
use openai_gpt_rs::{args::{ImageArgs, ImageSize}, client::Client, response::{ImageResp, Content}};
use std::env;
#[tokio::main]
async fn main() {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client.create_image(|args| {
args.prompt("Kitty")
.size(ImageSize::Small)
.n(2)
})
.await
.unwrap();
let urls = resp.get_contents(0..2);
for val in urls {
assert!(!val.is_empty());
}
}
Panics
This function will panic if the request to OpenAI fails.
Examples found in repository?
examples/image.rs (line 19)
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26
async fn main() {
let mut prompt = String::new();
print!("Enter a prompt: ");
let _ = stdout().flush();
stdin().read_line(&mut prompt).unwrap();
println!("Generating image...\n");
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let resp = client
.create_image(|args| args.prompt(prompt).size(ImageSize::Medium).n(1))
.await
.unwrap();
let url = resp.get_content(0).unwrap();
println!("Url: {}", url);
}
sourcepub async fn get_models(&self) -> Result<Value, Error>
pub async fn get_models(&self) -> Result<Value, Error>
Returns a JSON value listing all the models
sourcepub async fn create_chat_completion<T>(
&self,
f: T
) -> Result<ChatResp, ResponseError> where
T: FnOnce(&mut ChatArgs) -> &mut ChatArgs,
pub async fn create_chat_completion<T>( &self, f: T ) -> Result<ChatResp, ResponseError> where T: FnOnce(&mut ChatArgs) -> &mut ChatArgs,
Makes an API call to the OpenAI Chat Completion API and returns the response.
Arguments
f
- A closure that takes a mutable reference to ChatArgs
and returns it.
Example
use openai_gpt_rs::{args::ChatArgs, client::Client, response::{ChatResp, Content}, models::ChatModels, chat::Message};
use std::env;
use std::collections::HashMap;
#[tokio::main]
async fn main() {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let message1 = Message {
role: "user".to_string(),
content: "Who won the world series in 2020?".to_string(),
};
let message2 = Message {
role: "system".to_string(),
content: "You are a helpful assistant.".to_string(),
};
let messages = vec![message1, message2];
let resp = client
.create_chat_completion(|args| args.messages(messages.clone()))
.await
.unwrap();
let contents = resp.get_content(0).unwrap();
assert!(!contents.is_empty());
}
Errors
This function will return an error if the API call fails.
The error will be of type reqwest::Error.
Examples found in repository?
examples/chat.rs (line 44)
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51
async fn main() {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap().as_str());
let mut role: String;
let mut message = String::new();
let mut index = String::new();
print!(" 1: system\n 2: assistant\n 3: user\nSelect a role: ");
let _ = stdout().flush();
stdin().read_line(&mut index).unwrap();
if index.trim() == "1" {
role = "system".to_string();
} else if index.trim() == "2" {
role = "assistant".to_string();
} else if index.trim() == "3" {
role = "user".to_string();
} else {
panic!("Invalid role!");
}
role = role.trim().to_string();
print!("Enter a message: ");
let _ = stdout().flush();
stdin().read_line(&mut message).unwrap();
let content = message.trim().to_string();
let message = Message { role, content };
let message = vec![message];
let resp = client
.create_chat_completion(|args| args.messages(message))
.await
.unwrap();
let content = resp.get_content(0).unwrap();
println!("Response: {}", content);
}
Auto Trait Implementations§
impl !RefUnwindSafe for Client
impl Send for Client
impl Sync for Client
impl Unpin for Client
impl !UnwindSafe for Client
Blanket Implementations§
source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where T: ?Sized,
source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more