pub struct CreateCompletionRequest {
    pub model: String,
    pub prompt: String,
    pub suffix: Option<String>,
    pub temperature: Option<f64>,
    pub max_tokens: Option<i32>,
    pub n: Option<i32>,
}

Fields

model: String
prompt: String
suffix: Option<String>
temperature: Option<f64>
max_tokens: Option<i32>
n: Option<i32>

Implementations
impl CreateCompletionRequest
pub fn new(prompt: &str) -> Self
Examples found in repository
examples/completion.rs (line 10)
 4 async fn main() -> Result<(), Box<dyn std::error::Error>> {
 5     let api_token = std::env::var("OPENAI_API_KEY")?;
 6     let openai = Client::new(&api_token);
 7
 8     let prompt = "The 10 most popular programming languages are ";
 9
10     let req = CreateCompletionRequest::new(prompt)
11         .model("text-davinci-003")
12         .temperature(1.8);
13     let resp = openai.send(req).await?;
14
15     let text = &resp.choices[0].text;
16
17     println!("{prompt}{text}");
18
19     Ok(())
20 }
pub fn model(self, model: &str) -> Self
Examples found in repository
examples/completion.rs (line 11): see the completion.rs example above.

pub fn suffix(self, suffix: &str) -> Self
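No repository example exercises suffix; a minimal sketch of the setter under the same builder pattern and example model name as above (the prompt and suffix strings are purely illustrative):

// Ask for a completion that should be followed by the given suffix.
let req = CreateCompletionRequest::new("fn add(a: i32, b: i32) -> i32 {")
    .model("text-davinci-003")
    .suffix("\n}")
    .max_tokens(64);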
pub fn temperature(self, temperature: f64) -> Self
Examples found in repository
examples/completion.rs (line 12): see the completion.rs example above.

pub fn max_tokens(self, max_tokens: i32) -> Self
pub fn n(self, n: i32) -> Self
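Neither max_tokens nor n has a repository example; a hedged sketch that combines them with the Client and send call from the completion.rs example above (it assumes resp.choices can be iterated and that each choice exposes the text field seen in that example):

// Request up to three alternative completions, each capped at 32 tokens.
let req = CreateCompletionRequest::new("Write a haiku about Rust")
    .model("text-davinci-003")
    .max_tokens(32)
    .n(3);
let resp = openai.send(req).await?; // `openai` built as in completion.rs
for choice in &resp.choices {       // assumes `choices` is a Vec of choices
    println!("{}", choice.text);
}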
Trait Implementations
impl Clone for CreateCompletionRequest
fn clone(&self) -> CreateCompletionRequest
Returns a duplicate of the value.
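Because every builder method above takes self by value, Clone is handy for reusing one configured request as a template; a small illustrative sketch:

// Build one base request, then derive variants from it.
let base = CreateCompletionRequest::new("Translate to French: Hello")
    .model("text-davinci-003")
    .max_tokens(32);
let creative = base.clone().temperature(1.5);
let conservative = base.temperature(0.2); // consumes `base`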
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.

impl Default for CreateCompletionRequest
fn default() -> CreateCompletionRequest
Returns the “default value” for a type.
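Since the fields are public, Default also allows building a request with struct-update syntax instead of the builder; a sketch assuming the default value leaves the Option fields as None and the Strings empty:

let req = CreateCompletionRequest {
    model: "text-davinci-003".to_string(),
    prompt: "Say hello".to_string(),
    max_tokens: Some(16),
    // assumed: the remaining fields stay at their defaults (None / "")
    ..Default::default()
};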
impl Request for CreateCompletionRequest
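Presumably this is the bound that lets a CreateCompletionRequest be passed to Client::send, as in the completion.rs example above (let resp = openai.send(req).await?;).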
Auto Trait Implementations
impl Freeze for CreateCompletionRequest
impl RefUnwindSafe for CreateCompletionRequest
impl Send for CreateCompletionRequest
impl Sync for CreateCompletionRequest
impl Unpin for CreateCompletionRequest
impl UnwindSafe for CreateCompletionRequest
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.