pub struct OpenAI { /* private fields */ }
Chat models take a list of messages as input and return a model-generated message as output. Although the chat format is designed to make multi-turn conversations easy, it’s just as useful for single-turn tasks without any conversation.
Implementations

impl OpenAI
pub fn new(
    open_ai_key: &str,
    model: OpenAIModels,
    max_tokens: Option<usize>,
    temperature: Option<u32>,
) -> Self
Examples found in repository: examples/use_openai.rs (line 21)
16  async fn main() {
17      env_logger::init();
18      let api_key: String = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");
19      let model = OpenAIModels::Gpt3_5Turbo; // Choose the model
20
21      let open_ai = OpenAI::new(&api_key, model, None, None);
22
23      // Example context and instructions
24      let instructions =
25          "Translate the following English text to all the languages in the response type";
26
27      match open_ai
28          .get_answer::<TranslationResponse>(instructions)
29          .await
30      {
31          Ok(response) => println!("Response: {:?}", response),
32          Err(e) => eprintln!("Error: {:?}", e),
33      }
34  }
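The listing calls get_answer::<TranslationResponse>, so the example file must also define a TranslationResponse type satisfying the JsonSchema + DeserializeOwned bounds that get_answer requires. A minimal sketch of what such a type might look like; the field names are illustrative assumptions, not the actual definition in examples/use_openai.rs:

use schemars::JsonSchema;
use serde::Deserialize;

// Hypothetical response type; the real fields in examples/use_openai.rs may differ.
// The JsonSchema derive lets a schema be produced for the expected output, and
// Deserialize parses the model's JSON reply back into the struct.
#[derive(JsonSchema, Deserialize, Debug)]
struct TranslationResponse {
    spanish: String,
    french: String,
    german: String,
}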
pub fn debug(self) -> Self

pub fn function_calling(self, function_call: bool) -> Self

pub fn set_context<T: Serialize>(self, input_name: &str, input_data: &T) -> Result<Self>

pub fn check_prompt_tokens<T: JsonSchema + DeserializeOwned>(&self, instructions: &str) -> Result<usize>
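The methods above follow a builder pattern: debug and function_calling return Self, set_context serializes a named input into the request context, and check_prompt_tokens estimates the prompt size for a given response type before any request is sent. A minimal sketch of how they might be chained, assuming the crate's errors implement Debug (as the example above suggests); the commented import path and the ReviewContext and SentimentResponse types are illustrative, not part of this crate:

use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
// use <crate>::{OpenAI, OpenAIModels}; // actual import path depends on the crate root

// Hypothetical input type for set_context.
#[derive(Serialize)]
struct ReviewContext {
    product: String,
    review: String,
}

// Hypothetical response type for get_answer / check_prompt_tokens.
#[derive(JsonSchema, Deserialize, Debug)]
struct SentimentResponse {
    sentiment: String,
    confidence: f32,
}

async fn classify(api_key: &str, ctx: &ReviewContext) {
    // Chain the builder methods before sending the request.
    let open_ai = OpenAI::new(api_key, OpenAIModels::Gpt3_5Turbo, Some(500), Some(0))
        .debug()                    // assumed to enable extra logging
        .function_calling(true)     // opt into the function-calling code path
        .set_context("review", ctx) // serialize ctx into the context under the name "review"
        .expect("context should serialize");

    // Estimate the prompt size for this response type before calling the API.
    match open_ai.check_prompt_tokens::<SentimentResponse>("Classify the sentiment of the review") {
        Ok(tokens) => println!("Prompt uses {} tokens", tokens),
        Err(e) => eprintln!("Token check failed: {:?}", e),
    }

    // get_answer consumes the builder and returns the parsed response type.
    match open_ai
        .get_answer::<SentimentResponse>("Classify the sentiment of the review")
        .await
    {
        Ok(response) => println!("Response: {:?}", response),
        Err(e) => eprintln!("Error: {:?}", e),
    }
}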
pub async fn get_answer<T: JsonSchema + DeserializeOwned>(
    self,
    instructions: &str,
) -> Result<T>
Examples found in repository: examples/use_openai.rs (line 28). This is the same listing shown above under new; line 28 is the .get_answer::<TranslationResponse>(instructions) call.
Auto Trait Implementations
impl Freeze for OpenAI
impl RefUnwindSafe for OpenAI
impl Send for OpenAI
impl Sync for OpenAI
impl Unpin for OpenAI
impl UnwindSafe for OpenAI
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.