pub struct LLMServiceFlows<'a> { /* private fields */ }
Expand description

The main struct for setting the basic configuration for LLM Service interface.

Implementations§

source§

impl<'a> LLMServiceFlows<'a>

source

pub async fn chat_completion( &self, conversation_id: &str, sentence: &str, options: &ChatOptions<'_> ) -> Result<ChatResponse, String>

Create a chat completion with the provided sentence. The conversation is sent to the model selected in `ChatOptions::model` (for example, OpenAI’s GPT-4).

conversation_id is the identifier of the conversation. The history will be fetched and attached to the sentence as a whole prompt for ChatGPT.

sentence is a String that represents the current utterance of the conversation.

     // Create a conversation_id.
     // Only numbers, letters, underscores, dashes, and pound signs are allowed, up to 50 characters.
     let chat_id = format!("news-summary-N");
     // System_prompt content in text.
     let system = &format!("You're a news editor AI.");

     // Create ChatOptions.
     let co = ChatOptions {
         model: Some("gpt-4"),
         token_limit: 8192,
         restart: true,
         system_prompt: Some(system),
     // Use .. to extract the default value for the remaining fields.
         ..Default::default()
     };

     // Create a `sentence`, the concatenation of user prompt and the text to work with.
     let question = format!("Make a concise summary within 200 words on this: {news_body}.");

     // Chat completion to get the result and handle the failure.
     match llm.chat_completion(&chat_id, &question, &co).await {
         Ok(r) => Ok(r.choice),
         Err(e) =>  Err(e.into()),
     }
source§

impl<'a> LLMServiceFlows<'a>

source

pub async fn create_embeddings( &self, model: Option<&str>, input: EmbeddingsInput ) -> Result<Vec<Vec<f64>>, String>

Create embeddings from the provided input.

model optionally names the embedding model to use (e.g. "text-embedding-ada-002"); input is an [EmbeddingsInput] holding the text(s) to embed.

   // This code snippet computes embeddings for `text`, the question created in previous step.
   // Wrap the `text` in EmbeddingsInput struct.
   let input = EmbeddingsInput::String(text.to_string());
   // Call the create_embeddings function.
   let question_vector = match llm.create_embeddings(Some("text-embedding-ada-002"), input).await {
       Ok(r) => r[0].clone(),
       Err(e) => {your error handling},
   };
source§

impl<'a> LLMServiceFlows<'a>

source

pub fn new(service_endpoint: &'a str) -> LLMServiceFlows<'a>

source

pub fn set_retry_times(&mut self, retry_times: u8)

source

pub fn set_api_key(&mut self, api_key: &'a str)

Auto Trait Implementations§

§

impl<'a> RefUnwindSafe for LLMServiceFlows<'a>

§

impl<'a> Send for LLMServiceFlows<'a>

§

impl<'a> Sync for LLMServiceFlows<'a>

§

impl<'a> Unpin for LLMServiceFlows<'a>

§

impl<'a> UnwindSafe for LLMServiceFlows<'a>

Blanket Implementations§

source§

impl<T> Any for T where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

source§

impl<T, U> Into<U> for T where U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

source§

impl<T, U> TryFrom<U> for T where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
§

impl<V, T> VZip<V> for T where V: MultiLane<T>,

§

fn vzip(self) -> V