llmservice_flows

Struct LLMServiceFlows

Source
pub struct LLMServiceFlows<'a> { /* private fields */ }
Expand description

The main struct for setting the basic configuration for LLM Service interface.

Implementations§

Source§

impl<'a> LLMServiceFlows<'a>

Source

pub async fn transcribe( &self, input: TranscribeInput, ) -> Result<TranscriptionOutput, String>

Transcribe audio into the input language.

input is a TranscribeInput object.

   // This code snippet transcribes the input audio into English; the audio is collected in the previous step.
   // Prepare the TranscribeInput struct.
   let input = TranscribeInput {
      audio: audio,
      audio_format: "wav".to_string(),
      language: "en".to_string(),
   };
   // Call the transcribe function.
   let transcription = match llm.transcribe(input).await {
       Ok(r) => r.text,
       Err(e) => {your error handling},
   };
Source

pub async fn translate( &self, input: TranslateInput, ) -> Result<TranslationOutput, String>

Translate audio into English.

input is a TranslateInput object.

   // This code snippet translates the input audio into English; the audio is collected in the previous step.
   // Prepare the TranslateInput struct.
   let input = TranslateInput {
      audio: audio,
      audio_format: "wav".to_string(),
      language: "zh".to_string(),
   };
   // Call the translate function.
   let translation = match llm.translate(input).await {
       Ok(r) => r.text,
       Err(e) => {your error handling},
   };
Source§

impl<'a> LLMServiceFlows<'a>

Source

pub async fn chat_completion( &self, conversation_id: &str, sentence: &str, options: &ChatOptions<'_>, ) -> Result<ChatResponse, String>

Create chat completion with the provided sentence. It uses OpenAI’s GPT-4 model to make a conversation.

conversation_id is the identifier of the conversation. The history will be fetched and attached to the sentence as a whole prompt for ChatGPT.

sentence is a String that represents the current utterance of the conversation.

     // Create a conversation_id.
     // Only numbers, letters, underscores, dashes, and pound signs are allowed, up to 50 characters.
     let chat_id = format!("news-summary-N");
     // System_prompt content in text.
     let system = &format!("You're a news editor AI.");

     // Create ChatOptions.
     let co = ChatOptions {
         model: Some("gpt-4"),
         token_limit: 8192,
         restart: true,
         system_prompt: Some(system),
     // Use .. to extract the default value for the remaining fields.
         ..Default::default()
     };

     // Create a `sentence`, the concatenation of user prompt and the text to work with.
     let question = format!("Make a concise summary within 200 words on this: {news_body}.");

     // Chat completion to get the result and handle the failure.
     match llm.chat_completion(&chat_id, &question, &co).await {
         Ok(r) => Ok(r.choice),
         Err(e) =>  Err(e.into()),
     }
Source§

impl<'a> LLMServiceFlows<'a>

Source

pub async fn create_embeddings( &self, model: Option<&str>, input: EmbeddingsInput, ) -> Result<Vec<Vec<f64>>, String>

Create embeddings from the provided input.

input is an EmbeddingsInput object.

   // This code snippet computes embeddings for `text`, the question created in the previous step.
   // Wrap the `text` in EmbeddingsInput struct.
   let input = EmbeddingsInput::String(text.to_string());
   // Call the create_embeddings function.
   let question_vector = match llm.create_embeddings(Some("text-embedding-ada-002"), input).await {
       Ok(r) => r[0],
       Err(e) => {your error handling},
   };
Source§

impl<'a> LLMServiceFlows<'a>

Source

pub fn new(service_endpoint: &'a str) -> LLMServiceFlows<'a>

Source

pub fn set_retry_times(&mut self, retry_times: u8)

Source

pub fn set_api_key(&mut self, api_key: &'a str)

Auto Trait Implementations§

§

impl<'a> Freeze for LLMServiceFlows<'a>

§

impl<'a> RefUnwindSafe for LLMServiceFlows<'a>

§

impl<'a> Send for LLMServiceFlows<'a>

§

impl<'a> Sync for LLMServiceFlows<'a>

§

impl<'a> Unpin for LLMServiceFlows<'a>

§

impl<'a> UnwindSafe for LLMServiceFlows<'a>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> ErasedDestructor for T
where T: 'static,

Source§

impl<T> MaybeSendSync for T