llmservice-flows 0.5.0

LLM Service integration for flows.network
Documentation
//! LLM Service integration for [Flows.network](https://flows.network)
//!
//! # Quick Start
//!
//! To get started, let's write a tiny flow function.
//!
//! ```rust
//! use llmservice_flows::{
//!     chat::ChatOptions,
//!     LLMServiceFlows,
//! };
//! use lambda_flows::{request_received, send_response};
//! use serde_json::Value;
//! use std::collections::HashMap;
//!
//! #[no_mangle]
//! #[tokio::main(flavor = "current_thread")]
//! pub async fn run() {
//!     request_received(handler).await;
//! }
//!
//! async fn handler(_qry: HashMap<String, Value>, body: Vec<u8>) {
//!     let co = ChatOptions {
//!       model: Some("gpt-4"),
//!       token_limit: 8192,
//!       ..Default::default()
//!     };
//!     let mut lf = LLMServiceFlows::new("https://api.openai.com/v1");
//!     lf.set_api_key("your api key");
//!
//!     let r = match lf.chat_completion(
//!         "any_conversation_id",
//!         String::from_utf8_lossy(&body).into_owned().as_str(),
//!         &co,
//!     )
//!     .await
//!     {
//!         Ok(c) => c.choice,
//!         Err(e) => e,
//!     };
//!
//!     send_response(
//!         200,
//!         vec![(
//!             String::from("content-type"),
//!             String::from("text/plain; charset=UTF-8"),
//!         )],
//!         r.as_bytes().to_vec(),
//!     );
//! }
//! ```
//!
//! When a Lambda request is received, the handler forwards the request body
//! to the LLM via [LLMServiceFlows::chat_completion] and sends the model's
//! reply back as the HTTP response.
//!

use lazy_static::lazy_static;
use std::time::Duration;
use tokio::time::sleep;

pub mod audio;
pub mod chat;
pub mod embeddings;

lazy_static! {
    /// Base URL of the flows.network LLM API proxy.
    ///
    /// Overridable at build time through the `LLM_API_PREFIX` environment
    /// variable (read via `option_env!`, so the value is fixed at compile
    /// time); defaults to the public endpoint.
    static ref LLM_API_PREFIX: String = std::option_env!("LLM_API_PREFIX")
        .unwrap_or("https://llm.flows.network/api")
        .to_string();
}

// Host functions imported from the flows.network WebAssembly runtime
// (presumably — confirm against the host ABI). Each writes its value into
// the caller-supplied buffer `p` and returns the number of bytes written,
// which the callers below use as the string length.
extern "C" {
    /// Writes the current flows.network user name into `p`; returns byte count.
    fn get_flows_user(p: *mut u8) -> i32;
    /// Writes the current flow id into `p`; returns byte count (0 on failure).
    fn get_flow_id(p: *mut u8) -> i32;
}

/// Reads the current flows.network user name from the host runtime.
///
/// # Safety
///
/// The host-provided `get_flows_user` must write at most 100 bytes of
/// valid UTF-8 into the buffer and return the number of bytes written.
///
/// # Panics
///
/// Panics if the host reports a negative or oversized length, or if the
/// bytes written are not valid UTF-8.
unsafe fn _get_flows_user() -> String {
    const CAP: usize = 100;
    let mut flows_user = Vec::<u8>::with_capacity(CAP);
    let c = get_flows_user(flows_user.as_mut_ptr());
    // Validate the host-reported length BEFORE set_len: a negative i32
    // cast to usize (or a value beyond capacity) would make set_len
    // undefined behavior.
    assert!(
        c >= 0 && (c as usize) <= CAP,
        "get_flows_user returned invalid length: {c}"
    );
    // SAFETY: the host wrote `c` bytes into the buffer and we just
    // verified 0 <= c <= capacity.
    flows_user.set_len(c as usize);
    String::from_utf8(flows_user).expect("flows user is not valid UTF-8")
}

/// Reads the current flow id from the host runtime.
///
/// # Safety
///
/// The host-provided `get_flow_id` must write at most 100 bytes of valid
/// UTF-8 into the buffer and return the number of bytes written.
///
/// # Panics
///
/// Panics if the host reports failure (length 0), a negative or oversized
/// length, or bytes that are not valid UTF-8.
unsafe fn _get_flow_id() -> String {
    const CAP: usize = 100;
    let mut flow_id = Vec::<u8>::with_capacity(CAP);
    let c = get_flow_id(flow_id.as_mut_ptr());
    if c == 0 {
        panic!("Failed to get flow id");
    }
    // Validate the host-reported length BEFORE set_len: a negative i32
    // cast to usize (or a value beyond capacity) would make set_len
    // undefined behavior.
    assert!(
        c > 0 && (c as usize) <= CAP,
        "get_flow_id returned invalid length: {c}"
    );
    // SAFETY: the host wrote `c` bytes into the buffer and we just
    // verified 0 < c <= capacity.
    flow_id.set_len(c as usize);
    String::from_utf8(flow_id).expect("flow id is not valid UTF-8")
}

/// Hard upper bound on retries; user-configured values above this are clamped.
const MAX_RETRY_TIMES: u8 = 10;
/// Seconds slept between retry attempts.
const RETRY_INTERVAL: u64 = 10; // Wait 10 seconds before retry

/// The main struct for setting the basic configuration
/// for LLM Service interface.
pub struct LLMServiceFlows<'a> {
    /// Exposed url of the LLM Service; every API call is made against
    /// this endpoint.
    service_endpoint: &'a str,

    /// API Key. When `None`, an empty string is sent instead
    /// (see `unwrap_or_default` in `keep_trying`).
    api_key: Option<&'a str>,

    /// Use retry_times to set the number of retries when requesting
    /// LLM Service's api encounters a problem. Default is 0 and max number is 10
    /// (values above `MAX_RETRY_TIMES` are clamped at request time).
    retry_times: u8,
}

/// A single retryable call against the LLM service.
///
/// Implementors perform one request and report via [`Retry`] whether the
/// retry loop in `LLMServiceFlows::keep_trying` should try again.
///
/// NOTE(review): `async fn` in traits requires Rust 1.75+ and produces a
/// future without a `Send` bound — confirm that fits the runtime in use.
pub(crate) trait LLMApi {
    /// Success value produced by the call.
    type Output;
    /// Performs the request against `endpoint`, authenticating with `api_key`.
    async fn api(&self, endpoint: &str, api_key: &str) -> Retry<Self::Output>;
}

impl<'a> LLMServiceFlows<'a> {
    pub fn new(service_endpoint: &'a str) -> LLMServiceFlows<'a> {
        LLMServiceFlows {
            service_endpoint,
            api_key: None,
            retry_times: 0,
        }
    }

    pub fn set_retry_times(&mut self, retry_times: u8) {
        self.retry_times = retry_times;
    }

    pub fn set_api_key(&mut self, api_key: &'a str) {
        self.api_key = Some(api_key);
    }

    async fn keep_trying<F: LLMApi>(&self, llmapi: F) -> Result<<F as LLMApi>::Output, String> {
        let mut retry_times = match self.retry_times {
            r if r > MAX_RETRY_TIMES => MAX_RETRY_TIMES,
            r => r,
        };

        loop {
            match llmapi
                .api(self.service_endpoint, self.api_key.unwrap_or_default())
                .await
            {
                Retry::Yes(s) => match retry_times > 0 {
                    true => {
                        sleep(Duration::from_secs(crate::RETRY_INTERVAL)).await;
                        retry_times = retry_times - 1;
                        continue;
                    }
                    false => return Err(s),
                },
                Retry::No(r) => return r,
            }
        }
    }
}

/// Outcome of a single [`LLMApi::api`] call, as seen by the retry loop.
enum Retry<T> {
    /// Retryable failure; the `String` is the error message surfaced to
    /// the caller if no retries remain.
    Yes(String),
    /// Terminal outcome: a success value or a non-retryable error.
    No(Result<T, String>),
}