use lazy_static::lazy_static;
use std::time::Duration;
use tokio::time::sleep;
pub mod audio;
pub mod chat;
pub mod embeddings;
lazy_static! {
// Base URL prefix for the flows.network LLM API proxy.
// NOTE: `option_env!` reads LLM_API_PREFIX at *compile* time, not at
// runtime; the public endpoint below is the fallback when it is unset.
static ref LLM_API_PREFIX: String =
String::from(std::option_env!("LLM_API_PREFIX").unwrap_or("https://llm.flows.network/api"));
}
// Host-provided imports (WASM host ABI). Each function writes UTF-8 bytes
// into the buffer pointed to by `p` and returns the number of bytes written.
// NOTE(review): callers below allocate 100 bytes — assumed the host never
// writes more than the caller's capacity; confirm against the host ABI.
extern "C" {
fn get_flows_user(p: *mut u8) -> i32;
fn get_flow_id(p: *mut u8) -> i32;
}
/// Fetches the current flows.network user name from the host.
///
/// # Safety
/// Relies on the host honoring the `get_flows_user` ABI: it must write
/// at most the buffer's capacity and return the byte count written.
///
/// # Panics
/// Panics if the host reports an out-of-range length or returns bytes
/// that are not valid UTF-8.
unsafe fn _get_flows_user() -> String {
    const BUF_CAP: usize = 100;
    let mut flows_user = Vec::<u8>::with_capacity(BUF_CAP);
    let c = get_flows_user(flows_user.as_mut_ptr());
    // Validate the host-reported length before `set_len`: a negative i32
    // would wrap to a huge usize, and a count above the capacity would
    // expose uninitialized / out-of-bounds memory (undefined behavior).
    assert!(
        c >= 0 && c as usize <= BUF_CAP,
        "get_flows_user returned invalid length: {}",
        c
    );
    // SAFETY: the host wrote `c` bytes into the buffer and we have just
    // verified that `c` fits within the allocated capacity.
    flows_user.set_len(c as usize);
    String::from_utf8(flows_user).unwrap()
}
/// Fetches the current flow id from the host.
///
/// # Safety
/// Relies on the host honoring the `get_flow_id` ABI: it must write
/// at most the buffer's capacity and return the byte count written.
///
/// # Panics
/// Panics if the host reports a zero or out-of-range length, or returns
/// bytes that are not valid UTF-8.
unsafe fn _get_flow_id() -> String {
    const BUF_CAP: usize = 100;
    let mut flow_id = Vec::<u8>::with_capacity(BUF_CAP);
    let c = get_flow_id(flow_id.as_mut_ptr());
    // A zero length means the host has no flow id for us — fatal here.
    if c == 0 {
        panic!("Failed to get flow id");
    }
    // Validate the host-reported length before `set_len`: a negative i32
    // would wrap to a huge usize, and a count above the capacity would
    // expose uninitialized / out-of-bounds memory (undefined behavior).
    assert!(
        c > 0 && c as usize <= BUF_CAP,
        "get_flow_id returned invalid length: {}",
        c
    );
    // SAFETY: the host wrote `c` bytes into the buffer and we have just
    // verified that `c` fits within the allocated capacity.
    flow_id.set_len(c as usize);
    String::from_utf8(flow_id).unwrap()
}
// Hard cap on retries; user-configured values above this are clamped.
const MAX_RETRY_TIMES: u8 = 10;
// Seconds slept between retry attempts.
const RETRY_INTERVAL: u64 = 10;
// Client handle for one LLM service endpoint, carrying an optional API
// key and a bounded retry budget. Borrows its strings from the caller.
pub struct LLMServiceFlows<'a> {
// Base URL of the service requests are sent to.
service_endpoint: &'a str,
// API key for the service; when `None`, an empty string is sent.
api_key: Option<&'a str>,
// Retry budget for failed requests; clamped to MAX_RETRY_TIMES at use.
retry_times: u8,
}
// Crate-internal abstraction over a single LLM API operation (chat,
// embeddings, ...) so `keep_trying` can drive any of them uniformly.
pub(crate) trait LLMApi {
// Value produced on a successful call.
type Output;
// Perform one request against `endpoint` using `api_key`. Returns
// `Retry::Yes(reason)` for a retryable failure, or `Retry::No(result)`
// for a terminal outcome (success or fatal error).
async fn api(&self, endpoint: &str, api_key: &str) -> Retry<Self::Output>;
}
impl<'a> LLMServiceFlows<'a> {
pub fn new(service_endpoint: &'a str) -> LLMServiceFlows<'a> {
LLMServiceFlows {
service_endpoint,
api_key: None,
retry_times: 0,
}
}
pub fn set_retry_times(&mut self, retry_times: u8) {
self.retry_times = retry_times;
}
pub fn set_api_key(&mut self, api_key: &'a str) {
self.api_key = Some(api_key);
}
async fn keep_trying<F: LLMApi>(&self, llmapi: F) -> Result<<F as LLMApi>::Output, String> {
let mut retry_times = match self.retry_times {
r if r > MAX_RETRY_TIMES => MAX_RETRY_TIMES,
r => r,
};
loop {
match llmapi
.api(self.service_endpoint, self.api_key.unwrap_or_default())
.await
{
Retry::Yes(s) => match retry_times > 0 {
true => {
sleep(Duration::from_secs(crate::RETRY_INTERVAL)).await;
retry_times = retry_times - 1;
continue;
}
false => return Err(s),
},
Retry::No(r) => return r,
}
}
}
}
// Outcome of a single API attempt, as seen by `keep_trying`.
enum Retry<T> {
// Retryable failure; the String is the error message surfaced to the
// caller if the retry budget runs out.
Yes(String),
// Terminal outcome: either the successful output or a fatal error.
No(Result<T, String>),
}