//! Example for `llmservice-flows` 0.5.0 — LLM Service integration for
//! flows.network. Transcribes/translates a local Chinese WAV file via a
//! Whisper-compatible endpoint. See the crate documentation for details.
use std::fs::File;
use std::io::Read;

use llmservice_flows::audio::*;
use llmservice_flows::LLMServiceFlows;

#[tokio::main(flavor = "current_thread")]
async fn main() {
    // Slurp the whole audio file in one call. `std::fs::read` sizes its
    // buffer from file metadata, replacing the manual File::open +
    // read_to_end dance. `expect` names the file so a failure is diagnosable.
    let audio = std::fs::read("/test_cn.wav").expect("failed to read /test_cn.wav");

    let input = TranslateInput {
        audio, // field-init shorthand (was `audio: audio`)
        audio_format: "wav".to_string(),
        // Source language of the recording; the service translates it to English.
        language: "zh".to_string(),
        // NOTE(review): presumably 0 / -1 mean "use the service defaults" for
        // segment length and context size (whisper.cpp convention) — confirm
        // against the llmservice-flows docs.
        max_len: Some(0),
        max_context: Some(-1),
        split_on_word: Some(false),
    };

    // Point the client at a Whisper-compatible Gaia endpoint.
    let llm = LLMServiceFlows::new("https://whisper.gaia.domains/v1");
    let translation = match llm.translate(input).await {
        Ok(r) => r.text,
        Err(e) => {
            panic!("Error: {}", e);
        }
    };

    print!("{}", translation);
}