use std::time::Instant;
use anyhow::{Ok, Result};
use crate::exec::ExecModel;
use crate::models::GenerateModel;
use crate::models::lfm2::generate::Lfm2GenerateModel;
use crate::utils::get_file_path;
/// Unit marker type whose [`ExecModel`] impl wires up LFM2 text generation.
pub struct Lfm2Exec;
impl ExecModel for Lfm2Exec {
fn run(input: &[String], output: Option<&str>, weight_path: &str) -> Result<()> {
let input_text = &input[0];
let target_text = if input_text.starts_with("file://") {
let path = get_file_path(input_text)?;
std::fs::read_to_string(path)?
} else {
input_text.to_string()
};
let i_start = Instant::now();
let mut model = Lfm2GenerateModel::init(weight_path, None, None)?;
let i_duration = i_start.elapsed();
println!("Time elapsed in load model is: {:?}", i_duration);
let message = format!(
r#"{{
"temperature": 0.3,
"top_p": 0.8,
"model": "lfm2",
"messages": [
{{
"role": "user",
"content": "{}"
}}
]
}}"#,
target_text.replace('"', "\\\"")
);
let mes = serde_json::from_str(&message)?;
let i_start = Instant::now();
let result = model.generate(mes)?;
let i_duration = i_start.elapsed();
println!("Time elapsed in generate is: {:?}", i_duration);
println!("Result: {:?}", result);
if let Some(out) = output {
std::fs::write(out, format!("{:?}", result))?;
println!("Output saved to: {}", out);
}
Ok(())
}
}