use crate::error::AmbiError;
use serde::Deserialize;
use std::path::Path;
/// Configuration for the local llama.cpp-backed engine, deserialized
/// from the application's config file via serde.
#[derive(Debug, Deserialize, Clone)]
pub struct LlamaEngineConfig {
/// Filesystem path to the local model file; existence is checked in `validate`.
pub model_path: String,
/// Optional path to a vision projector (mmproj) file; when set, existence is checked in `validate`.
pub mmproj_path: Option<String>,
/// Whether vision support is integrated; defaults to `false` when absent from the config.
#[serde(default)]
pub integrated_vision: bool,
// NOTE(review): presumably the generation cap passed to the backend — confirm against engine code.
pub max_tokens: i32,
pub buffer_size: usize,
/// Whether to offload work to the GPU.
pub use_gpu: bool,
/// Number of layers to offload to the GPU.
pub n_gpu_layers: u32,
/// Context window size; `validate` rejects 0.
pub n_ctx: u32,
pub n_tokens: usize,
pub n_seq_max: i32,
// Sampling penalty parameters — assumed to mirror llama.cpp sampler settings; verify against backend.
pub penalty_last_n: i32,
pub penalty_repeat: f32,
pub penalty_freq: f32,
pub penalty_present: f32,
/// Sampling temperature; `validate` requires it to lie in [0.0, 2.0].
pub temp: f32,
pub top_p: f32,
/// RNG seed for sampling.
pub seed: u32,
pub min_keep: usize,
}
impl LlamaEngineConfig {
/// Validates the configuration, returning an error for the first
/// problem found.
///
/// Checks performed:
/// - `model_path` points to an existing file
/// - `mmproj_path`, when set, points to an existing file
/// - `n_ctx` is non-zero
/// - `temp` lies in `[0.0, 2.0]` (NaN is rejected)
///
/// # Errors
/// Returns `AmbiError::EngineError` with a human-readable message
/// describing the first failed check.
pub fn validate(&self) -> crate::error::Result<()> {
if !Path::new(&self.model_path).exists() {
return Err(AmbiError::EngineError(format!(
"Local model file does not exist: {}",
self.model_path
)));
}
if let Some(path) = &self.mmproj_path {
if !Path::new(path).exists() {
return Err(AmbiError::EngineError(format!(
"Local vision projector (mmproj) file does not exist: {}",
path
)));
}
}
if self.n_ctx == 0 {
return Err(AmbiError::EngineError(
"Context n_ctx cannot be 0.".to_string(),
));
}
// BUG FIX: the previous `temp < 0.0 || temp > 2.0` test is false for
// NaN (every comparison involving NaN is false), so a NaN temperature
// slipped through validation. `RangeInclusive::contains` also returns
// false for NaN, so negating it rejects NaN as well as out-of-range
// values.
if !(0.0..=2.0).contains(&self.temp) {
return Err(AmbiError::EngineError(
"Temperature must be between 0.0 and 2.0".to_string(),
));
}
Ok(())
}
}