use crate::config::layered::ModelConfig;
use crate::errors::LocalModelError;
use crate::models::catalog::MODEL_CATALOG;
use std::path::PathBuf;
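
/// Adapter that serves completions from a locally installed model.
///
/// `context_size` and `threads` are stored for use by the inference
/// backend; `complete` below is currently a placeholder.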
pub struct LocalLLMAdapter {
    model_path: PathBuf,
    config: ModelConfig,
    context_size: usize,
    threads: usize,
}

impl LocalLLMAdapter {
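    /// Looks up `spec_name` in `MODEL_CATALOG` and verifies that the model
    /// weights exist on disk; no weights are loaded into memory here.
    ///
    /// The model directory is `config.path` when set, otherwise
    /// `<platform data dir>/kandil/models`.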
    pub async fn load(spec_name: &str, config: &ModelConfig) -> Result<Self, LocalModelError> {
        // Resolve the requested model against the built-in catalog.
        let spec = MODEL_CATALOG
            .iter()
            .find(|model| model.name == spec_name)
            .ok_or_else(|| LocalModelError::ModelNotFound {
                name: spec_name.to_string(),
            })?;

        // Use the configured model directory if one is set; otherwise fall
        // back to `<platform data dir>/kandil/models`, creating it on demand.
        let model_path = match &config.path {
            Some(path) => path.join(&spec.filename),
            None => {
                let path = dirs::data_dir()
                    .unwrap_or_else(|| {
                        // Last resort: the current working directory.
                        std::env::current_dir().expect("current working directory is unavailable")
                    })
                    .join("kandil")
                    .join("models");
                std::fs::create_dir_all(&path)
                    .map_err(|e| LocalModelError::IoError { source: e })?;
                path.join(&spec.filename)
            }
        };

        // Fail early with an actionable message if the weights are missing.
        if !model_path.exists() {
            return Err(LocalModelError::ConfigurationError {
                message: format!(
                    "Model not found at {}. Run `kandil model install {}`",
                    model_path.display(),
                    spec.name
                ),
            });
        }

        Ok(Self {
            model_path,
            config: config.clone(),
            context_size: config.context_size,
            threads: config.threads.unwrap_or(4),
        })
    }
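
    /// Builds an adapter for the model that `AutoConfig::from_hardware`
    /// selects for the detected hardware.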
    pub async fn default() -> Result<Self, LocalModelError> {
        use crate::core::auto_config::AutoConfig;
        use crate::core::hardware::detect_hardware;

        let hardware = detect_hardware();
        let auto_config = AutoConfig::from_hardware(&hardware);
        Self::load(&auto_config.model.name, &auto_config.model).await
    }
}

#[async_trait::async_trait]
impl crate::common::traits::AIProvider for LocalLLMAdapter {
    async fn complete(&self, prompt: &str) -> Result<String, LocalModelError> {
        // Placeholder: echoes the prompt instead of running inference, so the
        // provider can be exercised end to end before a backend is wired in.
        Ok(format!("Local model response for prompt: {}", prompt))
    }

    async fn is_available(&self) -> bool {
        // Available as long as the resolved weights file is still on disk.
        self.model_path.exists()
    }

    async fn name(&self) -> String {
        self.config.name.clone()
    }
}
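
// A minimal usage sketch, not a real test of inference: it assumes the crate
// uses a tokio test runtime and that at least one catalog model is already
// installed, so it stays `#[ignore]`d by default.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::common::traits::AIProvider;

    #[tokio::test]
    #[ignore = "requires an installed local model"]
    async fn default_adapter_completes() {
        let adapter = LocalLLMAdapter::default()
            .await
            .expect("auto-config should resolve an installed model");
        assert!(adapter.is_available().await);

        // The placeholder `complete` echoes the prompt back.
        let reply = adapter.complete("hello").await.unwrap();
        assert!(reply.contains("hello"));
    }
}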