use crate::apis::ollama::OllamaClient;
use anyhow::Result;
use serde::{Deserialize, Serialize};

/// Metadata for a model that can be offered to the user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
    /// Human-readable display name.
    pub name: String,
    /// Identifier sent to the backing API (hosted model ID or Ollama tag).
    pub file_name: String,
    /// Short description shown alongside the model.
    pub description: String,
    /// Guidance on when to pick this model and any setup it requires.
    pub recommended_for: String,
    /// Whether the model can be used in agent (tool-use) mode.
    pub supports_agent: bool,
}
impl ModelConfig {
    /// Returns `true` if this model can drive agent mode.
    pub fn has_agent_support(&self) -> bool {
        self.supports_agent
    }
}
/// Builds the list of selectable models: a fixed set of hosted models plus
/// whatever a locally running Ollama instance reports.
pub fn get_available_models() -> Vec<ModelConfig> {
    let mut models = vec![
        ModelConfig {
            name: "Claude 3.7 Sonnet".into(),
            file_name: "claude-3-7-sonnet-20250219".into(),
            description: "Latest Anthropic Claude with advanced code capabilities".into(),
            recommended_for: "Professional code tasks, requires ANTHROPIC_API_KEY".into(),
            supports_agent: true,
        },
        ModelConfig {
            name: "GPT-4o".into(),
            file_name: "gpt-4o".into(),
            description: "Latest OpenAI model with advanced tool use capabilities".into(),
            recommended_for: "Professional code tasks, requires OPENAI_API_KEY".into(),
            supports_agent: true,
        },
    ];
    // If a local Ollama daemon is reachable, append its models to the list.
    if let Ok(ollama_models) = get_available_ollama_models() {
        for model_info in ollama_models {
            // Prefer the description Ollama reports; fall back to the model name.
            let base = model_info
                .details
                .as_ref()
                .and_then(|details| details.description.as_deref())
                .unwrap_or(&model_info.name);
            let description = format!("{} - Running locally via Ollama", base);
            models.push(ModelConfig {
                name: format!("{} (local)", model_info.name),
                file_name: model_info.name.clone(),
                description,
                recommended_for: "Local code tasks, requires Ollama to be running".into(),
                supports_agent: true,
            });
        }
    }
    models
}
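
// A minimal usage sketch. The assertions assume the two hosted entries above
// are always present and that every entry sets `supports_agent: true`, which
// holds for this file as written but is not an external contract.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn hosted_models_are_always_listed() {
        let models = get_available_models();
        // The hosted entries are unconditional; Ollama entries are additive.
        assert!(models.len() >= 2);
        assert!(models.iter().all(|m| m.has_agent_support()));
    }
}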
/// Queries a local Ollama daemon for its installed models.
///
/// This is a synchronous wrapper: it spins up a single-threaded Tokio runtime
/// and gives Ollama two seconds to respond. Client-construction errors,
/// request errors, and timeouts all degrade to an empty list so the hosted
/// models are still offered; only a failure to build the runtime itself is
/// propagated.
fn get_available_ollama_models() -> Result<Vec<crate::apis::ollama::OllamaModelInfo>> {
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()?;
    let result = runtime.block_on(async {
        match OllamaClient::new(None) {
            Ok(ollama_client) => {
                // Bound the request so an unresponsive daemon cannot stall us.
                let models_future = ollama_client.list_models();
                tokio::time::timeout(std::time::Duration::from_secs(2), models_future).await
            }
            // Mirror the timeout's nesting: the outer Ok means "did not time out".
            Err(e) => Ok(Err(anyhow::anyhow!("Failed to create Ollama client: {}", e))),
        }
    });
    match result {
        Ok(Ok(models)) => Ok(models),
        // Request error or timeout: treat Ollama as unavailable.
        Ok(Err(_)) | Err(_) => Ok(Vec::new()),
    }
}
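
// Calling `Runtime::block_on` from within an existing Tokio runtime panics, so
// the blocking wrapper above is only safe on non-async call paths. A minimal
// sketch of an async variant for callers already on a runtime, assuming the
// same `OllamaClient` API used above:
#[allow(dead_code)]
async fn get_available_ollama_models_async() -> Result<Vec<crate::apis::ollama::OllamaModelInfo>> {
    let client = OllamaClient::new(None)
        .map_err(|e| anyhow::anyhow!("Failed to create Ollama client: {}", e))?;
    match tokio::time::timeout(std::time::Duration::from_secs(2), client.list_models()).await {
        Ok(Ok(models)) => Ok(models),
        // Request error or timeout: treat Ollama as unavailable.
        _ => Ok(Vec::new()),
    }
}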