use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use crate::providers::manifest::ProviderManifest;
/// How long a cached model list stays fresh: 24 hours, in seconds.
const CACHE_TTL_SECS: u64 = 86400;
/// Upper bound on how many models are surfaced from OpenRouter's large catalog.
const OPENROUTER_MAX_MODELS: usize = 30;
/// On-disk cache entry (stored as JSON) for one provider's fetched model list.
#[derive(Debug, Serialize, Deserialize)]
struct CachedModels {
    // Unix timestamp (seconds) when the list was fetched; compared
    // against CACHE_TTL_SECS to decide freshness.
    fetched_at: u64,
    // Model ids as returned by the provider's catalog endpoint.
    models: Vec<String>,
}
/// Resolves the model list for a provider, preferring freshest data:
/// live catalog fetch first, then the on-disk cache, then the static
/// fallback list baked into the manifest.
pub fn resolve_models_sync(manifest: &ProviderManifest) -> Vec<String> {
    // Providers without a catalog URL only ever use the manifest list.
    let url = match manifest.catalog_url.as_deref() {
        Some(u) => u,
        None => return manifest.models.clone(),
    };
    let err = match fetch_and_cache_blocking(url, &manifest.id, manifest.id == "openrouter") {
        Ok(models) => return models,
        Err(e) => e,
    };
    eprintln!(
        " \x1b[2mFailed to fetch {}: {}, using fallback\x1b[0m",
        manifest.id, err
    );
    // A stale (or expired) cache still beats the static fallback list.
    match load_cache(&manifest.id) {
        Ok(cached) if !cached.models.is_empty() => cached.models,
        _ => manifest.models.clone(),
    }
}
/// Forces a fresh catalog fetch, bypassing the cache TTL.
/// Providers without a catalog URL just return their static manifest list.
pub fn refresh_models_sync(manifest: &ProviderManifest) -> Result<Vec<String>> {
    match manifest.catalog_url.as_deref() {
        Some(url) => fetch_blocking(url, &manifest.id, manifest.id == "openrouter"),
        None => Ok(manifest.models.clone()),
    }
}
/// Returns the cached model list for `provider_id` when it is still fresh
/// (younger than `CACHE_TTL_SECS`); otherwise fetches the catalog from `url`
/// and caches the result.
fn fetch_and_cache_blocking(
    url: &str,
    provider_id: &str,
    filter_openrouter: bool,
) -> Result<Vec<String>> {
    if let Ok(cached) = load_cache(provider_id) {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0);
        // saturating_sub: a cache stamped in the future (clock skew/reset,
        // or `now` falling back to 0 above) must not underflow — plain
        // subtraction panics in debug builds and wraps to a huge age in
        // release. Treat such entries as age 0, i.e. fresh.
        if now.saturating_sub(cached.fetched_at) < CACHE_TTL_SECS {
            return Ok(cached.models);
        }
    }
    fetch_blocking(url, provider_id, filter_openrouter)
}
/// Fetches the model catalog from `url` with a 15 s timeout, parses it,
/// and best-effort persists a non-empty result to the on-disk cache.
///
/// # Errors
/// Fails on client construction, network errors, non-2xx status, or a
/// response body that does not parse as a catalog.
fn fetch_blocking(url: &str, provider_id: &str, filter_openrouter: bool) -> Result<Vec<String>> {
    let client = reqwest::blocking::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()?;
    let resp = client.get(url).send()?;
    if !resp.status().is_success() {
        anyhow::bail!("HTTP {}", resp.status());
    }
    let models = parse_catalog_response(resp.text()?, filter_openrouter)?;
    // Caching is best-effort: a read-only cache dir or a full disk must not
    // discard a catalog we already fetched successfully (previously `?` here
    // turned a cache-write failure into a fetch failure).
    if !models.is_empty() {
        if let Err(e) = save_cache(provider_id, &models) {
            eprintln!(" \x1b[2mFailed to cache {} models: {}\x1b[0m", provider_id, e);
        }
    }
    Ok(models)
}
fn parse_catalog_response(body: String, filter_openrouter: bool) -> Result<Vec<String>> {
#[derive(Deserialize)]
struct OpenRouterResponse {
data: Vec<OpenRouterModel>,
}
#[derive(Deserialize)]
struct OpenRouterModel {
id: String,
}
let all_models: Vec<String> = match serde_json::from_str::<OpenRouterResponse>(&body) {
Ok(resp) => resp.data.into_iter().map(|m| m.id).collect(),
Err(e) => anyhow::bail!("parse error: {}", e),
};
if filter_openrouter {
Ok(filter_openrouter_models(&all_models))
} else {
Ok(all_models)
}
}
/// Trims OpenRouter's very large catalog to at most `OPENROUTER_MAX_MODELS`
/// entries, preferring models from well-known providers in the priority
/// order of `popular_patterns`. Falls back to plain catalog order when
/// fewer than 5 recognized models are present.
fn filter_openrouter_models(models: &[String]) -> Vec<String> {
    // Provider prefixes, in display-priority order.
    let popular_patterns = [
        "anthropic/",
        "openai/",
        "google/",
        "deepseek/",
        "meta-llama/",
        "mistralai/",
        "cohere/",
        "qwen/",
        "nousresearch/",
        "sao10k/",
        "together/",
        "perplexity/",
    ];
    // Rank = index of the first matching prefix; 99 is an unreachable
    // fallback for entries that survived the filter below, kept so the
    // closure is total.
    let provider_rank = |model: &str| {
        popular_patterns
            .iter()
            .position(|p| model.starts_with(p))
            .unwrap_or(99)
    };
    let mut popular: Vec<String> = models
        .iter()
        .filter(|m| popular_patterns.iter().any(|p| m.starts_with(p)))
        .cloned()
        .collect();
    if popular.len() >= 5 {
        // sort_by_cached_key computes each rank once per element instead of
        // once per comparison (the rank is a linear prefix scan), and it is
        // stable, so catalog order is preserved within a provider — same
        // result as the previous sort_by_key, cheaper.
        popular.sort_by_cached_key(|m| provider_rank(m));
        popular.into_iter().take(OPENROUTER_MAX_MODELS).collect()
    } else {
        // Too few recognized entries to rank usefully; keep catalog order.
        models.iter().take(OPENROUTER_MAX_MODELS).cloned().collect()
    }
}
/// Builds the cache-file path for a provider:
/// `~/.garden/cache/<provider_id>_models.json`.
fn cache_path(provider_id: &str) -> Result<PathBuf> {
    // Fall back to the current directory when no home directory is known.
    let base = dirs::home_dir().unwrap_or_else(|| PathBuf::from("."));
    let file_name = format!("{}_models.json", provider_id);
    Ok(base.join(".garden").join("cache").join(file_name))
}
/// Reads and deserializes the cached model list for `provider_id`.
///
/// # Errors
/// Fails when the cache file is missing, unreadable, or not valid JSON.
fn load_cache(provider_id: &str) -> Result<CachedModels> {
    let raw = std::fs::read_to_string(cache_path(provider_id)?)?;
    Ok(serde_json::from_str::<CachedModels>(&raw)?)
}
/// Serializes `models` (with the current Unix timestamp) to the provider's
/// cache file, creating the cache directory if needed.
fn save_cache(provider_id: &str, models: &[String]) -> Result<()> {
    let path = cache_path(provider_id)?;
    if let Some(dir) = path.parent() {
        std::fs::create_dir_all(dir)?;
    }
    // Timestamp falls back to 0 if the system clock is before the epoch.
    let now_secs = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map_or(0, |d| d.as_secs());
    let payload = CachedModels {
        fetched_at: now_secs,
        models: models.to_vec(),
    };
    std::fs::write(&path, serde_json::to_string_pretty(&payload)?)?;
    Ok(())
}