//! Model catalog — dynamic fetching and fallback
//!
//! Providers with a `catalog_url` (e.g. OpenRouter) can fetch models at runtime.
//! Results are cached in ~/.garden/cache/<provider>_models.json.
//! Expired caches trigger a refetch; if the fetch fails, a stale cache is used
//! when present, otherwise the static manifest list.
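//!
//! A minimal usage sketch (module paths and error handling are illustrative):
//!
//! ```ignore
//! use crate::providers::manifest::ProviderManifest;
//!
//! fn list_models(manifest: &ProviderManifest) {
//!     // Falls back to the static manifest list if both fetch and cache fail.
//!     for id in resolve_models_sync(manifest) {
//!         println!("{}", id);
//!     }
//! }
//! ```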

use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

use crate::providers::manifest::ProviderManifest;

// ── Constants ───────────────────────────────────────────────────────────────

/// Cache TTL in seconds (24 hours)
const CACHE_TTL_SECS: u64 = 86400;

/// Max models to show for OpenRouter (filters noise)
const OPENROUTER_MAX_MODELS: usize = 30;

// ── Cache types ─────────────────────────────────────────────────────────────

/// Cached model list
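///
/// Serialized to disk as JSON, e.g. (values illustrative):
///
/// ```json
/// { "fetched_at": 1735689600, "models": ["anthropic/claude-3.5-sonnet", "openai/gpt-4o"] }
/// ```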
#[derive(Debug, Serialize, Deserialize)]
struct CachedModels {
    /// Unix timestamp (seconds) when the cache was written
    fetched_at: u64,
    /// Model IDs
    models: Vec<String>,
}

// ── Public API ────────────────────────────────────────────────────────────────

/// Resolve models for a provider (blocking).
/// - If the provider has a catalog_url: fresh cache → fetch → stale cache → static fallback
/// - Otherwise: return static manifest list
pub fn resolve_models_sync(manifest: &ProviderManifest) -> Vec<String> {
    if let Some(ref url) = manifest.catalog_url {
        match fetch_and_cache_blocking(url, &manifest.id, manifest.id == "openrouter") {
            Ok(models) => return models,
            Err(e) => {
                eprintln!(
                    "  \x1b[2mFailed to fetch {}: {}, using fallback\x1b[0m",
                    manifest.id, e
                );
                if let Ok(cached) = load_cache(&manifest.id) {
                    if !cached.models.is_empty() {
                        return cached.models;
                    }
                }
            }
        }
    }
    manifest.models.clone()
}

/// Force-refresh models, bypassing cache (blocking).
/// Used by `garden provider refresh`.
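///
/// A minimal usage sketch (error handling is illustrative):
///
/// ```ignore
/// match refresh_models_sync(&manifest) {
///     Ok(models) => println!("fetched {} models", models.len()),
///     Err(e) => eprintln!("refresh failed: {}", e),
/// }
/// ```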
pub fn refresh_models_sync(manifest: &ProviderManifest) -> Result<Vec<String>> {
    if let Some(ref url) = manifest.catalog_url {
        fetch_blocking(url, &manifest.id, manifest.id == "openrouter")
    } else {
        Ok(manifest.models.clone())
    }
}

// ── Fetch ───────────────────────────────────────────────────────────────────

fn fetch_and_cache_blocking(
    url: &str,
    provider_id: &str,
    filter_openrouter: bool,
) -> Result<Vec<String>> {
    // Return valid cache without refetching
    if let Ok(cached) = load_cache(provider_id) {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0);

        // saturating_sub avoids an underflow panic if the cache timestamp is in the future
        if now.saturating_sub(cached.fetched_at) < CACHE_TTL_SECS {
            return Ok(cached.models);
        }
    }

    fetch_blocking(url, provider_id, filter_openrouter)
}

fn fetch_blocking(url: &str, provider_id: &str, filter_openrouter: bool) -> Result<Vec<String>> {
    let client = reqwest::blocking::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()?;

    let resp = client.get(url).send()?;

    if !resp.status().is_success() {
        anyhow::bail!("HTTP {}", resp.status());
    }

    let models = parse_catalog_response(resp.text()?, filter_openrouter)?;

    // Don't cache empty responses
    if !models.is_empty() {
        save_cache(provider_id, &models)?;
    }

    Ok(models)
}

/// Parse the provider catalog API response (expects an OpenRouter-style
/// `{"data": [...]}` payload).
fn parse_catalog_response(body: String, filter_openrouter: bool) -> Result<Vec<String>> {
    // OpenRouter returns {data: [...model objects...]}
    #[derive(Deserialize)]
    struct OpenRouterResponse {
        data: Vec<OpenRouterModel>,
    }
    #[derive(Deserialize)]
    struct OpenRouterModel {
        id: String,
    }

    let all_models: Vec<String> = match serde_json::from_str::<OpenRouterResponse>(&body) {
        Ok(resp) => resp.data.into_iter().map(|m| m.id).collect(),
        Err(e) => anyhow::bail!("parse error: {}", e),
    };

    if filter_openrouter {
        Ok(filter_openrouter_models(&all_models))
    } else {
        Ok(all_models)
    }
}

/// Filter OpenRouter models to a popular subset, ordered by vendor and capped
/// at OPENROUTER_MAX_MODELS.
fn filter_openrouter_models(models: &[String]) -> Vec<String> {
    let popular_patterns = [
        "anthropic/",
        "openai/",
        "google/",
        "deepseek/",
        "meta-llama/",
        "mistralai/",
        "cohere/",
        "qwen/",
        "nousresearch/",
        "sao10k/",
        "together/",
        "perplexity/",
    ];

    let popular: Vec<String> = models
        .iter()
        .filter(|m| popular_patterns.iter().any(|p| m.starts_with(p)))
        .cloned()
        .collect();

    // If too few models match the known vendor prefixes, the filter is likely
    // stale; fall back to the unfiltered list (still capped).
    if popular.len() >= 5 {
        let mut sorted = popular;
        sorted.sort_by_key(|m| {
            popular_patterns
                .iter()
                .position(|p| m.starts_with(p))
                .unwrap_or(99)
        });
        sorted.into_iter().take(OPENROUTER_MAX_MODELS).collect()
    } else {
        models.iter().take(OPENROUTER_MAX_MODELS).cloned().collect()
    }
}

// ── Cache I/O ───────────────────────────────────────────────────────────────

fn cache_path(provider_id: &str) -> Result<PathBuf> {
    let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("."));
    Ok(home
        .join(".garden")
        .join("cache")
        .join(format!("{}_models.json", provider_id)))
}

fn load_cache(provider_id: &str) -> Result<CachedModels> {
    let path = cache_path(provider_id)?;
    let content = std::fs::read_to_string(&path)?;
    let cached: CachedModels = serde_json::from_str(&content)?;
    Ok(cached)
}

fn save_cache(provider_id: &str, models: &[String]) -> Result<()> {
    let path = cache_path(provider_id)?;
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)?;
    }

    let cache = CachedModels {
        fetched_at: std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0),
        models: models.to_vec(),
    };

    std::fs::write(&path, serde_json::to_string_pretty(&cache)?)?;
    Ok(())
}
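
// ── Tests ───────────────────────────────────────────────────────────────────
// A small sketch of unit tests for the pure parsing/filtering helpers; the
// payload and model ids below are illustrative, not taken from a live API.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_openrouter_style_payload() {
        let body = r#"{"data":[{"id":"anthropic/claude-3.5-sonnet"},{"id":"openai/gpt-4o"}]}"#;
        let models = parse_catalog_response(body.to_string(), false).unwrap();
        assert_eq!(models, vec!["anthropic/claude-3.5-sonnet", "openai/gpt-4o"]);
    }

    #[test]
    fn filter_falls_back_when_few_vendors_match() {
        // Fewer than 5 known-vendor matches: the unfiltered list is returned.
        let few = vec!["anthropic/x".to_string(), "unknown/y".to_string()];
        assert_eq!(filter_openrouter_models(&few).len(), 2);
    }
}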