//! lc/unified_cache.rs — unified (in-memory + on-disk) cache of provider model lists.

1use crate::{
2    config::Config,
3    model_metadata::{extract_models_from_provider, ModelMetadata},
4    provider::Provider,
5};
6use anyhow::Result;
7use serde::{Deserialize, Serialize};
8use std::collections::HashMap;
9use std::path::PathBuf;
10use std::sync::{Arc, RwLock};
11use std::time::{Duration, SystemTime, UNIX_EPOCH};
12use tokio::fs;
13
/// On-disk cache payload for a single provider: the raw models response as
/// returned by the provider plus the metadata extracted from it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CachedProviderData {
    pub last_updated: u64,          // Unix timestamp
    pub raw_response: String,       // Raw JSON response from provider
    pub models: Vec<ModelMetadata>, // Extracted metadata
    // Cache the serialized JSON to avoid repeated serialization.
    // serde skips it, so the memo is never written to disk nor read back.
    #[serde(skip)]
    pub cached_json: Option<String>,
}
23
24impl CachedProviderData {
25    fn new(raw_response: String, models: Vec<ModelMetadata>) -> Self {
26        let now = std::time::SystemTime::now()
27            .duration_since(std::time::UNIX_EPOCH)
28            .unwrap_or(std::time::Duration::from_secs(0))
29            .as_secs();
30
31        Self {
32            last_updated: now,
33            raw_response,
34            models,
35            cached_json: None,
36        }
37    }
38
39    fn get_cached_json(&mut self) -> Result<&str> {
40        if self.cached_json.is_none() {
41            self.cached_json = Some(serde_json::to_string_pretty(self)?);
42        }
43        Ok(self.cached_json.as_ref().unwrap())
44    }
45}
46
// In-memory cache entry with TTL
#[derive(Debug, Clone)]
struct MemoryCacheEntry {
    data: CachedProviderData, // cached payload (same shape as the file cache)
    expires_at: u64,          // Unix timestamp (secs) after which the entry is stale
}
53
54impl MemoryCacheEntry {
55    fn new(data: CachedProviderData, ttl_seconds: u64) -> Self {
56        let now = SystemTime::now()
57            .duration_since(UNIX_EPOCH)
58            .unwrap_or(Duration::from_secs(0))
59            .as_secs();
60
61        Self {
62            data,
63            expires_at: now + ttl_seconds,
64        }
65    }
66
67    fn is_expired(&self) -> bool {
68        let now = SystemTime::now()
69            .duration_since(UNIX_EPOCH)
70            .unwrap_or(Duration::from_secs(0))
71            .as_secs();
72
73        now >= self.expires_at
74    }
75}
76
// Global in-memory cache with efficient invalidation.
// Keyed by provider name; entries expire after `UnifiedCache::CACHE_TTL`.
// NOTE(review): a `static` is already globally shared, so the `Arc` wrapper is
// redundant here; `std::sync::LazyLock` could replace `lazy_static!` on
// Rust >= 1.80 — confirm the crate's MSRV before changing.
lazy_static::lazy_static! {
    static ref MEMORY_CACHE: Arc<RwLock<HashMap<String, MemoryCacheEntry>>> =
        Arc::new(RwLock::new(HashMap::new()));
}

/// Namespace type: all cache operations are associated functions; the state
/// lives in `MEMORY_CACHE` and in per-provider JSON files on disk.
pub struct UnifiedCache;
84
85impl UnifiedCache {
86    /// Cache TTL in seconds (24 hours)
87    const CACHE_TTL: u64 = 86400;
88
89    /// Get the models directory path (cross-platform)
90    pub fn models_dir() -> Result<PathBuf> {
91        let config_dir =
92            dirs::config_dir().ok_or_else(|| anyhow::anyhow!("Could not find config directory"))?;
93
94        Ok(config_dir.join("lc").join("models"))
95    }
96
97    /// Get the cache file path for a specific provider
98    pub fn provider_cache_path(provider: &str) -> Result<PathBuf> {
99        let models_dir = Self::models_dir()?;
100        Ok(models_dir.join(format!("{}.json", provider)))
101    }
102
103    /// Check in-memory cache first, then file cache
104    pub async fn is_cache_fresh(provider: &str) -> Result<bool> {
105        crate::debug_log!("Checking cache freshness for provider '{}'", provider);
106
107        // Check in-memory cache first
108        if let Ok(cache) = MEMORY_CACHE.read() {
109            if let Some(entry) = cache.get(provider) {
110                if !entry.is_expired() {
111                    crate::debug_log!("Found fresh in-memory cache for provider '{}'", provider);
112                    return Ok(true);
113                } else {
114                    crate::debug_log!("In-memory cache expired for provider '{}'", provider);
115                }
116            }
117        }
118
119        // Fall back to file cache
120        let cache_path = Self::provider_cache_path(provider)?;
121
122        if !cache_path.exists() {
123            crate::debug_log!("Cache file does not exist for provider '{}'", provider);
124            return Ok(false);
125        }
126
127        // Use async file I/O to avoid blocking
128        let content = fs::read_to_string(&cache_path).await?;
129        let cached_data: CachedProviderData = serde_json::from_str(&content)?;
130
131        let now = SystemTime::now()
132            .duration_since(UNIX_EPOCH)
133            .unwrap_or(Duration::from_secs(0))
134            .as_secs();
135
136        let age_seconds = now - cached_data.last_updated;
137        let is_fresh = age_seconds < Self::CACHE_TTL;
138
139        crate::debug_log!(
140            "File cache for provider '{}' is {} seconds old, fresh: {}",
141            provider,
142            age_seconds,
143            is_fresh
144        );
145
146        // If file cache is fresh, populate in-memory cache
147        if is_fresh {
148            Self::populate_memory_cache(provider, cached_data);
149        }
150
151        Ok(is_fresh)
152    }
153
154    /// Populate in-memory cache with data
155    fn populate_memory_cache(provider: &str, data: CachedProviderData) {
156        if let Ok(mut cache) = MEMORY_CACHE.write() {
157            let entry = MemoryCacheEntry::new(data, Self::CACHE_TTL);
158            cache.insert(provider.to_string(), entry);
159            crate::debug_log!("Populated in-memory cache for provider '{}'", provider);
160        }
161    }
162
163    /// Invalidate cache for a specific provider
164    pub fn invalidate_provider_cache(provider: &str) {
165        if let Ok(mut cache) = MEMORY_CACHE.write() {
166            cache.remove(provider);
167            crate::debug_log!("Invalidated in-memory cache for provider '{}'", provider);
168        }
169    }
170
171    /// Clear all in-memory cache
172    #[allow(dead_code)]
173    pub fn clear_memory_cache() {
174        if let Ok(mut cache) = MEMORY_CACHE.write() {
175            cache.clear();
176            crate::debug_log!("Cleared all in-memory cache");
177        }
178    }
179
180    /// Get cache age in human-readable format (e.g., "5 mins ago", "2 hrs ago")
181    pub async fn get_cache_age_display(provider: &str) -> Result<String> {
182        // Check in-memory cache first
183        if let Ok(cache) = MEMORY_CACHE.read() {
184            if let Some(entry) = cache.get(provider) {
185                let now = SystemTime::now()
186                    .duration_since(UNIX_EPOCH)
187                    .unwrap_or(Duration::from_secs(0))
188                    .as_secs();
189
190                let age_seconds = now - entry.data.last_updated;
191                return Ok(Self::format_age(age_seconds));
192            }
193        }
194
195        // Fall back to file cache
196        let cache_path = Self::provider_cache_path(provider)?;
197
198        if !cache_path.exists() {
199            return Ok("No cache".to_string());
200        }
201
202        let content = fs::read_to_string(&cache_path).await?;
203        let cached_data: CachedProviderData = serde_json::from_str(&content)?;
204
205        let now = SystemTime::now()
206            .duration_since(UNIX_EPOCH)
207            .unwrap_or(Duration::from_secs(0))
208            .as_secs();
209
210        let age_seconds = now - cached_data.last_updated;
211        Ok(Self::format_age(age_seconds))
212    }
213
214    /// Format age in seconds to human-readable string
215    fn format_age(age_seconds: u64) -> String {
216        if age_seconds < 60 {
217            format!("{} secs ago", age_seconds)
218        } else if age_seconds < 3600 {
219            let minutes = age_seconds / 60;
220            format!("{} min{} ago", minutes, if minutes == 1 { "" } else { "s" })
221        } else if age_seconds < 86400 {
222            let hours = age_seconds / 3600;
223            format!("{} hr{} ago", hours, if hours == 1 { "" } else { "s" })
224        } else {
225            let days = age_seconds / 86400;
226            format!("{} day{} ago", days, if days == 1 { "" } else { "s" })
227        }
228    }
229
230    /// Load cached models for a provider (async with in-memory cache)
231    pub async fn load_provider_models(provider: &str) -> Result<Vec<ModelMetadata>> {
232        crate::debug_log!("Loading cached models for provider '{}'", provider);
233
234        // Check in-memory cache first
235        if let Ok(cache) = MEMORY_CACHE.read() {
236            if let Some(entry) = cache.get(provider) {
237                if !entry.is_expired() {
238                    crate::debug_log!(
239                        "Loaded {} models from in-memory cache for provider '{}'",
240                        entry.data.models.len(),
241                        provider
242                    );
243                    return Ok(entry.data.models.clone());
244                } else {
245                    crate::debug_log!("In-memory cache expired for provider '{}'", provider);
246                }
247            }
248        }
249
250        // Fall back to file cache
251        let cache_path = Self::provider_cache_path(provider)?;
252
253        if !cache_path.exists() {
254            crate::debug_log!("No cache file found for provider '{}'", provider);
255            return Ok(Vec::new());
256        }
257
258        let content = fs::read_to_string(&cache_path).await?;
259        let cached_data: CachedProviderData = serde_json::from_str(&content)?;
260
261        crate::debug_log!(
262            "Loaded {} models from file cache for provider '{}'",
263            cached_data.models.len(),
264            provider
265        );
266
267        // Populate in-memory cache if data is fresh
268        let now = SystemTime::now()
269            .duration_since(UNIX_EPOCH)
270            .unwrap_or(Duration::from_secs(0))
271            .as_secs();
272
273        if now - cached_data.last_updated < Self::CACHE_TTL {
274            Self::populate_memory_cache(provider, cached_data.clone());
275        }
276
277        Ok(cached_data.models)
278    }
279
280    /// Fetch and cache models for a provider
281    pub async fn fetch_and_cache_provider_models(
282        provider: &str,
283        force_refresh: bool,
284    ) -> Result<Vec<ModelMetadata>> {
285        crate::debug_log!(
286            "Fetching models for provider '{}', force_refresh: {}",
287            provider,
288            force_refresh
289        );
290
291        // Check if we need to refresh
292        if !force_refresh && Self::is_cache_fresh(provider).await? {
293            crate::debug_log!(
294                "Using cached models for provider '{}' (cache is fresh)",
295                provider
296            );
297            return Self::load_provider_models(provider).await;
298        }
299
300        crate::debug_log!(
301            "Cache is stale or refresh forced, fetching fresh models for provider '{}'",
302            provider
303        );
304        println!("Fetching models from provider '{}'...", provider);
305
306        // Invalidate existing cache
307        Self::invalidate_provider_cache(provider);
308
309        // Load config and create client
310        let config = Config::load()?;
311        let provider_config = config.get_provider(provider)?;
312
313        crate::debug_log!(
314            "Creating authenticated client for provider '{}' with endpoint: {}",
315            provider,
316            provider_config.endpoint
317        );
318
319        let mut config_mut = config.clone();
320        let client = crate::chat::create_authenticated_client(&mut config_mut, provider).await?;
321
322        // Save config if tokens were updated
323        if config_mut.get_cached_token(provider) != config.get_cached_token(provider) {
324            crate::debug_log!(
325                "Tokens were updated for provider '{}', saving config",
326                provider
327            );
328            config_mut.save()?;
329        }
330
331        // Fetch raw response
332        crate::debug_log!(
333            "Making API request to fetch models from provider '{}'",
334            provider
335        );
336        let raw_response = crate::cli::fetch_raw_models_response(&client, provider_config).await?;
337
338        crate::debug_log!(
339            "Received raw response from provider '{}' ({} bytes)",
340            provider,
341            raw_response.len()
342        );
343
344        // Extract metadata using the new generic approach
345        crate::debug_log!(
346            "Extracting metadata from response for provider '{}'",
347            provider
348        );
349
350        // Create a Provider object for the extractor
351        let provider_obj = Provider {
352            provider: provider.to_string(),
353            status: "active".to_string(),
354            supports_tools: false,
355            supports_structured_output: false,
356        };
357
358        let models = extract_models_from_provider(&provider_obj, &raw_response)?;
359
360        crate::debug_log!(
361            "Extracted {} models from provider '{}'",
362            models.len(),
363            provider
364        );
365
366        // Cache the data (both in-memory and file)
367        crate::debug_log!("Saving cache data for provider '{}'", provider);
368        Self::save_provider_cache(provider, &raw_response, &models).await?;
369
370        Ok(models)
371    }
372
373    /// Save provider data to cache (async with in-memory caching)
374    async fn save_provider_cache(
375        provider: &str,
376        raw_response: &str,
377        models: &[ModelMetadata],
378    ) -> Result<()> {
379        let cache_path = Self::provider_cache_path(provider)?;
380
381        crate::debug_log!(
382            "Saving cache for provider '{}' to: {}",
383            provider,
384            cache_path.display()
385        );
386
387        // Create cached data
388        let cached_data = CachedProviderData::new(raw_response.to_string(), models.to_vec());
389
390        // Update in-memory cache first (fastest access)
391        Self::populate_memory_cache(provider, cached_data.clone());
392
393        // Ensure cache directory exists
394        if let Some(parent) = cache_path.parent() {
395            crate::debug_log!("Creating cache directory: {}", parent.display());
396            fs::create_dir_all(parent).await?;
397        }
398
399        // Use async file I/O to avoid blocking
400        let mut cached_data_mut = cached_data;
401        let content = cached_data_mut.get_cached_json()?;
402        crate::debug_log!(
403            "Writing {} bytes to cache file for provider '{}'",
404            content.len(),
405            provider
406        );
407        fs::write(&cache_path, content).await?;
408
409        crate::debug_log!(
410            "Successfully saved cache for provider '{}' with {} models",
411            provider,
412            models.len()
413        );
414
415        Ok(())
416    }
417
418    /// Load all cached models from all providers (async with in-memory cache)
419    pub async fn load_all_cached_models() -> Result<Vec<ModelMetadata>> {
420        let models_dir = Self::models_dir()?;
421        let mut all_models = Vec::new();
422
423        if !models_dir.exists() {
424            return Ok(all_models);
425        }
426
427        let mut entries = fs::read_dir(&models_dir).await?;
428
429        while let Some(entry) = entries.next_entry().await? {
430            let path = entry.path();
431
432            if let Some(extension) = path.extension() {
433                if extension == "json" {
434                    if let Some(provider_name) = path.file_stem().and_then(|s| s.to_str()) {
435                        match Self::load_provider_models(provider_name).await {
436                            Ok(mut models) => {
437                                all_models.append(&mut models);
438                            }
439                            Err(e) => {
440                                eprintln!(
441                                    "Warning: Failed to load cached models for {}: {}",
442                                    provider_name, e
443                                );
444                            }
445                        }
446                    }
447                }
448            }
449        }
450
451        // Sort by provider, then by model name
452        all_models.sort_by(|a, b| a.provider.cmp(&b.provider).then(a.id.cmp(&b.id)));
453
454        Ok(all_models)
455    }
456
457    /// Refresh all providers' caches
458    pub async fn refresh_all_providers() -> Result<()> {
459        let config = Config::load()?;
460        let mut successful_providers = 0;
461        let mut total_models = 0;
462
463        println!("Refreshing models cache for all providers...");
464
465        for (provider_name, provider_config) in &config.providers {
466            // Skip providers without API keys
467            if provider_config.api_key.is_none() {
468                continue;
469            }
470
471            match Self::fetch_and_cache_provider_models(provider_name, true).await {
472                Ok(models) => {
473                    let count = models.len();
474                    successful_providers += 1;
475                    total_models += count;
476                    println!("✓ {} ({} models)", provider_name, count);
477                }
478                Err(e) => {
479                    println!("✗ {} ({})", provider_name, e);
480                }
481            }
482        }
483
484        println!(
485            "\nCache updated: {} providers, {} total models",
486            successful_providers, total_models
487        );
488        Ok(())
489    }
490}