// ricecoder_storage/cache_implementations.rs

1//! Concrete caching implementations for ricecoder
2//!
3//! This module provides ready-to-use caching implementations for common operations:
4//! - Configuration caching
5//! - Specification caching
6//! - Provider response caching
7//! - Project analysis caching
8
9use crate::CacheManager;
10use std::path::Path;
11use tracing::{debug, info};
12use std::sync::atomic::{AtomicU64, Ordering};
13use std::sync::Arc;
14use serde::Serialize;
15
16/// Cache statistics tracker
17#[derive(Debug, Clone)]
18pub struct CacheStats {
19    hits: Arc<AtomicU64>,
20    misses: Arc<AtomicU64>,
21}
22
23impl CacheStats {
24    /// Create new cache statistics tracker
25    pub fn new() -> Self {
26        Self {
27            hits: Arc::new(AtomicU64::new(0)),
28            misses: Arc::new(AtomicU64::new(0)),
29        }
30    }
31
32    /// Record a cache hit
33    pub fn record_hit(&self) {
34        self.hits.fetch_add(1, Ordering::Relaxed);
35    }
36
37    /// Record a cache miss
38    pub fn record_miss(&self) {
39        self.misses.fetch_add(1, Ordering::Relaxed);
40    }
41
42    /// Get cache hit rate (0.0 to 1.0)
43    pub fn hit_rate(&self) -> f64 {
44        let hits = self.hits.load(Ordering::Relaxed);
45        let misses = self.misses.load(Ordering::Relaxed);
46        let total = hits + misses;
47
48        if total == 0 {
49            0.0
50        } else {
51            hits as f64 / total as f64
52        }
53    }
54
55    /// Get statistics tuple (hits, misses, hit_rate)
56    pub fn stats(&self) -> (u64, u64, f64) {
57        let hits = self.hits.load(Ordering::Relaxed);
58        let misses = self.misses.load(Ordering::Relaxed);
59        let rate = self.hit_rate();
60        (hits, misses, rate)
61    }
62
63    /// Log cache statistics
64    pub fn log_stats(&self, name: &str) {
65        let (hits, misses, rate) = self.stats();
66        info!(
67            "{} cache statistics: {} hits, {} misses, {:.2}% hit rate",
68            name,
69            hits,
70            misses,
71            rate * 100.0
72        );
73    }
74
75    /// Reset statistics
76    pub fn reset(&self) {
77        self.hits.store(0, Ordering::Relaxed);
78        self.misses.store(0, Ordering::Relaxed);
79    }
80}
81
82impl Default for CacheStats {
83    fn default() -> Self {
84        Self::new()
85    }
86}
87
/// Configuration caching wrapper
///
/// Memoizes YAML configuration files through a persistent `CacheManager`,
/// storing cached entries as JSON and tracking hit/miss statistics.
pub struct ConfigCache {
    /// Backing persistent cache; entries are keyed as `config_<path>`.
    cache: CacheManager,
    /// Hit/miss counters for this cache instance.
    stats: CacheStats,
}
93
94impl ConfigCache {
95    /// Create new configuration cache
96    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
97        Ok(Self {
98            cache: CacheManager::new(cache_dir)?,
99            stats: CacheStats::new(),
100        })
101    }
102
103    /// Get cached configuration or load from file
104    pub fn get_config<T: serde::de::DeserializeOwned + Serialize>(
105        &self,
106        path: &Path,
107    ) -> Result<T, Box<dyn std::error::Error>> {
108        let cache_key = format!("config_{}", path.display());
109
110        // Check cache first
111        if let Some(cached) = self.cache.get(&cache_key)? {
112            debug!("Configuration cache hit: {}", path.display());
113            self.stats.record_hit();
114            return Ok(serde_json::from_str(&cached)?);
115        }
116
117        debug!("Configuration cache miss: {}", path.display());
118        self.stats.record_miss();
119
120        // Load and parse configuration
121        let content = std::fs::read_to_string(path)?;
122        let config: T = serde_yaml::from_str(&content)?;
123
124        // Cache for 1 hour (3600 seconds)
125        let json = serde_json::to_string(&config)?;
126        self.cache.set(
127            &cache_key,
128            json,
129            crate::CacheInvalidationStrategy::Ttl(3600),
130        )?;
131
132        Ok(config)
133    }
134
135    /// Invalidate configuration cache
136    pub fn invalidate_config(&self, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
137        let cache_key = format!("config_{}", path.display());
138        self.cache.invalidate(&cache_key)?;
139        debug!("Configuration cache invalidated: {}", path.display());
140        Ok(())
141    }
142
143    /// Get cache statistics
144    pub fn stats(&self) -> &CacheStats {
145        &self.stats
146    }
147}
148
/// Specification caching wrapper
///
/// Memoizes YAML specification files through a persistent `CacheManager`,
/// storing cached entries as JSON and tracking hit/miss statistics.
pub struct SpecCache {
    /// Backing persistent cache; entries are keyed as `spec_<path>`.
    cache: CacheManager,
    /// Hit/miss counters for this cache instance.
    stats: CacheStats,
}
154
155impl SpecCache {
156    /// Create new specification cache
157    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
158        Ok(Self {
159            cache: CacheManager::new(cache_dir)?,
160            stats: CacheStats::new(),
161        })
162    }
163
164    /// Get cached specification or load from file
165    pub fn get_spec<T: serde::de::DeserializeOwned + Serialize>(
166        &self,
167        path: &Path,
168    ) -> Result<T, Box<dyn std::error::Error>> {
169        let cache_key = format!("spec_{}", path.display());
170
171        // Check cache first
172        if let Some(cached) = self.cache.get(&cache_key)? {
173            debug!("Specification cache hit: {}", path.display());
174            self.stats.record_hit();
175            return Ok(serde_json::from_str(&cached)?);
176        }
177
178        debug!("Specification cache miss: {}", path.display());
179        self.stats.record_miss();
180
181        // Load and parse specification
182        let content = std::fs::read_to_string(path)?;
183        let spec: T = serde_yaml::from_str(&content)?;
184
185        // Cache for 1 hour (3600 seconds)
186        let json = serde_json::to_string(&spec)?;
187        self.cache.set(
188            &cache_key,
189            json,
190            crate::CacheInvalidationStrategy::Ttl(3600),
191        )?;
192
193        Ok(spec)
194    }
195
196    /// Invalidate specification cache
197    pub fn invalidate_spec(&self, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
198        let cache_key = format!("spec_{}", path.display());
199        self.cache.invalidate(&cache_key)?;
200        debug!("Specification cache invalidated: {}", path.display());
201        Ok(())
202    }
203
204    /// Get cache statistics
205    pub fn stats(&self) -> &CacheStats {
206        &self.stats
207    }
208}
209
/// Provider response caching wrapper
///
/// Caches raw LLM provider responses keyed by provider, model, and a
/// hash of the prompt; tracks hit/miss statistics.
pub struct ProviderCache {
    /// Backing persistent cache; entries are keyed as
    /// `provider_<provider>_<model>_<prompt-hash>`.
    cache: CacheManager,
    /// Hit/miss counters for this cache instance.
    stats: CacheStats,
}
215
216impl ProviderCache {
217    /// Create new provider response cache
218    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
219        Ok(Self {
220            cache: CacheManager::new(cache_dir)?,
221            stats: CacheStats::new(),
222        })
223    }
224
225    /// Get cached provider response
226    pub fn get_response(
227        &self,
228        provider: &str,
229        model: &str,
230        prompt: &str,
231    ) -> Result<Option<String>, Box<dyn std::error::Error>> {
232        let cache_key = self.make_cache_key(provider, model, prompt);
233
234        // Check cache first
235        if let Some(cached) = self.cache.get(&cache_key)? {
236            debug!("Provider response cache hit: {}/{}", provider, model);
237            self.stats.record_hit();
238            return Ok(Some(cached));
239        }
240
241        debug!("Provider response cache miss: {}/{}", provider, model);
242        self.stats.record_miss();
243        Ok(None)
244    }
245
246    /// Cache provider response
247    pub fn cache_response(
248        &self,
249        provider: &str,
250        model: &str,
251        prompt: &str,
252        response: &str,
253    ) -> Result<(), Box<dyn std::error::Error>> {
254        let cache_key = self.make_cache_key(provider, model, prompt);
255
256        // Cache for 24 hours (86400 seconds)
257        self.cache.set(
258            &cache_key,
259            response.to_string(),
260            crate::CacheInvalidationStrategy::Ttl(86400),
261        )?;
262
263        debug!("Provider response cached: {}/{}", provider, model);
264        Ok(())
265    }
266
267    /// Make cache key from provider, model, and prompt
268    fn make_cache_key(&self, provider: &str, model: &str, prompt: &str) -> String {
269        // Use simple hash of prompt to avoid long keys
270        // Calculate a simple hash by summing byte values
271        let hash = prompt
272            .bytes()
273            .fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(b as u64));
274
275        format!("provider_{}_{}_{}",provider, model, hash)
276    }
277
278    /// Get cache statistics
279    pub fn stats(&self) -> &CacheStats {
280        &self.stats
281    }
282}
283
/// Project analysis caching wrapper
///
/// Caches serialized project-analysis results keyed by project path;
/// tracks hit/miss statistics.
pub struct ProjectAnalysisCache {
    /// Backing persistent cache; entries are keyed as `analysis_<path>`.
    cache: CacheManager,
    /// Hit/miss counters for this cache instance.
    stats: CacheStats,
}
289
290impl ProjectAnalysisCache {
291    /// Create new project analysis cache
292    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
293        Ok(Self {
294            cache: CacheManager::new(cache_dir)?,
295            stats: CacheStats::new(),
296        })
297    }
298
299    /// Get cached project analysis
300    pub fn get_analysis<T: serde::de::DeserializeOwned + Serialize>(
301        &self,
302        project_path: &Path,
303    ) -> Result<Option<T>, Box<dyn std::error::Error>> {
304        let cache_key = format!("analysis_{}", project_path.display());
305
306        if let Some(cached) = self.cache.get(&cache_key)? {
307            debug!("Project analysis cache hit: {}", project_path.display());
308            self.stats.record_hit();
309            return Ok(Some(serde_json::from_str(&cached)?));
310        }
311
312        debug!("Project analysis cache miss: {}", project_path.display());
313        self.stats.record_miss();
314        Ok(None)
315    }
316
317    /// Cache project analysis
318    pub fn cache_analysis<T: serde::Serialize>(
319        &self,
320        project_path: &Path,
321        analysis: &T,
322    ) -> Result<(), Box<dyn std::error::Error>> {
323        let cache_key = format!("analysis_{}", project_path.display());
324
325        // Cache for 1 hour (3600 seconds)
326        let json = serde_json::to_string(analysis)?;
327        self.cache.set(
328            &cache_key,
329            json,
330            crate::CacheInvalidationStrategy::Ttl(3600),
331        )?;
332
333        debug!("Project analysis cached: {}", project_path.display());
334        Ok(())
335    }
336
337    /// Invalidate project analysis cache
338    pub fn invalidate_analysis(&self, project_path: &Path) -> Result<(), Box<dyn std::error::Error>> {
339        let cache_key = format!("analysis_{}", project_path.display());
340        self.cache.invalidate(&cache_key)?;
341        debug!("Project analysis cache invalidated: {}", project_path.display());
342        Ok(())
343    }
344
345    /// Get cache statistics
346    pub fn stats(&self) -> &CacheStats {
347        &self.stats
348    }
349}
350
#[cfg(test)]
mod tests {
    //! Unit tests for the cache wrappers. File-backed tests use a
    //! `tempfile::TempDir` so nothing leaks onto disk.
    use super::*;
    use tempfile::TempDir;

    // Hit/miss counters and the derived hit rate are consistent.
    #[test]
    fn test_cache_stats() {
        let stats = CacheStats::new();

        stats.record_hit();
        stats.record_hit();
        stats.record_miss();

        let (hits, misses, rate) = stats.stats();
        assert_eq!(hits, 2);
        assert_eq!(misses, 1);
        // 2 hits out of 3 total accesses.
        assert!((rate - 2.0/3.0).abs() < 0.01);
    }

    // reset() zeroes both counters.
    #[test]
    fn test_cache_stats_reset() {
        let stats = CacheStats::new();

        stats.record_hit();
        stats.record_miss();
        stats.reset();

        let (hits, misses, _) = stats.stats();
        assert_eq!(hits, 0);
        assert_eq!(misses, 0);
    }

    // First read of a YAML config is a miss; the second is served from
    // cache and counted as a hit.
    #[test]
    fn test_config_cache() -> Result<(), Box<dyn std::error::Error>> {
        let temp_dir = TempDir::new()?;
        let cache_dir = temp_dir.path().join("cache");
        std::fs::create_dir(&cache_dir)?;

        let config_path = temp_dir.path().join("config.yaml");
        std::fs::write(&config_path, "key: value")?;

        let cache = ConfigCache::new(&cache_dir)?;

        // First access: miss
        let _: serde_json::Value = cache.get_config(&config_path)?;
        assert_eq!(cache.stats().stats().1, 1); // 1 miss

        // Second access: hit
        let _: serde_json::Value = cache.get_config(&config_path)?;
        assert_eq!(cache.stats().stats().0, 1); // 1 hit

        Ok(())
    }

    // Same miss-then-hit pattern for specification files.
    #[test]
    fn test_spec_cache() -> Result<(), Box<dyn std::error::Error>> {
        let temp_dir = TempDir::new()?;
        let cache_dir = temp_dir.path().join("cache");
        std::fs::create_dir(&cache_dir)?;

        let spec_path = temp_dir.path().join("spec.yaml");
        std::fs::write(&spec_path, "name: test")?;

        let cache = SpecCache::new(&cache_dir)?;

        // First access: miss
        let _: serde_json::Value = cache.get_spec(&spec_path)?;
        assert_eq!(cache.stats().stats().1, 1); // 1 miss

        // Second access: hit
        let _: serde_json::Value = cache.get_spec(&spec_path)?;
        assert_eq!(cache.stats().stats().0, 1); // 1 hit

        Ok(())
    }

    // A provider response is absent until cached, then returned verbatim.
    #[test]
    fn test_provider_cache() -> Result<(), Box<dyn std::error::Error>> {
        let temp_dir = TempDir::new()?;
        let cache_dir = temp_dir.path().join("cache");
        std::fs::create_dir(&cache_dir)?;

        let cache = ProviderCache::new(&cache_dir)?;

        // First access: miss
        let result = cache.get_response("openai", "gpt-4", "hello")?;
        assert!(result.is_none());
        assert_eq!(cache.stats().stats().1, 1); // 1 miss

        // Cache response
        cache.cache_response("openai", "gpt-4", "hello", "world")?;

        // Second access: hit
        let result = cache.get_response("openai", "gpt-4", "hello")?;
        assert_eq!(result, Some("world".to_string()));
        assert_eq!(cache.stats().stats().0, 1); // 1 hit

        Ok(())
    }
}