mockforge_core/cache.rs

//! High-performance caching utilities for MockForge
//!
//! This module provides a generic in-memory cache with TTL expiration and LRU
//! eviction, plus specialized caches for HTTP responses and compiled templates,
//! to speed up access to frequently used data.
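//!
//! # Example
//!
//! A minimal usage sketch. It is marked `ignore` because it assumes this module
//! is reachable as `mockforge_core::cache` and that it runs inside a Tokio
//! runtime; adjust the import path to the actual crate layout.
//!
//! ```rust,ignore
//! use std::time::Duration;
//! use mockforge_core::cache::Cache;
//!
//! async fn demo() {
//!     // Hold up to 100 entries; each expires 30 seconds after insertion.
//!     let cache: Cache<String, String> = Cache::with_ttl(100, Duration::from_secs(30));
//!
//!     cache.insert("user:1".to_string(), "Alice".to_string(), None).await;
//!     assert_eq!(cache.get(&"user:1".to_string()).await, Some("Alice".to_string()));
//! }
//! ```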

use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

/// Cache entry with expiration support
#[derive(Debug, Clone)]
struct CacheEntry<V> {
    value: V,
    expires_at: Option<Instant>,
    access_count: u64,
    last_accessed: Instant,
}

impl<V> CacheEntry<V> {
    fn new(value: V, ttl: Option<Duration>) -> Self {
        let now = Instant::now();
        Self {
            value,
            expires_at: ttl.map(|duration| now + duration),
            access_count: 0,
            last_accessed: now,
        }
    }

    fn is_expired(&self) -> bool {
        self.expires_at.is_some_and(|expires_at| Instant::now() > expires_at)
    }

    fn access(&mut self) -> &V {
        self.access_count += 1;
        self.last_accessed = Instant::now();
        &self.value
    }
}

/// High-performance in-memory cache with TTL and LRU eviction
#[derive(Debug)]
pub struct Cache<K, V> {
    storage: Arc<RwLock<HashMap<K, CacheEntry<V>>>>,
    max_size: usize,
    default_ttl: Option<Duration>,
    stats: Arc<RwLock<CacheStats>>,
}

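/// Counters describing cache activity (hits, misses, evictions, expirations,
/// and insertions).
///
/// A sketch of how the counters might be combined into a hit rate; `hit_rate`
/// is an illustrative helper, not part of the existing API:
///
/// ```rust,ignore
/// fn hit_rate(stats: &CacheStats) -> f64 {
///     let total = stats.hits + stats.misses;
///     if total == 0 {
///         0.0
///     } else {
///         stats.hits as f64 / total as f64
///     }
/// }
/// ```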
#[derive(Debug, Default, Clone)]
pub struct CacheStats {
    pub hits: u64,
    pub misses: u64,
    pub evictions: u64,
    pub expirations: u64,
    pub insertions: u64,
}

impl<K: Hash + Eq + Clone, V: Clone> Cache<K, V> {
    /// Create a new cache with the specified maximum size
    pub fn new(max_size: usize) -> Self {
        Self {
            storage: Arc::new(RwLock::new(HashMap::new())),
            max_size,
            default_ttl: None,
            stats: Arc::new(RwLock::new(CacheStats::default())),
        }
    }

    /// Create a new cache with a default TTL applied to every entry
    pub fn with_ttl(max_size: usize, default_ttl: Duration) -> Self {
        Self {
            storage: Arc::new(RwLock::new(HashMap::new())),
            max_size,
            default_ttl: Some(default_ttl),
            stats: Arc::new(RwLock::new(CacheStats::default())),
        }
    }

    /// Insert a value with an optional per-entry TTL that overrides the default
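    ///
    /// A brief sketch of the TTL override (assumes an async context; the key and
    /// value strings are illustrative). The entry below expires after 5 seconds
    /// even if the cache was built with a longer default TTL:
    ///
    /// ```rust,ignore
    /// cache
    ///     .insert("session:42".to_string(), "token".to_string(), Some(Duration::from_secs(5)))
    ///     .await;
    /// ```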
    pub async fn insert(&self, key: K, value: V, ttl: Option<Duration>) {
        let mut storage = self.storage.write().await;
        let mut stats = self.stats.write().await;

        // Use the provided TTL, falling back to the cache-wide default
        let effective_ttl = ttl.or(self.default_ttl);

        // Clean up expired entries
        self.cleanup_expired(&mut storage, &mut stats).await;

        // Evict the LRU entry when inserting a new key at capacity
        if storage.len() >= self.max_size && !storage.contains_key(&key) {
            self.evict_lru(&mut storage, &mut stats).await;
        }

        storage.insert(key, CacheEntry::new(value, effective_ttl));
        stats.insertions += 1;
    }

    /// Get a value from the cache
    pub async fn get(&self, key: &K) -> Option<V> {
        let mut storage = self.storage.write().await;
        let mut stats = self.stats.write().await;

        if let Some(entry) = storage.get_mut(key) {
            if entry.is_expired() {
                storage.remove(key);
                stats.expirations += 1;
                stats.misses += 1;
                return None;
            }

            stats.hits += 1;
            Some(entry.access().clone())
        } else {
            stats.misses += 1;
            None
        }
    }

    /// Check if a key exists in the cache (without updating access stats)
    pub async fn contains_key(&self, key: &K) -> bool {
        let storage = self.storage.read().await;
        if let Some(entry) = storage.get(key) {
            !entry.is_expired()
        } else {
            false
        }
    }

    /// Remove a key from the cache
    pub async fn remove(&self, key: &K) -> Option<V> {
        let mut storage = self.storage.write().await;
        storage.remove(key).map(|entry| entry.value)
    }

    /// Clear all entries from the cache
    pub async fn clear(&self) {
        let mut storage = self.storage.write().await;
        storage.clear();
    }

    /// Get the current number of entries (expired entries are counted until they are cleaned up)
    pub async fn len(&self) -> usize {
        let storage = self.storage.read().await;
        storage.len()
    }

    /// Check if the cache is empty
    pub async fn is_empty(&self) -> bool {
        let storage = self.storage.read().await;
        storage.is_empty()
    }

    /// Get cache statistics
    pub async fn stats(&self) -> CacheStats {
        let stats = self.stats.read().await;
        stats.clone()
    }

    /// Reset cache statistics
    pub async fn reset_stats(&self) {
        let mut stats = self.stats.write().await;
        *stats = CacheStats::default();
    }

    /// Get a cached value, or compute it with the given async closure and insert it
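    ///
    /// A minimal read-through sketch (assumes an async context; `load_user` is a
    /// hypothetical async loader, not part of this module):
    ///
    /// ```rust,ignore
    /// let user = cache
    ///     .get_or_insert("user:1".to_string(), || async {
    ///         // Only runs on a cache miss.
    ///         load_user(1).await
    ///     })
    ///     .await;
    /// ```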
    pub async fn get_or_insert<F, Fut>(&self, key: K, f: F) -> V
    where
        F: FnOnce() -> Fut,
        Fut: std::future::Future<Output = V>,
    {
        if let Some(value) = self.get(&key).await {
            return value;
        }

        let value = f().await;
        self.insert(key, value.clone(), None).await;
        value
    }

    /// Get or insert a value with a custom TTL, using a closure
    pub async fn get_or_insert_with_ttl<F, Fut>(&self, key: K, f: F, ttl: Duration) -> V
    where
        F: FnOnce() -> Fut,
        Fut: std::future::Future<Output = V>,
    {
        if let Some(value) = self.get(&key).await {
            return value;
        }

        let value = f().await;
        self.insert(key, value.clone(), Some(ttl)).await;
        value
    }

    /// Remove expired entries (internal)
    async fn cleanup_expired(
        &self,
        storage: &mut HashMap<K, CacheEntry<V>>,
        stats: &mut CacheStats,
    ) {
        let expired_keys: Vec<K> = storage
            .iter()
            .filter_map(|(k, v)| {
                if v.is_expired() {
                    Some(k.clone())
                } else {
                    None
                }
            })
            .collect();

        for key in expired_keys {
            storage.remove(&key);
            stats.expirations += 1;
        }
    }

    /// Evict the least recently used entry (internal)
    async fn evict_lru(&self, storage: &mut HashMap<K, CacheEntry<V>>, stats: &mut CacheStats) {
        if let Some(lru_key) = storage
            .iter()
            .min_by_key(|(_, entry)| entry.last_accessed)
            .map(|(k, _)| k.clone())
        {
            storage.remove(&lru_key);
            stats.evictions += 1;
        }
    }
}

/// Response cache specialized for HTTP responses
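///
/// A usage sketch of the key-generation and lookup flow (marked `ignore`
/// because it assumes the surrounding crate layout; header and body values are
/// illustrative):
///
/// ```rust,ignore
/// use std::collections::HashMap;
/// use std::time::Duration;
///
/// async fn demo() {
///     let cache = ResponseCache::new(1_000, Duration::from_secs(60));
///     let headers = HashMap::new();
///     let key = ResponseCache::generate_key("GET", "/api/users", "page=1", &headers);
///
///     if cache.get_response(&key).await.is_none() {
///         cache
///             .cache_response(key.clone(), CachedResponse {
///                 status_code: 200,
///                 headers: HashMap::new(),
///                 body: "[]".to_string(),
///                 content_type: Some("application/json".to_string()),
///             })
///             .await;
///     }
/// }
/// ```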
#[derive(Debug)]
pub struct ResponseCache {
    cache: Cache<String, CachedResponse>,
}

#[derive(Debug, Clone)]
pub struct CachedResponse {
    pub status_code: u16,
    pub headers: HashMap<String, String>,
    pub body: String,
    pub content_type: Option<String>,
}

impl ResponseCache {
    /// Create a new response cache
    pub fn new(max_size: usize, ttl: Duration) -> Self {
        Self {
            cache: Cache::with_ttl(max_size, ttl),
        }
    }

    /// Generate a cache key from request parameters
    pub fn generate_key(
        method: &str,
        path: &str,
        query: &str,
        headers: &HashMap<String, String>,
    ) -> String {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::Hasher;

        let mut hasher = DefaultHasher::new();
        hasher.write(method.as_bytes());
        hasher.write(path.as_bytes());
        hasher.write(query.as_bytes());

        // Include headers in the cache key, skipping authorization and x-* headers
        let mut sorted_headers: Vec<_> = headers.iter().collect();
        sorted_headers.sort_by_key(|(k, _)| *k);
        for (key, value) in sorted_headers {
            if key.to_lowercase() != "authorization" && !key.to_lowercase().starts_with("x-") {
                hasher.write(key.as_bytes());
                hasher.write(value.as_bytes());
            }
        }

        format!("resp_{}_{}", hasher.finish(), path.len())
    }

    /// Cache a response
    pub async fn cache_response(&self, key: String, response: CachedResponse) {
        self.cache.insert(key, response, None).await;
    }

    /// Get a cached response
    pub async fn get_response(&self, key: &str) -> Option<CachedResponse> {
        self.cache.get(&key.to_string()).await
    }

    /// Get cache statistics
    pub async fn stats(&self) -> CacheStats {
        self.cache.stats().await
    }
}

/// Template cache for compiled templates
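///
/// A brief usage sketch (the key, template string, and variable names are
/// illustrative):
///
/// ```rust,ignore
/// async fn demo() {
///     let templates = TemplateCache::new(256);
///     templates
///         .cache_template(
///             "greeting".to_string(),
///             "Hello, {{name}}!".to_string(),
///             vec!["name".to_string()],
///         )
///         .await;
///
///     let compiled = templates.get_template("greeting").await;
///     assert!(compiled.is_some());
/// }
/// ```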
#[derive(Debug)]
pub struct TemplateCache {
    cache: Cache<String, CompiledTemplate>,
}

#[derive(Debug, Clone)]
pub struct CompiledTemplate {
    pub template: String,
    pub variables: Vec<String>,
    pub compiled_at: Instant,
}

impl TemplateCache {
    /// Create a new template cache
    pub fn new(max_size: usize) -> Self {
        Self {
            cache: Cache::new(max_size),
        }
    }

    /// Cache a compiled template
    pub async fn cache_template(&self, key: String, template: String, variables: Vec<String>) {
        let compiled = CompiledTemplate {
            template,
            variables,
            compiled_at: Instant::now(),
        };
        self.cache.insert(key, compiled, None).await;
    }

    /// Get a cached template
    pub async fn get_template(&self, key: &str) -> Option<CompiledTemplate> {
        self.cache.get(&key.to_string()).await
    }

    /// Get cache statistics
    pub async fn stats(&self) -> CacheStats {
        self.cache.stats().await
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tokio::time::sleep;

    #[tokio::test]
    async fn test_basic_cache_operations() {
        let cache = Cache::new(3);

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        cache.insert("key2".to_string(), "value2".to_string(), None).await;

        assert_eq!(cache.get(&"key1".to_string()).await, Some("value1".to_string()));
        assert_eq!(cache.get(&"key2".to_string()).await, Some("value2".to_string()));
        assert_eq!(cache.get(&"key3".to_string()).await, None);

        assert_eq!(cache.len().await, 2);
        assert!(!cache.is_empty().await);
    }

    #[tokio::test]
    async fn test_ttl_expiration() {
        let cache = Cache::with_ttl(10, Duration::from_millis(50));

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        assert_eq!(cache.get(&"key1".to_string()).await, Some("value1".to_string()));

        sleep(Duration::from_millis(60)).await;
        assert_eq!(cache.get(&"key1".to_string()).await, None);
    }

    #[tokio::test]
    async fn test_lru_eviction() {
        let cache = Cache::new(2);

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        cache.insert("key2".to_string(), "value2".to_string(), None).await;

        // Access key1 to make it more recently used
        cache.get(&"key1".to_string()).await;

        // Insert key3, should evict key2 (least recently used)
        cache.insert("key3".to_string(), "value3".to_string(), None).await;

        assert_eq!(cache.get(&"key1".to_string()).await, Some("value1".to_string()));
        assert_eq!(cache.get(&"key2".to_string()).await, None);
        assert_eq!(cache.get(&"key3".to_string()).await, Some("value3".to_string()));
    }

    #[tokio::test]
    async fn test_cache_stats() {
        let cache = Cache::new(10);

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        cache.get(&"key1".to_string()).await; // Hit
        cache.get(&"key2".to_string()).await; // Miss

        let stats = cache.stats().await;
        assert_eq!(stats.hits, 1);
        assert_eq!(stats.misses, 1);
        assert_eq!(stats.insertions, 1);
    }

    #[tokio::test]
    async fn test_response_cache() {
        let response_cache = ResponseCache::new(100, Duration::from_secs(300));

        let headers = HashMap::new();
        let key = ResponseCache::generate_key("GET", "/api/users", "", &headers);

        let response = CachedResponse {
            status_code: 200,
            headers: HashMap::new(),
            body: "test response".to_string(),
            content_type: Some("application/json".to_string()),
        };

        response_cache.cache_response(key.clone(), response.clone()).await;
        let cached = response_cache.get_response(&key).await;

        assert!(cached.is_some());
        assert_eq!(cached.unwrap().body, "test response");
    }
}