mockforge_core/cache.rs

//! High-performance caching utilities for MockForge
//!
//! This module provides various caching strategies to optimize
//! performance for frequently accessed data.
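//!
//! # Examples
//!
//! A minimal usage sketch (illustrative only; it assumes this module is exposed
//! as `mockforge_core::cache`):
//!
//! ```rust,ignore
//! use mockforge_core::cache::Cache;
//! use std::time::Duration;
//!
//! # async fn demo() {
//! // Hold up to 1_000 entries, each expiring 60 seconds after insertion.
//! let cache: Cache<String, String> = Cache::with_ttl(1_000, Duration::from_secs(60));
//! cache.insert("user:42".to_string(), "Alice".to_string(), None).await;
//! assert_eq!(cache.get(&"user:42".to_string()).await, Some("Alice".to_string()));
//! # }
//! ```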

use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

/// Cache entry with expiration support
#[derive(Debug, Clone)]
struct CacheEntry<V> {
    value: V,
    expires_at: Option<Instant>,
    access_count: u64,
    last_accessed: Instant,
}

impl<V> CacheEntry<V> {
    fn new(value: V, ttl: Option<Duration>) -> Self {
        let now = Instant::now();
        Self {
            value,
            expires_at: ttl.map(|duration| now + duration),
            access_count: 0,
            last_accessed: now,
        }
    }

    fn is_expired(&self) -> bool {
        self.expires_at.is_some_and(|expires_at| Instant::now() > expires_at)
    }

    fn access(&mut self) -> &V {
        self.access_count += 1;
        self.last_accessed = Instant::now();
        &self.value
    }
}

/// High-performance in-memory cache with TTL and LRU eviction
#[derive(Debug)]
pub struct Cache<K, V> {
    storage: Arc<RwLock<HashMap<K, CacheEntry<V>>>>,
    max_size: usize,
    default_ttl: Option<Duration>,
    stats: Arc<RwLock<CacheStats>>,
}

/// Statistics for cache performance tracking
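///
/// A hit ratio can be derived from these counters; a hypothetical helper
/// (not part of this module) might look like:
///
/// ```rust,ignore
/// fn hit_ratio(stats: &CacheStats) -> f64 {
///     let total = stats.hits + stats.misses;
///     if total == 0 {
///         0.0
///     } else {
///         stats.hits as f64 / total as f64
///     }
/// }
/// ```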
#[derive(Debug, Default, Clone)]
pub struct CacheStats {
    /// Number of cache hits (successful lookups)
    pub hits: u64,
    /// Number of cache misses (failed lookups)
    pub misses: u64,
    /// Number of entries evicted due to size limits
    pub evictions: u64,
    /// Number of entries expired due to TTL
    pub expirations: u64,
    /// Total number of insertions
    pub insertions: u64,
}

impl<K: Hash + Eq + Clone, V: Clone> Cache<K, V> {
    /// Create a new cache with specified maximum size
    pub fn new(max_size: usize) -> Self {
        Self {
            storage: Arc::new(RwLock::new(HashMap::new())),
            max_size,
            default_ttl: None,
            stats: Arc::new(RwLock::new(CacheStats::default())),
        }
    }

    /// Create a new cache with TTL support
    pub fn with_ttl(max_size: usize, default_ttl: Duration) -> Self {
        Self {
            storage: Arc::new(RwLock::new(HashMap::new())),
            max_size,
            default_ttl: Some(default_ttl),
            stats: Arc::new(RwLock::new(CacheStats::default())),
        }
    }

    /// Insert a value with optional custom TTL
    pub async fn insert(&self, key: K, value: V, ttl: Option<Duration>) {
        let mut storage = self.storage.write().await;
        let mut stats = self.stats.write().await;

        // Use provided TTL or default TTL
        let effective_ttl = ttl.or(self.default_ttl);

        // Clean up expired entries
        self.cleanup_expired(&mut storage, &mut stats).await;

        // Evict LRU entries if at capacity
        if storage.len() >= self.max_size && !storage.contains_key(&key) {
            self.evict_lru(&mut storage, &mut stats).await;
        }

        storage.insert(key, CacheEntry::new(value, effective_ttl));
        stats.insertions += 1;
    }

    /// Get a value from the cache
    pub async fn get(&self, key: &K) -> Option<V> {
        let mut storage = self.storage.write().await;
        let mut stats = self.stats.write().await;

        if let Some(entry) = storage.get_mut(key) {
            if entry.is_expired() {
                storage.remove(key);
                stats.expirations += 1;
                stats.misses += 1;
                return None;
            }

            stats.hits += 1;
            Some(entry.access().clone())
        } else {
            stats.misses += 1;
            None
        }
    }

    /// Check if a key exists in the cache (without updating access stats)
    pub async fn contains_key(&self, key: &K) -> bool {
        let storage = self.storage.read().await;
        if let Some(entry) = storage.get(key) {
            !entry.is_expired()
        } else {
            false
        }
    }

    /// Remove a key from the cache
    pub async fn remove(&self, key: &K) -> Option<V> {
        let mut storage = self.storage.write().await;
        storage.remove(key).map(|entry| entry.value)
    }

    /// Clear all entries from the cache
    pub async fn clear(&self) {
        let mut storage = self.storage.write().await;
        storage.clear();
    }

    /// Get current cache size
    pub async fn len(&self) -> usize {
        let storage = self.storage.read().await;
        storage.len()
    }

    /// Check if cache is empty
    pub async fn is_empty(&self) -> bool {
        let storage = self.storage.read().await;
        storage.is_empty()
    }

    /// Get cache statistics
    pub async fn stats(&self) -> CacheStats {
        let stats = self.stats.read().await;
        stats.clone()
    }

    /// Reset cache statistics
    pub async fn reset_stats(&self) {
        let mut stats = self.stats.write().await;
        *stats = CacheStats::default();
    }

    /// Get or insert a value using a closure
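    ///
    /// On a miss, the closure runs and its result is stored with the default TTL.
    /// Concurrent callers that miss at the same time may each run the closure.
    /// A minimal sketch (illustrative only):
    ///
    /// ```rust,ignore
    /// let cache: Cache<String, String> = Cache::new(100);
    /// let value = cache
    ///     .get_or_insert("config".to_string(), || async {
    ///         // Runs only when "config" is absent or expired.
    ///         "loaded".to_string()
    ///     })
    ///     .await;
    /// assert_eq!(value, "loaded");
    /// ```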
    pub async fn get_or_insert<F, Fut>(&self, key: K, f: F) -> V
    where
        F: FnOnce() -> Fut,
        Fut: std::future::Future<Output = V>,
    {
        if let Some(value) = self.get(&key).await {
            return value;
        }

        let value = f().await;
        self.insert(key, value.clone(), None).await;
        value
    }

    /// Get or insert a value with custom TTL using a closure
    pub async fn get_or_insert_with_ttl<F, Fut>(&self, key: K, f: F, ttl: Duration) -> V
    where
        F: FnOnce() -> Fut,
        Fut: std::future::Future<Output = V>,
    {
        if let Some(value) = self.get(&key).await {
            return value;
        }

        let value = f().await;
        self.insert(key, value.clone(), Some(ttl)).await;
        value
    }

    /// Cleanup expired entries (internal)
    async fn cleanup_expired(
        &self,
        storage: &mut HashMap<K, CacheEntry<V>>,
        stats: &mut CacheStats,
    ) {
        let expired_keys: Vec<K> = storage
            .iter()
            .filter_map(|(k, v)| {
                if v.is_expired() {
                    Some(k.clone())
                } else {
                    None
                }
            })
            .collect();

        for key in expired_keys {
            storage.remove(&key);
            stats.expirations += 1;
        }
    }

    /// Evict least recently used entry (internal)
    async fn evict_lru(&self, storage: &mut HashMap<K, CacheEntry<V>>, stats: &mut CacheStats) {
        if let Some(lru_key) = storage
            .iter()
            .min_by_key(|(_, entry)| entry.last_accessed)
            .map(|(key, _)| key.clone())
        {
            storage.remove(&lru_key);
            stats.evictions += 1;
        }
    }
}

/// Response cache specifically optimized for HTTP responses
#[derive(Debug)]
pub struct ResponseCache {
    cache: Cache<String, CachedResponse>,
}

/// Cached HTTP response data
#[derive(Debug, Clone)]
pub struct CachedResponse {
    /// HTTP status code
    pub status_code: u16,
    /// Response headers
    pub headers: HashMap<String, String>,
    /// Response body content
    pub body: String,
    /// Content-Type header value, if present
    pub content_type: Option<String>,
}

impl ResponseCache {
    /// Create a new response cache
    pub fn new(max_size: usize, ttl: Duration) -> Self {
        Self {
            cache: Cache::with_ttl(max_size, ttl),
        }
    }

    /// Generate cache key from request parameters
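    ///
    /// `Authorization` and `x-*` headers are excluded from the hash, so requests
    /// that differ only in those headers map to the same key. A minimal sketch
    /// (illustrative only):
    ///
    /// ```rust,ignore
    /// use std::collections::HashMap;
    ///
    /// let mut with_auth = HashMap::new();
    /// with_auth.insert("Accept".to_string(), "application/json".to_string());
    /// with_auth.insert("Authorization".to_string(), "Bearer secret".to_string());
    ///
    /// let mut without_auth = HashMap::new();
    /// without_auth.insert("Accept".to_string(), "application/json".to_string());
    ///
    /// let a = ResponseCache::generate_key("GET", "/api/users", "page=1", &with_auth);
    /// let b = ResponseCache::generate_key("GET", "/api/users", "page=1", &without_auth);
    /// assert_eq!(a, b);
    /// ```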
    pub fn generate_key(
        method: &str,
        path: &str,
        query: &str,
        headers: &HashMap<String, String>,
    ) -> String {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::Hasher;

        let mut hasher = DefaultHasher::new();
        hasher.write(method.as_bytes());
        hasher.write(path.as_bytes());
        hasher.write(query.as_bytes());

        // Include relevant headers in cache key
        let mut sorted_headers: Vec<_> = headers.iter().collect();
        sorted_headers.sort_by_key(|(k, _)| *k);
        for (key, value) in sorted_headers {
            if key.to_lowercase() != "authorization" && !key.to_lowercase().starts_with("x-") {
                hasher.write(key.as_bytes());
                hasher.write(value.as_bytes());
            }
        }

        format!("resp_{}_{}", hasher.finish(), path.len())
    }

    /// Cache a response
    pub async fn cache_response(&self, key: String, response: CachedResponse) {
        self.cache.insert(key, response, None).await;
    }

    /// Get cached response
    pub async fn get_response(&self, key: &str) -> Option<CachedResponse> {
        self.cache.get(&key.to_string()).await
    }

    /// Get cache statistics
    pub async fn stats(&self) -> CacheStats {
        self.cache.stats().await
    }
}

/// Template cache for compiled templates
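///
/// A minimal usage sketch (illustrative only; assumes this module is exposed
/// as `mockforge_core::cache`):
///
/// ```rust,ignore
/// use mockforge_core::cache::TemplateCache;
///
/// # async fn demo() {
/// let templates = TemplateCache::new(50);
/// templates
///     .cache_template(
///         "greeting".to_string(),
///         "Hello, {{name}}!".to_string(),
///         vec!["name".to_string()],
///     )
///     .await;
/// assert!(templates.get_template("greeting").await.is_some());
/// # }
/// ```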
#[derive(Debug)]
pub struct TemplateCache {
    cache: Cache<String, CompiledTemplate>,
}

/// Compiled template with metadata for caching
#[derive(Debug, Clone)]
pub struct CompiledTemplate {
    /// The compiled template string
    pub template: String,
    /// List of variable names used in the template
    pub variables: Vec<String>,
    /// Timestamp when the template was compiled
    pub compiled_at: Instant,
}

impl TemplateCache {
    /// Create a new template cache
    pub fn new(max_size: usize) -> Self {
        Self {
            cache: Cache::new(max_size),
        }
    }

    /// Cache a compiled template
    pub async fn cache_template(&self, key: String, template: String, variables: Vec<String>) {
        let compiled = CompiledTemplate {
            template,
            variables,
            compiled_at: Instant::now(),
        };
        self.cache.insert(key, compiled, None).await;
    }

    /// Get cached template
    pub async fn get_template(&self, key: &str) -> Option<CompiledTemplate> {
        self.cache.get(&key.to_string()).await
    }

    /// Get cache statistics
    pub async fn stats(&self) -> CacheStats {
        self.cache.stats().await
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tokio::time::sleep;

    #[tokio::test]
    async fn test_basic_cache_operations() {
        let cache = Cache::new(3);

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        cache.insert("key2".to_string(), "value2".to_string(), None).await;

        assert_eq!(cache.get(&"key1".to_string()).await, Some("value1".to_string()));
        assert_eq!(cache.get(&"key2".to_string()).await, Some("value2".to_string()));
        assert_eq!(cache.get(&"key3".to_string()).await, None);

        assert_eq!(cache.len().await, 2);
        assert!(!cache.is_empty().await);
    }

    #[tokio::test]
    async fn test_ttl_expiration() {
        let cache = Cache::with_ttl(10, Duration::from_millis(50));

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        assert_eq!(cache.get(&"key1".to_string()).await, Some("value1".to_string()));

        sleep(Duration::from_millis(60)).await;
        assert_eq!(cache.get(&"key1".to_string()).await, None);
    }

    #[tokio::test]
    async fn test_lru_eviction() {
        let cache = Cache::new(2);

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        cache.insert("key2".to_string(), "value2".to_string(), None).await;

        // Access key1 to make it more recently used
        cache.get(&"key1".to_string()).await;

        // Insert key3, should evict key2 (least recently used)
        cache.insert("key3".to_string(), "value3".to_string(), None).await;

        assert_eq!(cache.get(&"key1".to_string()).await, Some("value1".to_string()));
        assert_eq!(cache.get(&"key2".to_string()).await, None);
        assert_eq!(cache.get(&"key3".to_string()).await, Some("value3".to_string()));
    }

    #[tokio::test]
    async fn test_cache_stats() {
        let cache = Cache::new(10);

        cache.insert("key1".to_string(), "value1".to_string(), None).await;
        cache.get(&"key1".to_string()).await; // Hit
        cache.get(&"key2".to_string()).await; // Miss

        let stats = cache.stats().await;
        assert_eq!(stats.hits, 1);
        assert_eq!(stats.misses, 1);
        assert_eq!(stats.insertions, 1);
    }

    #[tokio::test]
    async fn test_response_cache() {
        let response_cache = ResponseCache::new(100, Duration::from_secs(300));

        let headers = HashMap::new();
        let key = ResponseCache::generate_key("GET", "/api/users", "", &headers);

        let response = CachedResponse {
            status_code: 200,
            headers: HashMap::new(),
            body: "test response".to_string(),
            content_type: Some("application/json".to_string()),
        };

        response_cache.cache_response(key.clone(), response.clone()).await;
        let cached = response_cache.get_response(&key).await;

        assert!(cached.is_some());
        assert_eq!(cached.unwrap().body, "test response");
    }
}
441}