Skip to main content

crates_docs/cache/
memory.rs

//! Memory cache implementation
//!
//! Memory cache using `moka::sync::Cache` with `TinyLFU` eviction policy.
//! This provides better performance and hit rate than simple LRU.
5
6use std::time::Duration;
7
/// A cached value together with its optional per-entry time-to-live.
#[derive(Clone, Debug)]
struct CacheEntry {
    /// The cached payload.
    value: String,
    /// How long the entry stays valid; `None` means it never expires
    /// (though eviction may still remove it under capacity pressure).
    ttl: Option<Duration>,
}
14
15/// Expiry implementation for per-entry TTL support
16#[derive(Debug, Clone, Default)]
17struct CacheExpiry;
18
19impl moka::Expiry<String, CacheEntry> for CacheExpiry {
20    fn expire_after_create(
21        &self,
22        _key: &String,
23        value: &CacheEntry,
24        _created_at: std::time::Instant,
25    ) -> Option<Duration> {
26        value.ttl
27    }
28}
29
30/// Memory cache implementation using `moka::sync::Cache`
31///
32/// Features:
33/// - Lock-free concurrent access
34/// - `TinyLFU` eviction policy (better hit rate than LRU)
35/// - Per-entry TTL support via Expiry trait
36/// - Automatic expiration cleanup
37pub struct MemoryCache {
38    cache: moka::sync::Cache<String, CacheEntry>,
39}
40
41impl MemoryCache {
42    /// Create a new memory cache
43    ///
44    /// # Arguments
45    /// * `max_size` - Maximum number of cache entries
46    #[must_use]
47    pub fn new(max_size: usize) -> Self {
48        Self {
49            cache: moka::sync::Cache::builder()
50                .max_capacity(max_size as u64)
51                .expire_after(CacheExpiry)
52                .build(),
53        }
54    }
55}
56
57#[async_trait::async_trait]
58impl super::Cache for MemoryCache {
59    #[tracing::instrument(skip(self), level = "trace")]
60    async fn get(&self, key: &str) -> Option<String> {
61        let result = self.cache.get(key).map(|entry| entry.value.clone());
62        if result.is_some() {
63            tracing::trace!(cache_type = "memory", key = %key, "Cache hit");
64        } else {
65            tracing::trace!(cache_type = "memory", key = %key, "Cache miss");
66        }
67        result
68    }
69
70    #[tracing::instrument(skip(self), level = "trace")]
71    async fn set(
72        &self,
73        key: String,
74        value: String,
75        ttl: Option<Duration>,
76    ) -> crate::error::Result<()> {
77        let entry = CacheEntry { value, ttl };
78        tracing::trace!(cache_type = "memory", key = %key, "Setting cache entry");
79        self.cache.insert(key, entry);
80        Ok(())
81    }
82
83    #[tracing::instrument(skip(self), level = "trace")]
84    async fn delete(&self, key: &str) -> crate::error::Result<()> {
85        tracing::trace!(cache_type = "memory", key = %key, "Deleting cache entry");
86        self.cache.invalidate(key);
87        Ok(())
88    }
89
90    #[tracing::instrument(skip(self), level = "trace")]
91    async fn clear(&self) -> crate::error::Result<()> {
92        tracing::trace!(cache_type = "memory", "Clearing all cache entries");
93        self.cache.invalidate_all();
94        Ok(())
95    }
96
97    #[tracing::instrument(skip(self), level = "trace")]
98    async fn exists(&self, key: &str) -> bool {
99        let result = self.cache.contains_key(key);
100        tracing::trace!(cache_type = "memory", key = %key, exists = result, "Checking cache entry existence");
101        result
102    }
103}
104
#[cfg(test)]
mod tests {
    use super::*;
    use crate::cache::Cache;
    use tokio::time::sleep;

    /// Round-trips set/get/delete/clear through the `Cache` trait.
    #[tokio::test]
    async fn test_memory_cache_basic() {
        let cache = MemoryCache::new(10);

        // Test set and get
        cache
            .set("key1".to_string(), "value1".to_string(), None)
            .await
            .expect("set should succeed");
        assert_eq!(cache.get("key1").await, Some("value1".to_string()));

        // Test delete
        cache.delete("key1").await.expect("delete should succeed");
        assert_eq!(cache.get("key1").await, None);

        // Test clear
        cache
            .set("key2".to_string(), "value2".to_string(), None)
            .await
            .expect("set should succeed");
        cache.clear().await.expect("clear should succeed");
        // `invalidate_all` is lazy; flush pending work before asserting.
        cache.cache.run_pending_tasks();
        assert_eq!(cache.get("key2").await, None);
    }

    /// An entry with a TTL must disappear once the TTL has elapsed.
    #[tokio::test]
    async fn test_memory_cache_ttl() {
        let cache = MemoryCache::new(10);

        // Test cache with TTL
        cache
            .set(
                "key1".to_string(),
                "value1".to_string(),
                Some(Duration::from_millis(100)),
            )
            .await
            .expect("set should succeed");
        assert_eq!(cache.get("key1").await, Some("value1".to_string()));

        // Wait for expiration
        sleep(Duration::from_millis(150)).await;
        // Run pending tasks to ensure expiration is processed
        cache.cache.run_pending_tasks();
        assert_eq!(cache.get("key1").await, None);
    }

    /// After maintenance runs, the cache must not exceed its capacity.
    #[tokio::test]
    async fn test_memory_cache_eviction() {
        // Note: moka uses the TinyLFU algorithm, which may reject new entries
        // based on access frequency, so we only bound the total count.
        let cache = MemoryCache::new(3);

        // Fill cache with more entries than capacity
        for i in 0..5 {
            cache
                .set(format!("key{i}"), format!("value{i}"), None)
                .await
                .expect("set should succeed");
        }

        // Evictions are processed asynchronously; force them to run.
        cache.cache.run_pending_tasks();

        // Fix: the old assertion (`entry_count <= 5`) was vacuous — exactly
        // 5 entries were inserted, so it could never fail. Once pending
        // maintenance has run, moka enforces `max_capacity`, so the count
        // must not exceed the configured limit of 3.
        let entry_count = cache.cache.entry_count();
        assert!(
            entry_count <= 3,
            "Entry count should be at most 3, got {entry_count}"
        );
    }

    /// `exists` reflects presence without requiring a `get`.
    #[tokio::test]
    async fn test_memory_cache_exists() {
        let cache = MemoryCache::new(10);

        cache
            .set("key1".to_string(), "value1".to_string(), None)
            .await
            .expect("set should succeed");
        assert!(cache.exists("key1").await);
        assert!(!cache.exists("key2").await);
    }
}