scirs2_core/cache.rs
//! Cache and memoization utilities for improved performance
//!
//! This module provides caching and memoization utilities to improve performance
//! by avoiding redundant computations.
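//!
//! A minimal usage sketch of the memoized `fibonacci` helper defined at the
//! bottom of this module:
//!
//! ```rust
//! use scirs2_core::cache::fibonacci;
//!
//! // Results are memoized by the `cached` attribute, so repeated calls are cheap.
//! assert_eq!(fibonacci(10), 55);
//! ```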

use cached::{proc_macro::cached, Cached, SizedCache};
use std::hash::Hash;
use std::time::{Duration, Instant};

/// Cache configuration for the library
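///
/// Defaults can be overridden selectively with struct-update syntax; a small
/// sketch (the field values chosen here are arbitrary):
///
/// ```rust
/// use scirs2_core::cache::CacheConfig;
///
/// let config = CacheConfig {
///     default_size: 256,
///     ..CacheConfig::default()
/// };
/// assert!(config.enable_caching);
/// assert_eq!(config.default_ttl, 3600);
/// ```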
#[derive(Debug, Clone, Copy)]
pub struct CacheConfig {
    /// Default cache size (number of items)
    pub default_size: usize,
    /// Default time-to-live for cached items (seconds)
    pub default_ttl: u64,
    /// Whether to enable caching by default
    pub enable_caching: bool,
}

impl Default for CacheConfig {
    fn default() -> Self {
        Self {
            default_size: 1024,
            default_ttl: 3600, // 1 hour
            enable_caching: true,
        }
    }
}

/// A sized cache with time-to-live (TTL) functionality
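///
/// Expired entries are evicted lazily when they are next requested via `get`.
/// A short sketch:
///
/// ```rust
/// use scirs2_core::cache::TTLSizedCache;
///
/// // Up to 16 entries, each valid for 30 seconds.
/// let mut cache: TTLSizedCache<&str, i32> = TTLSizedCache::new(16, 30);
/// cache.insert("x", 1);
/// assert_eq!(cache.get(&"x"), Some(1));
/// assert_eq!(cache.get(&"missing"), None);
/// ```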
pub struct TTLSizedCache<K, V> {
    /// Internal cache
    cache: SizedCache<K, (V, Instant)>,
    /// Time-to-live for cache entries
    ttl: Duration,
}

impl<K, V> TTLSizedCache<K, V>
where
    K: Hash + Eq + Clone,
    V: Clone,
{
    /// Create a new TTL cache with the specified size and TTL (in seconds)
    #[must_use]
    pub fn new(size: usize, ttl_seconds: u64) -> Self {
        Self {
            cache: SizedCache::with_size(size),
            ttl: Duration::from_secs(ttl_seconds),
        }
    }

    /// Insert a key-value pair into the cache
    pub fn insert(&mut self, key: K, value: V) {
        let now = Instant::now();
        self.cache.cache_set(key, (value, now));
    }

    /// Get a value from the cache if it exists and hasn't expired
    #[must_use]
    pub fn get(&mut self, key: &K) -> Option<V> {
        match self.cache.cache_get(key) {
            Some((value, timestamp)) if timestamp.elapsed() < self.ttl => Some(value.clone()),
            Some(_) => {
                // Value has expired, remove it from the cache
                self.cache.cache_remove(key);
                None
            }
            None => None,
        }
    }

    /// Remove a key-value pair from the cache
    pub fn remove(&mut self, key: &K) {
        self.cache.cache_remove(key);
    }

    /// Clear the cache
    pub fn clear(&mut self) {
        self.cache.cache_clear();
    }

    /// Get the number of items currently stored in the cache
    #[must_use]
    pub fn len(&self) -> usize {
        self.cache.cache_size()
    }

    /// Check if the cache is empty
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

/// A cache builder
///
/// This struct provides a fluent interface for configuring and building
/// caches such as [`TTLSizedCache`].
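///
/// A small usage sketch of the fluent interface (mirrors `build_sized_cache`
/// below):
///
/// ```rust
/// use scirs2_core::cache::CacheBuilder;
///
/// let mut cache = CacheBuilder::new()
///     .with_size(100)
///     .with_ttl(300)
///     .build_sized_cache::<String, i32>();
///
/// cache.insert("key".to_string(), 1);
/// assert_eq!(cache.get(&"key".to_string()), Some(1));
/// ```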
pub struct CacheBuilder {
    /// Cache size
    size: Option<usize>,
    /// Time-to-live in seconds
    ttl: Option<u64>,
    /// Whether to make the cache thread-safe
    thread_safe: bool,
}

impl Default for CacheBuilder {
    fn default() -> Self {
        Self::new()
    }
}

impl CacheBuilder {
    /// Create a new cache builder
    #[must_use]
    pub fn new() -> Self {
        Self {
            size: None,
            ttl: None,
            thread_safe: false,
        }
    }

    /// Set the cache size
    #[must_use]
    pub fn with_size(mut self, size: usize) -> Self {
        self.size = Some(size);
        self
    }

    /// Set the time-to-live in seconds
    #[must_use]
    pub fn with_ttl(mut self, ttl: u64) -> Self {
        self.ttl = Some(ttl);
        self
    }

    /// Make the cache thread-safe
    #[must_use]
    pub fn thread_safe(mut self) -> Self {
        self.thread_safe = true;
        self
    }

    /// Build a [`TTLSizedCache`] with the configured size and TTL, falling
    /// back to the [`CacheConfig`] defaults for any unset option.
    ///
    /// Note: the `thread_safe` setting is not consumed by this method; the
    /// returned cache is not synchronized.
    #[must_use]
    pub fn build_sized_cache<K, V>(self) -> TTLSizedCache<K, V>
    where
        K: Hash + Eq + Clone,
        V: Clone,
    {
        let config = CacheConfig::default();
        let size = self.size.unwrap_or(config.default_size);
        let ttl = self.ttl.unwrap_or(config.default_ttl);

        TTLSizedCache::new(size, ttl)
    }
}

/// Example of how to use the `cached` attribute macro
///
/// ```ignore
/// // Illustrative only; not compiled as a doctest.
/// use cached::proc_macro::cached;
///
/// #[cached(size = 100)]
/// pub fn expensive_calculation(x: u64) -> u64 {
///     // Expensive computation here
///     x * x
/// }
/// ```
///
/// Example of using the TTL cache for memoization
///
/// This example shows how to use the TTL cache for memoizing expensive operations.
///
/// ```rust
/// use scirs2_core::cache::TTLSizedCache;
///
/// // Create a TTL cache
/// let mut cache = TTLSizedCache::<String, String>::new(100, 60);
///
/// // Cache a value
/// let key = "example_key";
/// let value = "example_value";
/// cache.insert(key.to_string(), value.to_string());
///
/// // Retrieve the value
/// let retrieved_value = cache.get(&key.to_string());
/// assert_eq!(retrieved_value, Some(value.to_string()));
/// ```
///
/// Compute Fibonacci numbers with memoization
///
/// This function demonstrates the use of memoization for computing
/// Fibonacci numbers efficiently.
///
/// # Example
///
/// ```ignore
/// use cached::proc_macro::cached;
///
/// #[cached(size = 100)]
/// pub fn fibonacci_prime_cache(n: u64) -> u64 {
///     match n {
///         0 => 0,
///         1 => 1,
///         n => fibonacci_prime_cache(n - 1) + fibonacci_prime_cache(n - 2),
///     }
/// }
/// ```
///
/// # Arguments
///
/// * `n` - The Fibonacci number to compute
///
/// # Returns
///
/// * The nth Fibonacci number
#[cached]
#[must_use]
#[allow(dead_code)]
pub fn fibonacci(n: u64) -> u64 {
    match n {
        0 => 0,
        1 => 1,
        n => fibonacci(n - 1) + fibonacci(n - 2),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::thread;

    #[test]
    fn test_ttl_sized_cache() {
        let mut cache = TTLSizedCache::<i32, &str>::new(5, 1);

        // Test insertion and retrieval
        cache.insert(1, "one");
        cache.insert(2, "two");

        assert_eq!(cache.get(&1), Some("one"));
        assert_eq!(cache.get(&2), Some("two"));
        assert_eq!(cache.get(&3), None);

        // Test TTL expiration
        thread::sleep(Duration::from_secs(2));

        assert_eq!(cache.get(&1), None);
        assert_eq!(cache.get(&2), None);

        // Test size limit
        for i in 0..10 {
            cache.insert(i, "test");
        }

        // Only the last 5 should be in the cache due to size limit
        for i in 0..5 {
            assert_eq!(cache.get(&i), None);
        }

        for i in 5..10 {
            assert_eq!(cache.get(&i), Some("test"));
        }
    }

    #[test]
    fn test_cache_builder() {
        let cache = CacheBuilder::new()
            .with_size(10)
            .with_ttl(60)
            .build_sized_cache::<String, i32>();

        assert!(cache.is_empty());
    }
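
    // A small additional test sketch exercising remove(), clear(), len(), and
    // is_empty(); it relies only on the TTLSizedCache API defined above.
    #[test]
    fn test_ttl_cache_remove_and_clear() {
        let mut cache = TTLSizedCache::<&str, i32>::new(4, 60);

        cache.insert("a", 1);
        cache.insert("b", 2);
        assert_eq!(cache.len(), 2);

        // Removing a key makes subsequent lookups miss.
        cache.remove(&"a");
        assert_eq!(cache.get(&"a"), None);
        assert_eq!(cache.get(&"b"), Some(2));

        // Clearing empties the cache entirely.
        cache.clear();
        assert!(cache.is_empty());
        assert_eq!(cache.get(&"b"), None);
    }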

    #[test]
    fn test_fibonacci() {
        // Compute Fibonacci numbers with memoization
        let fib10 = fibonacci(10);
        let fib20 = fibonacci(20);

        assert_eq!(fib10, 55);
        assert_eq!(fib20, 6765);

        // The second call should be much faster due to memoization
        let start = Instant::now();
        let fib20_again = fibonacci(20);
        let elapsed = start.elapsed();

        assert_eq!(fib20_again, 6765);

        // The memoized call should be very fast (well under 10 ms)
        assert!(elapsed.as_millis() < 10);
    }
}