local_lru/lib.rs
//! [`LocalCache`] is a thread-safe and lock-free implementation of LRU caching.
//! Its speed and thread safety come from using thread-local storage instead of locks: each thread has its own cache, and caches are never shared between threads.
//!
//! # Example
//!
//! ```
//! use local_lru::LocalCache;
//! use bytes::Bytes;
//! // Initialize the cache with a capacity of 2 items and a TTL of 60 seconds
//! let cache = LocalCache::initialize(2, 60);
//! cache.add_item("key1", Bytes::from("value1"));
//! let item = cache.get_item("key1");
//! ```
//! One of the main challenges with LRU caching is that it involves frequent writes to its internal data structures: every get and set operation must update at least one pointer.
//! In multithreaded applications such as web services, this erodes the famous O(1) complexity of LRU operations, because synchronization and locking are needed to keep the cache thread-safe.
//!
//! The thread-local strategy lets us build a fast, thread-safe, lock-free O(1) cache, at the cost of extra memory.
//! As such, the cache suits applications that need a high-performance, thread-safe cache and whose cached data is small enough that duplicating it per thread is acceptable.
//! As a rough estimate, caching 1,000,000 entries of 128 bytes each takes about 250MB per cache (data plus bookkeeping overhead). With `LocalCache` on a 4-core (4-thread) web service, each thread holds its own copy, so the total footprint is roughly 4 × 250MB ≈ 1GB.
//!
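//! Because every thread gets its own cache, data written on one thread is not visible on another. A minimal sketch of this isolation, using only the API shown above:
//!
//! ```
//! use local_lru::LocalCache;
//! use bytes::Bytes;
//! use std::thread;
//!
//! let cache = LocalCache::initialize(2, 60);
//! cache.add_item("key1", Bytes::from("value1"));
//!
//! let cache_clone = cache.clone();
//! let handle = thread::spawn(move || {
//!     // The spawned thread starts with its own, empty, thread-local cache.
//!     assert_eq!(cache_clone.get_item("key1"), None);
//! });
//! handle.join().unwrap();
//!
//! // The main thread still sees the item it added.
//! assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
//! ```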
use bytes::Bytes;
use serde::{de::DeserializeOwned, Serialize};
use std::cell::RefCell;

mod cache;
use cache::LRUCache;

thread_local! {
    static CACHE: RefCell<Option<LRUCache>> = const { RefCell::new(None) };
}

#[derive(Clone)]
pub struct LocalCache {
    capacity: usize,
    ttl: u64,
}

impl LocalCache {
    /// Initializes a new `LocalCache` with the given capacity and TTL.
    /// Note that this only records the parameters that set the cache capacity and TTL.
    /// The thread-local cache itself is created with these parameters only when a thread first accesses it through `get_item` or `add_item`.
    /// Subsequent calls to `initialize` simply change the parameters, which only affect threads that have not yet accessed the cache.
    ///
    /// # Arguments
    ///
    /// * `capacity` - The maximum number of items the cache can hold before evicting the least recently used item.
    /// * `ttl` - The time-to-live for each item in seconds. Anything less than 1 means no expiration.
    ///
    /// # Example
    ///
    /// ```
    /// use local_lru::LocalCache;
    /// use bytes::Bytes;
    /// let cache = LocalCache::initialize(2, 60);
    /// cache.add_item("key1", Bytes::from("value1"));
    /// assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
    /// ```
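    ///
    /// A minimal sketch of the re-initialization behavior described above, on a single thread: once the thread-local cache has been created, a later `initialize` with different parameters does not resize it.
    ///
    /// ```
    /// use local_lru::LocalCache;
    /// use bytes::Bytes;
    ///
    /// let first = LocalCache::initialize(1, 60);
    /// first.add_item("a", Bytes::from("1")); // Creates this thread's cache with capacity 1.
    ///
    /// // A second initialize only affects threads that have not touched the cache yet.
    /// let second = LocalCache::initialize(10, 60);
    /// second.add_item("b", Bytes::from("2")); // Same thread-local cache, still capacity 1, so "a" is evicted.
    ///
    /// assert_eq!(second.get_item("a"), None);
    /// assert_eq!(second.get_item("b"), Some(Bytes::from("2")));
    /// ```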
    pub fn initialize(capacity: usize, ttl: u64) -> Self {
        LocalCache { capacity, ttl }
    }

    #[deprecated(since = "0.4.0", note = "Use initialize() instead")]
    pub fn new(capacity: usize, ttl: u64) -> Self {
        LocalCache { capacity, ttl }
    }

    fn initialize_cache_if_none(&self) {
        CACHE.with(|cache| {
            let mut cache = cache.borrow_mut();
            if cache.is_none() {
                *cache = Some(LRUCache::new(self.capacity, self.ttl));
            }
        });
    }

    /// Gets an item from the cache. As with any LRU cache, fetching an item also marks it as most recently used by moving it to the front of the list.
    ///
    /// # Returns
    ///
    /// An `Option` containing the item if it exists, or `None` if it does not.
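    ///
    /// # Example
    ///
    /// A small sketch: a missing key yields `None`, a present key yields its bytes.
    ///
    /// ```
    /// use local_lru::LocalCache;
    /// use bytes::Bytes;
    ///
    /// let cache = LocalCache::initialize(2, 60);
    /// assert_eq!(cache.get_item("missing"), None);
    /// cache.add_item("key1", Bytes::from("value1"));
    /// assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
    /// ```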
    pub fn get_item(&self, key: &str) -> Option<Bytes> {
        self.initialize_cache_if_none();
        CACHE.with(|cache| {
            if let Some(cache) = cache.borrow_mut().as_mut() {
                cache.get_item(key)
            } else {
                None
            }
        })
    }

    /// Adds an item to the cache.
    ///
    /// # Arguments
    ///
    /// * `key` - The key to store the item under.
    /// * `value` - The value to store, represented as `Bytes`.
    ///
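    /// # Example
    ///
    /// A small sketch of the eviction behavior when the cache is full:
    ///
    /// ```
    /// use local_lru::LocalCache;
    /// use bytes::Bytes;
    ///
    /// let cache = LocalCache::initialize(2, 60);
    /// cache.add_item("key1", Bytes::from("value1"));
    /// cache.add_item("key2", Bytes::from("value2"));
    /// // With capacity 2, adding a third key evicts the least recently used entry ("key1").
    /// cache.add_item("key3", Bytes::from("value3"));
    /// assert_eq!(cache.get_item("key1"), None);
    /// assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));
    /// ```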
    pub fn add_item(&self, key: &str, value: Bytes) {
        self.initialize_cache_if_none();
        CACHE.with(|cache| {
            if let Some(cache) = cache.borrow_mut().as_mut() {
                cache.add_item(key.to_string(), value)
            }
        })
    }

    /// Wrapper function to add a struct to the cache.
    /// It simply uses bincode to serialize the struct and stores it in the cache as a `Bytes` object.
    ///
    /// # Arguments
    ///
    /// * `key` - The key to store the item under.
    /// * `value` - Any struct that implements `Serialize`.
    ///
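    /// # Example
    ///
    /// A minimal sketch, assuming a hypothetical `User` struct that derives `Serialize` and `Deserialize`:
    ///
    /// ```
    /// use local_lru::LocalCache;
    /// use serde::{Deserialize, Serialize};
    ///
    /// // `User` is just an illustrative type; any type implementing Serialize/Deserialize works.
    /// #[derive(Serialize, Deserialize, Debug, PartialEq)]
    /// struct User {
    ///     name: String,
    ///     age: u32,
    /// }
    ///
    /// let cache = LocalCache::initialize(2, 60);
    /// cache.add_struct("user1", User { name: "Ada".to_string(), age: 36 });
    ///
    /// let user: Option<User> = cache.get_struct("user1");
    /// assert_eq!(user, Some(User { name: "Ada".to_string(), age: 36 }));
    /// ```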
    pub fn add_struct<T: Serialize>(&self, key: &str, value: T) {
        let bytes = bincode::serialize(&value).unwrap(); // TODO: handle error
        self.add_item(key, Bytes::from(bytes));
    }

    /// Wrapper function to get a struct from the cache.
    /// It uses bincode to deserialize the `Bytes` object back into a struct.
    ///
    /// # Arguments
    ///
    /// * `key` - The key to look up.
    ///
    /// # Returns
    ///
    /// An `Option` containing the item if it exists and deserializes successfully, or `None` otherwise.
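    ///
    /// # Example
    ///
    /// A small sketch showing the `None` case for a key that was never stored (see `add_struct` above for the round-trip case):
    ///
    /// ```
    /// use local_lru::LocalCache;
    ///
    /// let cache = LocalCache::initialize(2, 60);
    /// let missing: Option<String> = cache.get_struct("missing_key");
    /// assert_eq!(missing, None);
    /// ```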
    pub fn get_struct<T: DeserializeOwned>(&self, key: &str) -> Option<T> {
        let bytes = self.get_item(key)?;
        bincode::deserialize(&bytes).ok()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;
    use serde::Serialize;
    use std::thread;
    use std::thread::sleep;
    use std::time::Duration;

    #[test]
    fn test_capacity_based_eviction() {
        let cache = LocalCache::initialize(3, 60);

        cache.add_item("key1", Bytes::from("value1"));
        cache.add_item("key2", Bytes::from("value2"));
        cache.add_item("key3", Bytes::from("value3"));

        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
        assert_eq!(cache.get_item("key2"), Some(Bytes::from("value2")));
        assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));

        // Adding a fourth item should evict the least recently used item (key1)
        cache.add_item("key4", Bytes::from("value4"));

        assert_eq!(cache.get_item("key1"), None);
        assert_eq!(cache.get_item("key2"), Some(Bytes::from("value2")));
        assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));
        assert_eq!(cache.get_item("key4"), Some(Bytes::from("value4")));
    }

    #[test]
    fn test_get_item_updates_order() {
        let cache = LocalCache::initialize(3, 60);

        cache.add_item("key1", Bytes::from("value1"));
        cache.add_item("key2", Bytes::from("value2"));
        cache.add_item("key3", Bytes::from("value3"));

        // Access key1, making it the most recently used
        cache.get_item("key1");

        // Add a new item, which should evict the least recently used (now key2)
        cache.add_item("key4", Bytes::from("value4"));

        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
        assert_eq!(cache.get_item("key2"), None);
        assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));
        assert_eq!(cache.get_item("key4"), Some(Bytes::from("value4")));
    }

    #[test]
    fn test_ttl_expiration() {
        let cache = LocalCache::initialize(3, 2); // TTL of 2 seconds

        cache.add_item("key1", Bytes::from("value1"));

        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));

        // Wait for 3 seconds (longer than TTL)
        sleep(Duration::from_secs(3));

        // The item should now be expired
        assert_eq!(cache.get_item("key1"), None);
    }

    #[test]
    fn test_no_ttl_expiration() {
        let cache = LocalCache::initialize(3, 0); // TTL of 0 seconds means no expiration

        cache.add_item("key1", Bytes::from("value1"));

        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));

        // Wait for 3 seconds
        sleep(Duration::from_secs(3));

        // The item should still be present as there's no TTL
        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
    }

    #[test]
    fn test_add_and_get_struct() {
        #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
        struct TestStruct {
            field1: String,
            field2: i32,
        }

        let cache = LocalCache::initialize(3, 60);

        let test_struct = TestStruct {
            field1: "Hello".to_string(),
            field2: 42,
        };

        // Add the struct to the cache
        cache.add_struct("test_key", test_struct.clone());

        // Retrieve the struct from the cache
        let retrieved_struct: Option<TestStruct> = cache.get_struct("test_key");

        // Assert that the retrieved struct matches the original
        assert_eq!(retrieved_struct, Some(test_struct.clone()));

        // Test with a non-existent key
        let non_existent: Option<TestStruct> = cache.get_struct("non_existent_key");
        assert_eq!(non_existent, None);
    }

    #[test]
    fn test_thread_local_isolation() {
        let cache = LocalCache::initialize(3, 60);
        let cache_clone = cache.clone(); // Clone for the thread

        // Add item in main thread
        cache.add_item("main_key", Bytes::from("main_value"));

        let thread_handle = thread::spawn(move || {
            cache_clone.get_item("main_key"); // Use clone in thread
            cache_clone.add_item("thread_key", Bytes::from("thread_value"));
        });

        thread_handle.join().unwrap();

        assert_eq!(cache.get_item("main_key"), Some(Bytes::from("main_value")));
        assert_eq!(cache.get_item("thread_key"), None);
    }
}
261}