// local_lru/lib.rs
//! [`LocalCache`] is a thread safe and lock free implementation of LRU caching.
//! Its speed and thread-safety are based on using thread-local storage rather than locking. This means that each thread has its own cache, and the cache is not shared between threads.
//!
//! # Example
//!
//! ```
//! use local_lru::LocalCache;
//! use bytes::Bytes;
//! // Create a new cache with a capacity of 2 items and a TTL of 60 seconds
//! let cache = LocalCache::new(2, 60);
//! cache.add_item("key1", Bytes::from("value1"));
//! let item = cache.get_item("key1");
//! ```
//! One of the main challenges with LRU caching is that it involves a lot of writes and updates of its internal data structures: each get and set operation in an LRU cache requires updating at least one pointer.
//! This fact diminishes the famous O(1) complexity of LRU cache operations in multithreaded applications, such as web services, which require synchronization and locking mechanisms to ensure thread-safety.
//!
//! The thread-local strategy allows us to create a fast, thread-safe, and lock-free O(1) cache, while sacrificing memory.
//! As such, the cache is suitable for applications that require a high-performance and thread-safe cache, but do not require a large memory footprint.
//! For a rough estimate: caching 1,000,000 entries of 128 bytes each requires about 250MB of memory (including bookkeeping overhead).
//! Because every thread keeps its own independent copy, a web service running LocalCache on 4 cores (4 threads) would therefore have a total memory footprint of around 1GB.
//!
//
//
use bytes::Bytes;
use serde::{de::DeserializeOwned, Serialize};
use std::cell::RefCell;
mod cache;
use cache::LRUCache;
// One cache per thread: every thread that uses a `LocalCache` lazily gets its
// own independent `LRUCache` stored here, which is what makes the crate
// lock-free. The slot holds `None` until the thread's first cache operation.
thread_local! {
    static CACHE: RefCell<Option<LRUCache>> = RefCell::new(None);
}
/// A thread-safe, lock-free LRU cache backed by thread-local storage.
///
/// This struct only carries configuration; the actual per-thread `LRUCache`
/// lives in the `CACHE` thread-local and is created lazily on each thread's
/// first access.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LocalCache {
    // Maximum number of entries each per-thread cache holds before LRU eviction.
    capacity: usize,
    // Per-item time-to-live in seconds; anything less than 1 means no expiration.
    ttl: u64,
}
impl LocalCache {
/// Creates a new LocalCache with the given capacity and ttl.
///
/// # Arguments
///
/// * `capacity` - The maximum number of items the cache can hold before evicting the least recently used item.
/// * `ttl` - The time-to-live for each item in seconds. anything less than 1 means no expiration.
///
/// # Example
///
/// ```
/// use local_lru::LocalCache;
/// use bytes::Bytes;
/// let cache = LocalCache::new(2, 60);
/// cache.add_item("key1", Bytes::from("value1"));
/// assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
/// ```
pub fn new(capacity: usize, ttl: u64) -> Self {
LocalCache { capacity, ttl }
}
fn initialize_cache_if_none(&self) {
CACHE.with(|cache| {
let mut cache = cache.borrow_mut();
if cache.is_none() {
*cache = Some(LRUCache::new(self.capacity, self.ttl));
}
});
}
/// Gets an item from the cache. In LRU cache fetching, the item is moved to the front of the list.
/// # Returns
///
/// An Option containing the item if it exists, or None if it does not.
pub fn get_item(&self, key: &str) -> Option<Bytes> {
self.initialize_cache_if_none();
CACHE.with(|cache| cache.borrow_mut().as_mut().unwrap().get_item(key))
}
/// Adds an item to the cache.
/// # Arguments
///
/// * `key` - The key to add the item for.
/// * `value` - The value to add to the cache represented as `Bytes`.
///
pub fn add_item(&self, key: &str, value: Bytes) {
self.initialize_cache_if_none();
CACHE.with(|cache| {
cache
.borrow_mut()
.as_mut()
.unwrap()
.add_item(key.to_string(), value)
})
}
/// Wrapper function to add a struct to the cache.
/// It simple uses bincode to serialize the struct and add it to the cache as a Bytes object.
/// # Arguments
///
/// * `key` - The key to add the item for.
/// * `value` - Any struct that implements Serialize.
///
pub fn add_struct<T: Serialize>(&self, key: &str, value: T) {
let bytes = bincode::serialize(&value).unwrap(); // TODO: handle error
self.add_item(key, Bytes::from(bytes));
}
/// Wrapper function to get a struct from the cache.
/// It uses bincode to deserialize the Bytes object back into a struct.
/// # Arguments
///
/// * `key` - The key to get the item for.
///
/// # Returns
///
/// An Option containing the item if it exists, or None if it does not.
pub fn get_struct<T: DeserializeOwned>(&self, key: &str) -> Option<T> {
let bytes = self.get_item(key)?;
bincode::deserialize(&bytes).ok()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;
    use serde::Serialize;
    use std::thread::sleep;
    use std::time::Duration;

    // NOTE: the Rust test harness runs each #[test] on its own thread, so every
    // test below sees a fresh thread-local CACHE. This is why the tests do not
    // interfere with one another even though they all share the same
    // thread-local slot.

    // Verifies that inserting past capacity evicts the least recently used key.
    #[test]
    fn test_capacity_based_eviction() {
        let cache = LocalCache::new(3, 60);
        cache.add_item("key1", Bytes::from("value1"));
        cache.add_item("key2", Bytes::from("value2"));
        cache.add_item("key3", Bytes::from("value3"));
        // These gets also refresh recency in key order, leaving key1 the LRU.
        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
        assert_eq!(cache.get_item("key2"), Some(Bytes::from("value2")));
        assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));
        // Adding a fourth item should evict the least recently used item (key1)
        cache.add_item("key4", Bytes::from("value4"));
        assert_eq!(cache.get_item("key1"), None);
        assert_eq!(cache.get_item("key2"), Some(Bytes::from("value2")));
        assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));
        assert_eq!(cache.get_item("key4"), Some(Bytes::from("value4")));
    }

    // Verifies that a get refreshes an entry's recency, changing which key is
    // evicted next.
    #[test]
    fn test_get_item_updates_order() {
        let cache = LocalCache::new(3, 60);
        cache.add_item("key1", Bytes::from("value1"));
        cache.add_item("key2", Bytes::from("value2"));
        cache.add_item("key3", Bytes::from("value3"));
        // Access key1, making it the most recently used
        cache.get_item("key1");
        // Add a new item, which should evict the least recently used (now key2)
        cache.add_item("key4", Bytes::from("value4"));
        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
        assert_eq!(cache.get_item("key2"), None);
        assert_eq!(cache.get_item("key3"), Some(Bytes::from("value3")));
        assert_eq!(cache.get_item("key4"), Some(Bytes::from("value4")));
    }

    // Verifies TTL-based expiration. Uses a real 3-second sleep, which keeps
    // this test slow but deterministic.
    #[test]
    fn test_ttl_expiration() {
        let cache = LocalCache::new(3, 2); // TTL of 2 seconds
        cache.add_item("key1", Bytes::from("value1"));
        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
        // Wait for 3 seconds (longer than TTL)
        sleep(Duration::from_secs(3));
        // The item should now be expired
        assert_eq!(cache.get_item("key1"), None);
    }

    // Verifies that a TTL < 1 disables expiration entirely.
    #[test]
    fn test_no_ttl_expiration() {
        let cache = LocalCache::new(3, 0); // TTL of 0 seconds means no expiration
        cache.add_item("key1", Bytes::from("value1"));
        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
        // Wait for 3 seconds
        sleep(Duration::from_secs(3));
        // The item should still be present as there's no TTL
        assert_eq!(cache.get_item("key1"), Some(Bytes::from("value1")));
    }

    // Round-trips a Serialize/Deserialize struct through the bincode-backed
    // add_struct/get_struct wrappers.
    #[test]
    fn test_add_and_get_struct() {
        #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
        struct TestStruct {
            field1: String,
            field2: i32,
        }
        let cache = LocalCache::new(3, 60);
        let test_struct = TestStruct {
            field1: "Hello".to_string(),
            field2: 42,
        };
        // Add the struct to the cache
        cache.add_struct("test_key", test_struct.clone());
        // Retrieve the struct from the cache
        let retrieved_struct: Option<TestStruct> = cache.get_struct("test_key");
        // Assert that the retrieved struct matches the original
        assert_eq!(retrieved_struct, Some(test_struct.clone()));
        // Test with a non-existent key
        let non_existent: Option<TestStruct> = cache.get_struct("non_existent_key");
        assert_eq!(non_existent, None);
    }
}