1extern crate alloc;
12
13use crate::config::SlruCacheConfig;
14use crate::list::{Entry, List};
15use crate::metrics::{CacheMetrics, SlruCacheMetrics};
16use alloc::boxed::Box;
17use alloc::collections::BTreeMap;
18use alloc::string::String;
19use core::borrow::Borrow;
20use core::hash::{BuildHasher, Hash};
21use core::mem;
22use core::num::NonZeroUsize;
23
24#[cfg(feature = "hashbrown")]
25use hashbrown::hash_map::DefaultHashBuilder;
26#[cfg(feature = "hashbrown")]
27use hashbrown::HashMap;
28
29#[cfg(not(feature = "hashbrown"))]
30use std::collections::hash_map::RandomState as DefaultHashBuilder;
31#[cfg(not(feature = "hashbrown"))]
32use std::collections::HashMap;
33
/// Which of the two SLRU lists an entry currently lives in.
///
/// Stored alongside the node pointer in the key map so lookups know which
/// list to operate on without scanning either list.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Location {
    /// Newly inserted entries; first candidates for eviction.
    Probationary,
    /// Entries that were accessed again after insertion.
    Protected,
}
42
/// A single SLRU segment: a probationary LRU list for newly inserted entries,
/// a protected LRU list for entries that were hit again after insertion, and
/// a hash map indexing each key to its list node and current location.
pub(crate) struct SlruSegment<K, V, S = DefaultHashBuilder> {
    /// Capacity configuration (total capacity and protected share).
    config: SlruCacheConfig,

    /// LRU list of entries inserted but not yet re-accessed.
    probationary: List<(K, V)>,

    /// LRU list of entries promoted after at least one hit.
    protected: List<(K, V)>,

    /// Key -> (raw pointer to the owning list node, which list holds it).
    /// Invariant: every pointer stored here is owned by exactly one of
    /// `probationary`/`protected`, and `Location` records which.
    #[allow(clippy::type_complexity)]
    map: HashMap<K, (*mut Entry<(K, V)>, Location), S>,

    /// Hit/miss/promotion/demotion/eviction counters.
    metrics: SlruCacheMetrics,
}
73
// SAFETY: the raw `*mut Entry<(K, V)>` pointers in `map` all point at nodes
// owned by `probationary`/`protected`, which move together with the struct,
// so the whole segment can be sent across threads whenever K, V, S can.
unsafe impl<K: Send, V: Send, S: Send> Send for SlruSegment<K, V, S> {}

// SAFETY: no `&self` method in this file dereferences the stored node
// pointers (only `config`, list/map lengths, and `metrics` are read), so a
// shared reference never touches K or V.
// NOTE(review): comparable caches (e.g. the `lru` crate) require
// `K: Sync, V: Sync` for `Sync`; confirm no current or future `&self` API
// (including `Debug`) can expose `&K`/`&V` before relying on these bounds.
unsafe impl<K: Send, V: Send, S: Sync> Sync for SlruSegment<K, V, S> {}
81
impl<K: Hash + Eq, V: Clone, S: BuildHasher> SlruSegment<K, V, S> {
    /// Builds a segment with the given total/protected capacities and hasher.
    pub(crate) fn with_hasher(
        total_cap: NonZeroUsize,
        protected_cap: NonZeroUsize,
        hash_builder: S,
    ) -> Self {
        let config = SlruCacheConfig::new(total_cap, protected_cap);

        // Probationary gets whatever remains after the protected share.
        // NOTE(review): assumes the config guarantees protected < total;
        // otherwise this `unwrap` panics — confirm `SlruCacheConfig::new`
        // enforces that invariant.
        let probationary_max_size =
            NonZeroUsize::new(config.capacity().get() - config.protected_capacity().get()).unwrap();

        // Byte budget reported to metrics, assuming ~128 bytes per slot.
        let max_cache_size_bytes = config.capacity().get() as u64 * 128;

        SlruSegment {
            config,
            probationary: List::new(probationary_max_size),
            protected: List::new(config.protected_capacity()),
            // Round up to a power of two to avoid rehashing while filling.
            map: HashMap::with_capacity_and_hasher(
                config.capacity().get().next_power_of_two(),
                hash_builder,
            ),
            metrics: SlruCacheMetrics::new(
                max_cache_size_bytes,
                config.protected_capacity().get() as u64,
            ),
        }
    }

    /// Total capacity (probationary + protected).
    #[inline]
    pub(crate) fn cap(&self) -> NonZeroUsize {
        self.config.capacity()
    }

    /// Maximum number of entries the protected list may hold.
    #[inline]
    pub(crate) fn protected_max_size(&self) -> NonZeroUsize {
        self.config.protected_capacity()
    }

    /// Number of entries currently cached (both lists combined).
    #[inline]
    pub(crate) fn len(&self) -> usize {
        self.map.len()
    }

    /// True when the segment holds no entries.
    #[inline]
    pub(crate) fn is_empty(&self) -> bool {
        self.map.is_empty()
    }

    /// Crude per-entry size estimate used only for metrics: the shallow
    /// `size_of` of key and value plus a fixed 64-byte node overhead.
    /// Heap data owned by K/V (e.g. String contents) is not counted.
    fn estimate_object_size(&self, _key: &K, _value: &V) -> u64 {
        mem::size_of::<K>() as u64 + mem::size_of::<V>() as u64 + 64
    }

    /// Read-only access to the segment's metric counters.
    #[inline]
    pub(crate) fn metrics(&self) -> &SlruCacheMetrics {
        &self.metrics
    }

    /// Moves `node` from the probationary list to the MRU end of the
    /// protected list, making room first if protected is full. Returns the
    /// (possibly re-boxed) pointer to the promoted node.
    ///
    /// SAFETY (caller): `node` must be a live pointer currently owned by
    /// `self.probationary`, as recorded in `self.map`.
    unsafe fn promote_to_protected(&mut self, node: *mut Entry<(K, V)>) -> *mut Entry<(K, V)> {
        // Detach the node; we temporarily own it as a Box.
        let boxed_entry = self
            .probationary
            .remove(node)
            .expect("Node should exist in probationary");

        if self.protected.len() >= self.protected_max_size().get() {
            // Demotion pushes one entry into probationary, so if probationary
            // is also full, evict its LRU entry first.
            if self.probationary.len() >= self.probationary.cap().get() {
                if let Some(old_entry) = self.probationary.remove_last() {
                    let old_ptr = Box::into_raw(old_entry);
                    let (old_key, _) = (*old_ptr).get_value();
                    self.map.remove(old_key);
                    // Reclaim and drop the evicted node.
                    let _ = Box::from_raw(old_ptr);
                }
            }
            self.demote_lru_protected();
        }

        // Leak the box; ownership of the raw node passes to the protected
        // list via `attach_from_other_list` below.
        let entry_ptr = Box::into_raw(boxed_entry);

        let (key_ref, _) = (*entry_ptr).get_value();

        // Re-point the map entry at the (new) node address and new location.
        if let Some(map_entry) = self.map.get_mut(key_ref) {
            map_entry.0 = entry_ptr;
            map_entry.1 = Location::Protected;
        }

        // SAFETY: `entry_ptr` is a valid, detached node that no list
        // currently owns; the protected list takes ownership here.
        unsafe {
            self.protected.attach_from_other_list(entry_ptr);
        }

        entry_ptr
    }

    /// Moves the LRU entry of the protected list down into the probationary
    /// list (SLRU demotion) and updates the map and metrics accordingly.
    /// No-op when the protected list is empty.
    unsafe fn demote_lru_protected(&mut self) {
        if let Some(lru_protected) = self.protected.remove_last() {
            let lru_ptr = Box::into_raw(lru_protected);
            let (lru_key, _) = (*lru_ptr).get_value();

            if let Some(entry) = self.map.get_mut(lru_key) {
                entry.0 = lru_ptr;
                entry.1 = Location::Probationary;
            }

            // Ownership of the raw node passes to the probationary list.
            self.probationary.attach_from_other_list(lru_ptr);

            self.metrics.record_demotion();
        }
    }

    /// Looks up `key`, returning a shared reference to its value.
    ///
    /// A hit in probationary promotes the entry to protected (the core SLRU
    /// rule); a hit in protected just refreshes its recency. Misses are NOT
    /// recorded here — callers use `record_miss` for that.
    pub(crate) fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: ?Sized + Hash + Eq,
    {
        let (node, location) = self.map.get(key).copied()?;

        match location {
            // SAFETY: `node` comes from the map, whose pointers are kept in
            // sync with list ownership, so it is valid to dereference.
            Location::Probationary => unsafe {
                let (key_ref, value) = (*node).get_value();
                let object_size = self.estimate_object_size(key_ref, value);
                self.metrics.record_probationary_hit(object_size);

                // `node` may be re-boxed during promotion; use the returned
                // pointer from here on.
                let entry_ptr = self.promote_to_protected(node);

                self.metrics.record_promotion();

                self.metrics.update_segment_sizes(
                    self.probationary.len() as u64,
                    self.protected.len() as u64,
                );

                let (_, v) = (*entry_ptr).get_value();
                Some(v)
            },
            Location::Protected => unsafe {
                let (key_ref, value) = (*node).get_value();
                let object_size = self.estimate_object_size(key_ref, value);
                self.metrics.record_protected_hit(object_size);

                self.protected.move_to_front(node);
                let (_, v) = (*node).get_value();
                Some(v)
            },
        }
    }

    /// Same as [`Self::get`] but returns a mutable reference to the value.
    /// Identical promotion/metrics behavior; mirrors `get` line for line.
    pub(crate) fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: ?Sized + Hash + Eq,
    {
        let (node, location) = self.map.get(key).copied()?;

        match location {
            // SAFETY: see `get` — map pointers track list ownership.
            Location::Probationary => unsafe {
                let (key_ref, value) = (*node).get_value();
                let object_size = self.estimate_object_size(key_ref, value);
                self.metrics.record_probationary_hit(object_size);

                let entry_ptr = self.promote_to_protected(node);

                self.metrics.record_promotion();

                self.metrics.update_segment_sizes(
                    self.probationary.len() as u64,
                    self.protected.len() as u64,
                );

                let (_, v) = (*entry_ptr).get_value_mut();
                Some(v)
            },
            Location::Protected => unsafe {
                let (key_ref, value) = (*node).get_value();
                let object_size = self.estimate_object_size(key_ref, value);
                self.metrics.record_protected_hit(object_size);

                self.protected.move_to_front(node);
                let (_, v) = (*node).get_value_mut();
                Some(v)
            },
        }
    }

    /// Records a cache miss of `object_size` bytes in the core metrics.
    /// Kept separate from `get` so callers decide what counts as a miss.
    #[inline]
    pub(crate) fn record_miss(&mut self, object_size: u64) {
        self.metrics.core.record_miss(object_size);
    }
}
309
impl<K: Hash + Eq + Clone, V, S: BuildHasher> SlruSegment<K, V, S> {
    /// Inserts or updates `key` -> `value`.
    ///
    /// New entries always start in the probationary list (SLRU rule).
    /// Updating an existing key replaces its value in place and refreshes
    /// recency, without changing its segment. Returns the evicted
    /// `(key, value)` pair if inserting forced an eviction, else `None`.
    pub(crate) fn put(&mut self, key: K, value: V) -> Option<(K, V)>
    where
        V: Clone,
    {
        let object_size = self.estimate_object_size(&key, &value);

        // Fast path: key already cached -> in-place update, no promotion.
        if let Some(&(node, location)) = self.map.get(&key) {
            match location {
                // SAFETY: map pointers track list ownership; `node` is live.
                Location::Probationary => unsafe {
                    self.probationary.move_to_front(node);
                    let old_entry = self.probationary.update(node, (key.clone(), value), true);
                    self.metrics.core.record_insertion(object_size);
                    return old_entry.0;
                },
                Location::Protected => unsafe {
                    self.protected.move_to_front(node);
                    let old_entry = self.protected.update(node, (key.clone(), value), true);
                    self.metrics.core.record_insertion(object_size);
                    return old_entry.0;
                },
            }
        }

        let mut evicted = None;

        // At capacity: evict the probationary LRU first; only if the whole
        // probationary list is empty do we evict from protected.
        if self.len() >= self.cap().get() {
            if !self.probationary.is_empty() {
                if let Some(old_entry) = self.probationary.remove_last() {
                    unsafe {
                        let entry_ptr = Box::into_raw(old_entry);
                        let (old_key, old_value) = (*entry_ptr).get_value().clone();
                        let evicted_size = self.estimate_object_size(&old_key, &old_value);
                        self.metrics.record_probationary_eviction(evicted_size);
                        self.map.remove(&old_key);
                        evicted = Some((old_key, old_value));
                        // Reclaim and drop the detached node.
                        let _ = Box::from_raw(entry_ptr);
                    }
                }
            } else if !self.protected.is_empty() {
                if let Some(old_entry) = self.protected.remove_last() {
                    unsafe {
                        let entry_ptr = Box::into_raw(old_entry);
                        let (old_key, old_value) = (*entry_ptr).get_value().clone();
                        let evicted_size = self.estimate_object_size(&old_key, &old_value);
                        self.metrics.record_protected_eviction(evicted_size);
                        self.map.remove(&old_key);
                        evicted = Some((old_key, old_value));
                        let _ = Box::from_raw(entry_ptr);
                    }
                }
            }
        }

        if self.len() < self.cap().get() {
            // Space is available overall; `add_unchecked` presumably skips the
            // probationary list's own capacity check — TODO confirm against
            // `List::add_unchecked`'s contract.
            let node = self.probationary.add_unchecked((key.clone(), value));
            self.map.insert(key, (node, Location::Probationary));
        } else {
            // Defensive fallback: still full (eviction above didn't free a
            // slot). Try a checked add; on failure, force-evict the
            // probationary LRU and retry once.
            if let Some(node) = self.probationary.add((key.clone(), value.clone())) {
                self.map.insert(key, (node, Location::Probationary));
            } else {
                if let Some(old_entry) = self.probationary.remove_last() {
                    unsafe {
                        let entry_ptr = Box::into_raw(old_entry);
                        let (old_key, old_value) = (*entry_ptr).get_value().clone();
                        let evicted_size = self.estimate_object_size(&old_key, &old_value);
                        self.metrics.record_probationary_eviction(evicted_size);
                        self.map.remove(&old_key);
                        evicted = Some((old_key, old_value));
                        let _ = Box::from_raw(entry_ptr);
                    }
                }

                if let Some(node) = self.probationary.add((key.clone(), value)) {
                    self.map.insert(key, (node, Location::Probationary));
                }
            }
        }

        self.metrics.core.record_insertion(object_size);
        self.metrics
            .update_segment_sizes(self.probationary.len() as u64, self.protected.len() as u64);

        evicted
    }

    /// Removes `key` from the cache, returning a clone of its value.
    /// Returns `None` when the key is absent.
    pub(crate) fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: ?Sized + Hash + Eq,
        V: Clone,
    {
        // Map entry is removed first; the location tells us which list owns
        // the node so we can detach and free it.
        let (node, location) = self.map.remove(key)?;

        match location {
            // SAFETY: `node` was owned by the list named by `location`.
            Location::Probationary => unsafe {
                let boxed_entry = self.probationary.remove(node)?;
                let entry_ptr = Box::into_raw(boxed_entry);
                let value = (*entry_ptr).get_value().1.clone();
                // Drop the node after cloning the value out of it.
                let _ = Box::from_raw(entry_ptr);
                Some(value)
            },
            Location::Protected => unsafe {
                let boxed_entry = self.protected.remove(node)?;
                let entry_ptr = Box::into_raw(boxed_entry);
                let value = (*entry_ptr).get_value().1.clone();
                let _ = Box::from_raw(entry_ptr);
                Some(value)
            },
        }
    }

    /// Drops every cached entry. Metrics counters are intentionally left
    /// untouched (lifetime statistics survive a clear).
    pub(crate) fn clear(&mut self) {
        self.map.clear();
        self.probationary.clear();
        self.protected.clear();
    }
}
445
446impl<K, V, S> core::fmt::Debug for SlruSegment<K, V, S> {
448 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
449 f.debug_struct("SlruSegment")
450 .field("capacity", &self.config.capacity())
451 .field("protected_capacity", &self.config.protected_capacity())
452 .field("len", &self.map.len())
453 .finish()
454 }
455}
456
/// A Segmented LRU (SLRU) cache.
///
/// New entries enter a probationary segment; a subsequent hit promotes them
/// into a protected segment, shielding frequently used entries from scans of
/// one-shot keys. This type is a thin public facade over [`SlruSegment`].
#[derive(Debug)]
pub struct SlruCache<K, V, S = DefaultHashBuilder> {
    // All state lives in the single underlying segment.
    segment: SlruSegment<K, V, S>,
}
498
impl<K: Hash + Eq, V: Clone, S: BuildHasher> SlruCache<K, V, S> {
    /// Creates a cache with explicit total/protected capacities and a
    /// caller-supplied hash builder.
    pub fn with_hasher(
        total_cap: NonZeroUsize,
        protected_cap: NonZeroUsize,
        hash_builder: S,
    ) -> Self {
        Self {
            segment: SlruSegment::with_hasher(total_cap, protected_cap, hash_builder),
        }
    }

    /// Total capacity (probationary + protected).
    #[inline]
    pub fn cap(&self) -> NonZeroUsize {
        self.segment.cap()
    }

    /// Maximum size of the protected segment.
    #[inline]
    pub fn protected_max_size(&self) -> NonZeroUsize {
        self.segment.protected_max_size()
    }

    /// Number of entries currently cached.
    #[inline]
    pub fn len(&self) -> usize {
        self.segment.len()
    }

    /// True when the cache holds no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.segment.is_empty()
    }

    /// Looks up `key`; a hit refreshes recency and may promote the entry
    /// from probationary to protected. Takes `&mut self` because hits
    /// reorder internal lists.
    #[inline]
    pub fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: ?Sized + Hash + Eq,
    {
        self.segment.get(key)
    }

    /// Mutable-reference variant of [`Self::get`]; same promotion behavior.
    #[inline]
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: ?Sized + Hash + Eq,
    {
        self.segment.get_mut(key)
    }

    /// Records a cache miss of `object_size` bytes in the metrics.
    #[inline]
    pub fn record_miss(&mut self, object_size: u64) {
        self.segment.record_miss(object_size);
    }
}
586
impl<K: Hash + Eq + Clone, V, S: BuildHasher> SlruCache<K, V, S> {
    /// Inserts or updates `key` -> `value`; new keys start in the
    /// probationary segment. Returns the evicted `(key, value)` pair when
    /// the insertion forced an eviction.
    #[inline]
    pub fn put(&mut self, key: K, value: V) -> Option<(K, V)>
    where
        V: Clone,
    {
        self.segment.put(key, value)
    }

    /// Removes `key`, returning a clone of its value if present.
    #[inline]
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: ?Sized + Hash + Eq,
        V: Clone,
    {
        self.segment.remove(key)
    }

    /// Drops every cached entry (metrics counters are preserved).
    #[inline]
    pub fn clear(&mut self) {
        self.segment.clear()
    }
}
626
impl<K: Hash + Eq, V> SlruCache<K, V>
where
    V: Clone,
{
    /// Creates a cache with the default hasher.
    ///
    /// The requested capacities are routed through `SlruCacheConfig` before
    /// being handed to `with_hasher` (which builds a config again).
    /// NOTE(review): this round-trip only matters if `SlruCacheConfig::new`
    /// normalizes/clamps its inputs — confirm; otherwise it is redundant.
    pub fn new(
        capacity: NonZeroUsize,
        protected_capacity: NonZeroUsize,
    ) -> SlruCache<K, V, DefaultHashBuilder> {
        let config = SlruCacheConfig::new(capacity, protected_capacity);
        SlruCache::with_hasher(
            config.capacity(),
            config.protected_capacity(),
            DefaultHashBuilder::default(),
        )
    }
}
650
impl<K: Hash + Eq, V: Clone, S: BuildHasher> CacheMetrics for SlruCache<K, V, S> {
    /// Snapshot of all metric counters, keyed by metric name.
    fn metrics(&self) -> BTreeMap<String, f64> {
        self.segment.metrics().metrics()
    }

    /// Human-readable name of the eviction algorithm, as reported by the
    /// underlying metrics object.
    fn algorithm_name(&self) -> &'static str {
        self.segment.metrics().algorithm_name()
    }
}
660
#[cfg(test)]
mod tests {
    extern crate std;
    use std::string::ToString;

    use super::*;
    use alloc::string::String;

    /// Shared fixture: total capacity 4, protected capacity 2.
    fn cache_4_2() -> SlruCache<&'static str, i32> {
        SlruCache::new(NonZeroUsize::new(4).unwrap(), NonZeroUsize::new(2).unwrap())
    }

    #[test]
    fn test_slru_basic() {
        let mut cache = cache_4_2();

        // Fill to capacity; nothing evicted yet.
        assert_eq!(cache.put("a", 1), None);
        assert_eq!(cache.put("b", 2), None);
        assert_eq!(cache.put("c", 3), None);
        assert_eq!(cache.put("d", 4), None);

        assert_eq!(cache.len(), 4);

        // Touch "a" and "b" so they get promoted to the protected segment.
        assert_eq!(cache.get(&"a"), Some(&1));
        assert_eq!(cache.get(&"b"), Some(&2));

        // Untouched probationary entries go first: "c", then "d".
        let evicted = cache.put("e", 5);
        assert!(evicted.is_some());
        let (evicted_key, evicted_val) = evicted.unwrap();
        assert_eq!(evicted_key, "c");
        assert_eq!(evicted_val, 3);

        let evicted = cache.put("f", 6);
        assert!(evicted.is_some());
        let (evicted_key, evicted_val) = evicted.unwrap();
        assert_eq!(evicted_key, "d");
        assert_eq!(evicted_val, 4);

        // Protected entries survived; the probationary ones were replaced.
        assert_eq!(cache.get(&"a"), Some(&1));
        assert_eq!(cache.get(&"b"), Some(&2));
        assert_eq!(cache.get(&"c"), None);
        assert_eq!(cache.get(&"d"), None);
        assert_eq!(cache.get(&"e"), Some(&5));
        assert_eq!(cache.get(&"f"), Some(&6));
    }

    #[test]
    fn test_slru_update() {
        let mut cache = cache_4_2();

        cache.put("a", 1);
        cache.put("b", 2);

        // Promote "a" so the update below hits the protected path too.
        assert_eq!(cache.get(&"a"), Some(&1));

        // Re-putting an existing key returns the previous value.
        assert_eq!(cache.put("a", 10).unwrap().1, 1);
        assert_eq!(cache.put("b", 20).unwrap().1, 2);

        assert_eq!(cache.get(&"a"), Some(&10));
        assert_eq!(cache.get(&"b"), Some(&20));
    }

    #[test]
    fn test_slru_remove() {
        let mut cache = cache_4_2();

        cache.put("a", 1);
        cache.put("b", 2);

        // "a" is protected, "b" probationary — remove must handle both.
        assert_eq!(cache.get(&"a"), Some(&1));

        assert_eq!(cache.remove(&"a"), Some(1));
        assert_eq!(cache.remove(&"b"), Some(2));
        assert_eq!(cache.get(&"a"), None);
        assert_eq!(cache.get(&"b"), None);

        // Removing an absent key is a no-op.
        assert_eq!(cache.remove(&"c"), None);
    }

    #[test]
    fn test_slru_clear() {
        let mut cache = cache_4_2();

        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);
        cache.put("d", 4);

        cache.clear();

        assert_eq!(cache.len(), 0);
        assert!(cache.is_empty());

        assert_eq!(cache.get(&"a"), None);
        assert_eq!(cache.get(&"b"), None);
        assert_eq!(cache.get(&"c"), None);
        assert_eq!(cache.get(&"d"), None);
    }

    #[test]
    fn test_slru_complex_values() {
        let mut cache =
            SlruCache::new(NonZeroUsize::new(4).unwrap(), NonZeroUsize::new(2).unwrap());

        #[derive(Debug, Clone, PartialEq)]
        struct ComplexValue {
            id: usize,
            data: String,
        }

        cache.put(
            "a",
            ComplexValue {
                id: 1,
                data: "a-data".to_string(),
            },
        );
        cache.put(
            "b",
            ComplexValue {
                id: 2,
                data: "b-data".to_string(),
            },
        );

        // Mutate in place through get_mut.
        if let Some(value) = cache.get_mut(&"a") {
            value.id = 100;
            value.data = "a-modified".to_string();
        }

        let a = cache.get(&"a").unwrap();
        assert_eq!(a.id, 100);
        assert_eq!(a.data, "a-modified");
    }

    #[test]
    fn test_slru_with_ratio() {
        let mut cache = cache_4_2();

        assert_eq!(cache.cap().get(), 4);
        assert_eq!(cache.protected_max_size().get(), 2);

        assert_eq!(cache.put("a", 1), None);
        assert_eq!(cache.put("b", 2), None);

        // Promote "a"; "b" stays probationary.
        assert_eq!(cache.get(&"a"), Some(&1));

        assert_eq!(cache.put("c", 3), None);
        assert_eq!(cache.put("d", 4), None);

        // "b" is the probationary LRU, so it is the one evicted.
        let result = cache.put("e", 5);
        assert_eq!(result.unwrap().0, "b");

        assert_eq!(cache.get(&"a"), Some(&1));
    }

    #[test]
    fn test_slru_segment_directly() {
        let mut segment: SlruSegment<&str, i32, DefaultHashBuilder> = SlruSegment::with_hasher(
            NonZeroUsize::new(4).unwrap(),
            NonZeroUsize::new(2).unwrap(),
            DefaultHashBuilder::default(),
        );

        assert_eq!(segment.len(), 0);
        assert!(segment.is_empty());
        assert_eq!(segment.cap().get(), 4);
        assert_eq!(segment.protected_max_size().get(), 2);

        segment.put("a", 1);
        segment.put("b", 2);
        assert_eq!(segment.len(), 2);

        assert_eq!(segment.get(&"a"), Some(&1));
        assert_eq!(segment.get(&"b"), Some(&2));
    }

    #[test]
    fn test_slru_concurrent_access() {
        extern crate std;
        use std::sync::{Arc, Mutex};
        use std::thread;
        use std::vec::Vec;

        let cache = Arc::new(Mutex::new(SlruCache::new(
            NonZeroUsize::new(100).unwrap(),
            NonZeroUsize::new(50).unwrap(),
        )));
        let num_threads = 4;
        let ops_per_thread = 100;

        let mut handles: Vec<std::thread::JoinHandle<()>> = Vec::new();

        for thread_id in 0..num_threads {
            let cache = Arc::clone(&cache);
            handles.push(thread::spawn(move || {
                for op in 0..ops_per_thread {
                    let key = std::format!("key_{}_{}", thread_id, op);
                    let mut guard = cache.lock().unwrap();
                    guard.put(key.clone(), op);
                    let _ = guard.get(&key);
                }
            }));
        }

        for handle in handles {
            handle.join().unwrap();
        }

        // The cache must never exceed its configured capacity.
        let mut guard = cache.lock().unwrap();
        assert!(guard.len() <= 100);
        guard.clear();
    }
}