1#![no_std]
2use {
3 alloc::vec::Vec,
4 core::{
5 borrow::Borrow,
6 cmp::Reverse,
7 hash::{BuildHasher, Hash},
8 iter::FusedIterator,
9 sync::atomic::{AtomicU64, Ordering},
10 },
11 hashbrown::{DefaultHashBuilder, HashMap},
12};
13
14extern crate alloc;
15
/// An LRU cache backed by a hash map from key to `(ordinal, value)`.
///
/// Recency is tracked with a monotonically increasing `counter`; each entry
/// records the ordinal of its last insertion or access in an `AtomicU64`,
/// which lets read-only lookups (`get`, `get_key_value`) refresh recency
/// through a shared `&self` reference.
///
/// Eviction is amortized: the map may grow to twice `capacity` before a
/// bulk eviction pass trims it back down to `capacity`.
#[derive(Debug)]
pub struct LruCache<K, V, S = DefaultHashBuilder> {
    // Key -> (last-access ordinal, value).
    cache: HashMap<K, (AtomicU64, V), S>,
    // Monotonic source of access ordinals.
    counter: AtomicU64,
    // Number of entries retained after an eviction pass.
    capacity: usize,
}
28
/// Immutable iterator over a cache's entries, in arbitrary order.
/// Iterating does not update entry recency.
#[derive(Clone)]
pub struct Iter<'a, K: 'a, V: 'a>(hashbrown::hash_map::Iter<'a, K, (AtomicU64, V)>);
35
36pub struct IterMut<'a, K: 'a, V: 'a>(hashbrown::hash_map::IterMut<'a, K, (AtomicU64, V)>);
41
42pub struct IntoIter<K, V>(hashbrown::hash_map::IntoIter<K, (AtomicU64, V)>);
48
49impl<K, V> LruCache<K, V, DefaultHashBuilder> {
50 #[inline]
51 pub fn new(capacity: usize) -> Self {
52 Self {
53 cache: HashMap::with_capacity(capacity.saturating_mul(2)),
54 counter: AtomicU64::default(),
55 capacity,
56 }
57 }
58}
59
60impl<K, V, S> LruCache<K, V, S> {
61 #[inline]
62 pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
63 Self {
64 cache: HashMap::with_capacity_and_hasher(capacity.saturating_mul(2), hasher),
65 counter: AtomicU64::default(),
66 capacity,
67 }
68 }
69
70 #[inline]
71 pub fn hasher(&self) -> &S {
72 self.cache.hasher()
73 }
74}
75
impl<K, V, S> LruCache<K, V, S> {
    /// Returns an iterator over all entries in arbitrary order.
    /// Does not update entry recency.
    #[inline]
    pub fn iter(&self) -> Iter<'_, K, V> {
        Iter(self.cache.iter())
    }

    /// Returns an iterator yielding mutable references to the values, in
    /// arbitrary order. Does not update entry recency.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        IterMut(self.cache.iter_mut())
    }
}
92
impl<K: Eq + Hash + PartialEq, V, S: BuildHasher> LruCache<K, V, S> {
    /// Inserts a key-value pair, returning the value previously stored
    /// under `key`, if any.
    ///
    /// The new entry is stamped with a fresh (most recent) ordinal. May
    /// trigger a bulk eviction pass if the backing map has grown to twice
    /// the configured capacity.
    #[inline]
    pub fn put(&mut self, key: K, value: V) -> Option<V> {
        // Relaxed suffices: ordinals only need monotonicity, not
        // synchronization of other memory.
        let ordinal = self.counter.fetch_add(1, Ordering::Relaxed);
        let old = self
            .cache
            .insert(key, (AtomicU64::new(ordinal), value))
            .map(|(_, value)| value);
        self.maybe_evict();
        old
    }

    // Evicts the least recently used entries, retaining at most
    // `capacity` of the most recently used ones. Runs only when the map
    // has reached twice the capacity, so the O(len) rebuild is amortized
    // over many `put`s.
    fn maybe_evict(&mut self) {
        if self.cache.len() < self.capacity.saturating_mul(2) {
            return;
        }
        // Drain everything, unwrapping the atomics so entries can be
        // ranked by plain u64 ordinals.
        let mut entries: Vec<(K, (u64, V))> = self
            .cache
            .drain()
            .map(|(key, (ordinal, value))| (key, (ordinal.into_inner(), value)))
            .collect();
        // Partition so the `capacity` entries with the LARGEST ordinals
        // (most recent) end up at the front; `Reverse` flips the sort
        // key. With capacity == 0 the pivot index is 0, which is in
        // bounds because this only runs right after an insert.
        entries
            .select_nth_unstable_by_key(self.capacity.saturating_sub(1), |&(_, (ordinal, _))| {
                Reverse(ordinal)
            });
        // Re-insert only the most recent `capacity` entries.
        self.cache.extend(
            entries
                .into_iter()
                .take(self.capacity)
                .map(|(key, (ordinal, value))| (key, (AtomicU64::new(ordinal), value))),
        );
    }

    /// Returns `true` if an entry for `key` exists. Does not update
    /// recency.
    #[inline]
    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.cache.contains_key(key)
    }

    /// Returns a reference to the value for `key`, refreshing its
    /// recency ordinal.
    #[inline]
    pub fn get<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let (ordinal, value) = self.cache.get(key)?;
        // fetch_max (not store): concurrent `get`s through `&self` may
        // race, and max keeps the ordinal from moving backwards.
        ordinal.fetch_max(
            self.counter.fetch_add(1, Ordering::Relaxed),
            Ordering::Relaxed,
        );
        Some(value)
    }

    /// Returns a mutable reference to the value for `key`, refreshing its
    /// recency ordinal.
    #[inline]
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let (ordinal, value) = self.cache.get_mut(key)?;
        // A plain store is enough here: `&mut self` guarantees exclusive
        // access, so no concurrent ordinal update can race.
        ordinal.store(
            self.counter.fetch_add(1, Ordering::Relaxed),
            Ordering::Relaxed,
        );
        Some(value)
    }

    /// Returns the stored key and value for `key`, refreshing the entry's
    /// recency ordinal.
    #[inline]
    pub fn get_key_value<Q>(&self, key: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let (key, (ordinal, value)) = self.cache.get_key_value(key)?;
        // Same reasoning as `get`: fetch_max tolerates racing `&self`
        // lookups.
        ordinal.fetch_max(
            self.counter.fetch_add(1, Ordering::Relaxed),
            Ordering::Relaxed,
        );
        Some((key, value))
    }

    /// Returns a reference to the value for `key` WITHOUT updating its
    /// recency.
    #[inline]
    pub fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.cache.get(key).map(|(_, value)| value)
    }

    /// Returns a mutable reference to the value for `key` WITHOUT
    /// updating its recency.
    #[inline]
    pub fn peek_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.cache.get_mut(key).map(|(_, value)| value)
    }

    /// Returns the stored key and value for `key` WITHOUT updating the
    /// entry's recency.
    #[inline]
    pub fn peek_key_value<Q>(&self, key: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.cache
            .get_key_value(key)
            .map(|(key, (_, value))| (key, value))
    }

    /// Removes the entry for `key`, returning its value if it existed.
    #[inline]
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.cache.remove(key).map(|(_, value)| value)
    }

    /// Removes the entry for `key`, returning the stored key and value if
    /// it existed.
    #[inline]
    pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.cache
            .remove_entry(key)
            .map(|(key, (_, value))| (key, value))
    }

    /// Alias for [`Self::remove`] (presumably for API parity with other
    /// LRU crates — NOTE(review): confirm intent).
    #[inline]
    pub fn pop<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.remove(key)
    }

    /// Alias for [`Self::remove_entry`].
    #[inline]
    pub fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.remove_entry(key)
    }
}
315
316impl<K, V, S> LruCache<K, V, S> {
317 #[inline]
319 pub fn len(&self) -> usize {
320 self.cache.len()
321 }
322
323 #[inline]
325 pub fn is_empty(&self) -> bool {
326 self.cache.is_empty()
327 }
328
329 #[inline]
331 pub fn clear(&mut self) {
332 self.cache.clear();
333 }
334
335 #[inline]
340 pub fn retain<F>(&mut self, mut f: F)
341 where
342 F: FnMut(&K, &mut V) -> bool,
343 {
344 self.cache.retain(|key, (_, value)| f(key, value));
345 }
346}
347
348impl<K: Clone + Eq + Hash + PartialEq, V: Clone, S: Clone + BuildHasher> LruCache<K, V, S> {
349 #[inline]
354 pub fn clone(&mut self) -> Self {
355 let mut cache =
356 HashMap::with_capacity_and_hasher(self.cache.capacity(), self.cache.hasher().clone());
357 cache.extend(self.cache.iter().map(|(key, (ordinal, value))| {
358 let ordinal = AtomicU64::new(ordinal.load(Ordering::Relaxed));
359 (key.clone(), (ordinal, value.clone()))
360 }));
361 Self {
362 cache,
363 counter: AtomicU64::new(self.counter.load(Ordering::Relaxed)),
364 ..*self
365 }
366 }
367}
368
369impl<'a, K, V, S> IntoIterator for &'a LruCache<K, V, S> {
370 type Item = (&'a K, &'a V);
371 type IntoIter = Iter<'a, K, V>;
372
373 #[inline]
374 fn into_iter(self) -> Iter<'a, K, V> {
375 self.iter()
376 }
377}
378
379impl<'a, K, V, S> IntoIterator for &'a mut LruCache<K, V, S> {
380 type Item = (&'a K, &'a mut V);
381 type IntoIter = IterMut<'a, K, V>;
382
383 #[inline]
384 fn into_iter(self) -> IterMut<'a, K, V> {
385 self.iter_mut()
386 }
387}
388
impl<K, V, S> IntoIterator for LruCache<K, V, S> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    /// Consumes the cache, yielding owned `(key, value)` pairs in
    /// arbitrary order.
    #[inline]
    fn into_iter(self) -> IntoIter<K, V> {
        IntoIter(self.cache.into_iter())
    }
}
398
399impl<'a, K, V> Iterator for Iter<'a, K, V> {
400 type Item = (&'a K, &'a V);
401
402 #[inline]
403 fn next(&mut self) -> Option<(&'a K, &'a V)> {
404 self.0.next().map(|(key, (_, value))| (key, value))
405 }
406
407 #[inline]
408 fn size_hint(&self) -> (usize, Option<usize>) {
409 self.0.size_hint()
410 }
411
412 #[inline]
413 fn fold<B, F>(self, init: B, mut f: F) -> B
414 where
415 Self: Sized,
416 F: FnMut(B, Self::Item) -> B,
417 {
418 self.0.fold(init, |acc, entry| {
419 let (key, (_, value)) = entry;
420 f(acc, (key, value))
421 })
422 }
423}
424
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    // Exact length is known from the underlying map iterator.
    #[inline]
    fn len(&self) -> usize {
        self.0.len()
    }
}
431
432impl<K, V> FusedIterator for Iter<'_, K, V> {}
433
434impl<'a, K, V> Iterator for IterMut<'a, K, V> {
435 type Item = (&'a K, &'a mut V);
436
437 #[inline]
438 fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
439 self.0.next().map(|(key, (_, value))| (key, value))
440 }
441
442 #[inline]
443 fn size_hint(&self) -> (usize, Option<usize>) {
444 self.0.size_hint()
445 }
446
447 #[inline]
448 fn fold<B, F>(self, init: B, mut f: F) -> B
449 where
450 Self: Sized,
451 F: FnMut(B, Self::Item) -> B,
452 {
453 self.0.fold(init, |acc, entry| {
454 let (key, (_, value)) = entry;
455 f(acc, (key, value))
456 })
457 }
458}
459
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    // Exact length is known from the underlying map iterator.
    #[inline]
    fn len(&self) -> usize {
        self.0.len()
    }
}
466
467impl<K, V> FusedIterator for IterMut<'_, K, V> {}
468
469impl<K, V> Iterator for IntoIter<K, V> {
470 type Item = (K, V);
471
472 #[inline]
473 fn next(&mut self) -> Option<(K, V)> {
474 self.0.next().map(|(key, (_, value))| (key, value))
475 }
476
477 #[inline]
478 fn size_hint(&self) -> (usize, Option<usize>) {
479 self.0.size_hint()
480 }
481
482 #[inline]
483 fn fold<B, F>(self, init: B, mut f: F) -> B
484 where
485 Self: Sized,
486 F: FnMut(B, Self::Item) -> B,
487 {
488 self.0.fold(init, |acc, entry| {
489 let (key, (_, value)) = entry;
490 f(acc, (key, value))
491 })
492 }
493}
494
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
    // Exact length is known from the underlying map iterator.
    #[inline]
    fn len(&self) -> usize {
        self.0.len()
    }
}
501
502impl<K, V> FusedIterator for IntoIter<K, V> {}
503
#[cfg(test)]
mod tests {
    use {
        super::*,
        ahash::RandomState as AHashRandomState,
        core::{fmt::Debug, num::NonZeroUsize},
        rand::Rng,
        test_case::test_case,
    };

    // Asserts that `key` maps to exactly (`ordinal`, `value`) in the
    // cache's internal map, exposing the recency bookkeeping to tests.
    fn check_entry<K, V, S: BuildHasher, Q>(
        cache: &LruCache<K, V, S>,
        key: &Q,
        ordinal: u64,
        value: V,
    ) where
        K: Hash + Eq + Borrow<Q>,
        Q: Hash + Eq + ?Sized,
        V: Debug + PartialEq<V>,
    {
        let (entry_ordinal, entry_value) = cache.cache.get(key).unwrap();
        assert_eq!(entry_value, &value);
        assert_eq!(entry_ordinal.load(Ordering::Relaxed), ordinal);
    }

    // A zero-capacity cache must evict on every put and never report a
    // stored entry.
    #[test]
    fn test_capacity_zero() {
        let mut cache = LruCache::new(0);

        cache.put("apple", 8);
        assert_eq!(cache.len(), 0);
        assert_eq!(cache.get("apple"), None);
    }

    // Same as above but exercising the custom-hasher constructor.
    #[test_case(DefaultHashBuilder::default())]
    #[test_case(AHashRandomState::default())]
    fn test_capacity_zero_with_hasher<S: BuildHasher>(hasher: S) {
        let mut cache = LruCache::with_capacity_and_hasher(0, hasher);

        cache.put("apple", 8);
        assert_eq!(cache.len(), 0);
        assert_eq!(cache.get("apple"), None);
    }

    // Walks through put/peek/get/remove, checking both values and the
    // exact ordinal/counter progression: peeks leave ordinals untouched,
    // gets bump them, and the map may hold up to 2*capacity - 1 entries
    // before an eviction pass trims it back to `capacity`.
    #[test_case(DefaultHashBuilder::default())]
    #[test_case(AHashRandomState::default())]
    fn test_basics<S: BuildHasher>(hasher: S) {
        let mut cache = LruCache::with_capacity_and_hasher(2, hasher);

        cache.put("apple", 8);
        assert_eq!(cache.len(), 1);
        check_entry(&cache, "apple", 0, 8);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 1);

        // peek/peek_mut must not advance the entry ordinal.
        assert_eq!(cache.peek("apple"), Some(&8));
        check_entry(&cache, "apple", 0, 8);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 1);

        assert_eq!(cache.peek_mut("apple"), Some(&mut 8));
        check_entry(&cache, "apple", 0, 8);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 1);

        // get/get_mut refresh recency: ordinal and counter both advance.
        assert_eq!(cache.get("apple"), Some(&8));
        check_entry(&cache, "apple", 1, 8);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 2);

        assert_eq!(cache.get_mut("apple"), Some(&mut 8));
        check_entry(&cache, "apple", 2, 8);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 3);

        cache.put("banana", 4);
        assert_eq!(cache.len(), 2);
        check_entry(&cache, "banana", 3, 4);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 4);

        // Third entry: still below the 2*capacity eviction threshold.
        cache.put("pear", 2);
        assert_eq!(cache.len(), 3);
        check_entry(&cache, "pear", 4, 2);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 5);

        // Re-putting an existing key refreshes its ordinal in place.
        cache.put("banana", 6);
        assert_eq!(cache.len(), 3);
        check_entry(&cache, "banana", 5, 6);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 6);

        // Fourth put hits len == 2*capacity, triggering eviction of the
        // two least recently used entries ("apple" and "pear").
        cache.put("orange", 3); assert_eq!(cache.len(), 2);
        check_entry(&cache, "banana", 5, 6);
        check_entry(&cache, "orange", 6, 3);
        assert_eq!(cache.counter.load(Ordering::Relaxed), 7);

        assert!(cache.contains_key("banana"));
        assert!(cache.contains_key("orange"));
        assert!(!cache.contains_key("apple"));
        assert!(!cache.contains_key("pear"));

        assert_eq!(cache.remove("banana"), Some(6));
        assert_eq!(cache.remove("banana"), None);

        assert_eq!(cache.remove_entry("orange"), Some(("orange", 3)));
        assert_eq!(cache.remove_entry("orange"), None);

        assert_eq!(cache.len(), 0);
        assert!(cache.is_empty());
    }

    // Cross-checks against the strict `lru` crate: a strict LRU of the
    // same capacity should hold a SUBSET of this cache's entries (this
    // cache retains between capacity and 2*capacity - 1 entries), so any
    // value present in `other` must match this cache's value.
    // NOTE(review): 10_000_000 iterations per case makes this suite very
    // slow; consider reducing or gating behind an expensive-tests flag.
    #[test_case(10, 10, DefaultHashBuilder::default())]
    #[test_case(10, 100, DefaultHashBuilder::default())]
    #[test_case(10, 1_000, DefaultHashBuilder::default())]
    #[test_case(10, 10_000, DefaultHashBuilder::default())]
    #[test_case(100, 10, DefaultHashBuilder::default())]
    #[test_case(100, 100, DefaultHashBuilder::default())]
    #[test_case(100, 1_000, DefaultHashBuilder::default())]
    #[test_case(100, 10_000, DefaultHashBuilder::default())]
    #[test_case(10, 10, AHashRandomState::default())]
    #[test_case(10, 100, AHashRandomState::default())]
    #[test_case(10, 1_000, AHashRandomState::default())]
    #[test_case(10, 10_000, AHashRandomState::default())]
    #[test_case(100, 10, AHashRandomState::default())]
    #[test_case(100, 100, AHashRandomState::default())]
    #[test_case(100, 1_000, AHashRandomState::default())]
    #[test_case(100, 10_000, AHashRandomState::default())]
    fn test_lru_cache_cross_check_subset<S: BuildHasher>(
        capacity: usize,
        num_keys: usize,
        hasher: S,
    ) {
        let mut rng = rand::thread_rng();
        let mut cache = LruCache::<usize, u8, S>::with_capacity_and_hasher(capacity, hasher);
        let mut other = lru::LruCache::<usize, u8>::new(NonZeroUsize::new(capacity).unwrap());
        for _ in 0..10_000_000 {
            let key: usize = rng.gen_range(0..num_keys);
            if rng.gen_ratio(1, 2) {
                // If the strict LRU has the key, this cache must agree.
                let val = other.get(&key);
                assert!(val.is_none() || cache.get(&key) == val);
            } else {
                let val = rng.gen();
                let old = other.put(key, val);
                // Old values must agree whenever the strict LRU had one.
                assert!(cache.put(key, val) == old || old.is_none());
            }
        }
    }

    // Mirror of the subset test: a strict LRU of DOUBLE capacity should
    // hold a SUPERSET of this cache's entries, so any value present in
    // this cache must match the strict LRU's value.
    // NOTE(review): same runtime concern as the subset test above.
    #[test_case(10, 10, DefaultHashBuilder::default())]
    #[test_case(10, 100, DefaultHashBuilder::default())]
    #[test_case(10, 1_000, DefaultHashBuilder::default())]
    #[test_case(10, 10_000, DefaultHashBuilder::default())]
    #[test_case(100, 10, DefaultHashBuilder::default())]
    #[test_case(100, 100, DefaultHashBuilder::default())]
    #[test_case(100, 1_000, DefaultHashBuilder::default())]
    #[test_case(100, 10_000, DefaultHashBuilder::default())]
    #[test_case(10, 10, AHashRandomState::default())]
    #[test_case(10, 100, AHashRandomState::default())]
    #[test_case(10, 1_000, AHashRandomState::default())]
    #[test_case(10, 10_000, AHashRandomState::default())]
    #[test_case(100, 10, AHashRandomState::default())]
    #[test_case(100, 100, AHashRandomState::default())]
    #[test_case(100, 1_000, AHashRandomState::default())]
    #[test_case(100, 10_000, AHashRandomState::default())]
    fn test_lru_cache_cross_check_superset<S: BuildHasher>(
        capacity: usize,
        num_keys: usize,
        hasher: S,
    ) {
        let mut rng = rand::thread_rng();
        let mut cache = LruCache::<usize, u8, S>::with_capacity_and_hasher(capacity, hasher);
        let mut other = lru::LruCache::<usize, u8>::new(NonZeroUsize::new(2 * capacity).unwrap());
        for _ in 0..10_000_000 {
            let key: usize = rng.gen_range(0..num_keys);
            if rng.gen_ratio(1, 2) {
                // If this cache has the key, the larger strict LRU must
                // agree on its value.
                let val = cache.get(&key);
                assert!(val.is_none() || other.get(&key) == val);
            } else {
                let val = rng.gen();
                let old = cache.put(key, val);
                assert!(other.put(key, val) == old || old.is_none());
            }
        }
    }
}