1#![cfg(feature = "lru")]
2use core::marker::PhantomData;
5use core::mem::ManuallyDrop;
6use core::num::NonZeroUsize;
7use core::ptr;
8use lru::LruCache;
9use std::borrow::Borrow;
10use std::fmt::{self, Debug};
11use std::hash::Hash;
12
13use crate::HeaplessBTreeLruCache;
14use crate::IndexType;
15
/// Common interface implemented by every LRU-cache backend in this module:
/// the heapless stack caches, the heap-backed `lru::LruCache`, and
/// `SmallLruCache` itself.
pub trait AnyLruCache<K, V> {
    /// Number of entries currently stored.
    fn len(&self) -> usize;
    /// `true` when the cache holds no entries.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Maximum number of entries the cache may hold.
    fn cap(&self) -> NonZeroUsize;
    /// Inserts `key`/`value`, returning the previous value for `key` if any.
    fn put(&mut self, key: K, value: V) -> Option<V>;
    /// Like `put`, but bounded by `cap`. When the backend cannot accept the
    /// pair (e.g. fixed inline storage is full), the rejected pair is handed
    /// back via `Err((key, value))` so the caller can retry elsewhere.
    fn put_with_cap(
        &mut self,
        key: K,
        value: V,
        cap: NonZeroUsize,
    ) -> (Option<V>, Result<(), (K, V)>);
    /// Looks up `key`; on a hit, LRU backends mark it most-recently-used.
    fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Mutable variant of `get`.
    fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Looks up `key` without affecting recency order.
    fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Removes every entry.
    fn clear(&mut self);
    /// Removes and returns the least-recently-used entry.
    fn pop_lru(&mut self) -> Option<(K, V)>;
    /// `true` when `key` is present (no recency update).
    fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Inserts `key`/`value`, returning any displaced entry (either the
    /// replaced pair for the same key or an evicted LRU pair).
    fn push(&mut self, key: K, value: V) -> Option<(K, V)>;
    /// Removes `key`, returning its value if it was present.
    fn pop<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Removes `key`, returning the owned `(key, value)` pair if present.
    fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Marks `key` most-recently-used (no-op when absent).
    fn promote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Marks `key` least-recently-used (no-op when absent).
    fn demote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized;
    /// Borrows the least-recently-used entry without removing it.
    fn peek_lru(&self) -> Option<(&K, &V)>;
}
87
/// Capability trait for backends that can hand out borrowing iterators.
/// Kept separate from [`AnyLruCache`] because the iterator associated types
/// need a lifetime parameter.
pub trait LruIteratorSupport<'a, K: 'a, V: 'a> {
    /// Shared-borrow iterator over `(&K, &V)` pairs.
    type Iter: Iterator<Item = (&'a K, &'a V)>;
    /// Iterator yielding `(&K, &mut V)` pairs.
    type IterMut: Iterator<Item = (&'a K, &'a mut V)>;
    /// Iterates all entries by shared reference.
    fn iter(&'a self) -> Self::Iter;
    /// Iterates all entries with mutable access to the values.
    fn iter_mut(&'a mut self) -> Self::IterMut;
}
99
/// Adapter impl: `lru::LruCache` already provides every operation 1:1, so
/// each method simply forwards to the inherent method of the same name.
impl<K: Hash + Eq + Ord, V> AnyLruCache<K, V> for LruCache<K, V> {
    fn len(&self) -> usize {
        self.len()
    }
    fn cap(&self) -> NonZeroUsize {
        self.cap()
    }
    fn put(&mut self, key: K, value: V) -> Option<V> {
        self.put(key, value)
    }
    // `cap` is ignored: a heap-backed LruCache evicts internally and can
    // always accept a put, so the result is unconditionally `Ok(())`.
    fn put_with_cap(
        &mut self,
        key: K,
        value: V,
        _cap: NonZeroUsize,
    ) -> (Option<V>, Result<(), (K, V)>) {
        (self.put(key, value), Ok(()))
    }
    fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get(key)
    }
    fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get_mut(key)
    }
    fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.peek(key)
    }
    fn clear(&mut self) {
        self.clear();
    }
    fn pop_lru(&mut self) -> Option<(K, V)> {
        self.pop_lru()
    }
    fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.contains(key)
    }
    fn push(&mut self, key: K, value: V) -> Option<(K, V)> {
        self.push(key, value)
    }
    fn pop<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.pop(key)
    }
    fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.pop_entry(key)
    }
    fn promote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.promote(key)
    }
    fn demote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.demote(key)
    }
    fn peek_lru(&self) -> Option<(&K, &V)> {
        self.peek_lru()
    }
}
187
/// Iterator support for the heap backend: forwards to `lru`'s own iterators.
impl<'a, K: Hash + Eq + Ord + 'a, V: 'a> LruIteratorSupport<'a, K, V> for LruCache<K, V> {
    type Iter = lru::Iter<'a, K, V>;
    type IterMut = lru::IterMut<'a, K, V>;
    fn iter(&'a self) -> Self::Iter {
        self.iter()
    }
    fn iter_mut(&'a mut self) -> Self::IterMut {
        self.iter_mut()
    }
}
198
/// Untagged storage for the two cache backends. The enclosing
/// `SmallLruCache` carries the tag (`on_stack`) saying which variant is
/// live, and is responsible for dropping exactly that variant — hence the
/// `ManuallyDrop` wrappers.
pub union LruData<K, V, S> {
    /// Inline, heapless backend; live while the cache is "on the stack".
    pub stack: ManuallyDrop<S>,
    /// Heap-allocated `lru::LruCache`; live after spilling.
    pub heap: ManuallyDrop<LruCache<K, V>>,
}
206
/// An LRU cache that starts out storing entries inline in a heapless backend
/// `S` (default: `HeaplessBTreeLruCache` with inline capacity `N`) and
/// transparently spills into a heap-allocated `lru::LruCache` once the
/// inline storage rejects an insert.
pub struct SmallLruCache<
    K,
    V,
    const N: usize,
    I: IndexType = u8,
    S = HeaplessBTreeLruCache<K, V, N, I>,
> where
    S: AnyLruCache<K, V>,
{
    // Cached entry count, kept in sync with the active backend by mutators.
    num_entries: usize,
    // Logical capacity; may differ from the inline capacity `N`.
    capacity: NonZeroUsize,
    // Tag for `data`: which union variant is currently live.
    on_stack: bool,
    // Untagged backend storage; `on_stack` above is the discriminant.
    data: LruData<K, V, S>,
    // `I` only parameterizes the default `S`; kept for type identity.
    _phantom_i: PhantomData<I>,
}
227
228impl<K, V, const N: usize, I, S> SmallLruCache<K, V, N, I, S>
229where
230 K: Hash + Eq + Ord + Clone,
231 I: IndexType,
232 S: AnyLruCache<K, V>,
233{
234 pub fn len(&self) -> usize {
236 self.num_entries
237 }
238 pub fn is_empty(&self) -> bool {
240 self.num_entries == 0
241 }
242 pub fn capacity(&self) -> NonZeroUsize {
244 self.capacity
245 }
246
247 pub fn get<Q>(&mut self, key: &Q) -> Option<&V>
249 where
250 K: Borrow<Q>,
251 Q: Hash + Eq + Ord + ?Sized,
252 {
253 if self.on_stack {
254 unsafe { (*self.data.stack).get(key) }
255 } else {
256 unsafe { (*self.data.heap).get(key) }
257 }
258 }
259
260 pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
262 where
263 K: Borrow<Q>,
264 Q: Hash + Eq + Ord + ?Sized,
265 {
266 if self.on_stack {
267 unsafe { (*self.data.stack).get_mut(key) }
268 } else {
269 unsafe { (*self.data.heap).get_mut(key) }
270 }
271 }
272
273 pub fn peek<Q>(&self, key: &Q) -> Option<&V>
275 where
276 K: Borrow<Q>,
277 Q: Hash + Eq + Ord + ?Sized,
278 {
279 if self.on_stack {
280 unsafe { (*self.data.stack).peek(key) }
281 } else {
282 unsafe { (*self.data.heap).peek(key) }
283 }
284 }
285
286 pub fn put(&mut self, key: K, value: V) -> Option<V> {
291 if self.on_stack {
292 let (old_v, res) =
293 unsafe { (*self.data.stack).put_with_cap(key, value, self.capacity) };
294 if res.is_ok() {
295 self.num_entries = unsafe { (*self.data.stack).len() };
296 return old_v;
297 } else {
298 let (k, v) = res.err().unwrap();
299 self.spill_to_heap();
300 let old = unsafe { (*self.data.heap).put(k, v) };
301 self.num_entries = unsafe { (*self.data.heap).len() };
302 return old;
303 }
304 }
305 let old = unsafe { (*self.data.heap).put(key, value) };
306 self.num_entries = unsafe { (*self.data.heap).len() };
307 old
308 }
309
310 fn spill_to_heap(&mut self) {
312 if !self.on_stack {
313 return;
314 }
315 let mut heap = LruCache::new(self.capacity);
316 unsafe {
317 let mut stack = ManuallyDrop::take(&mut self.data.stack);
318 while let Some((k, v)) = stack.pop_lru() {
319 heap.put(k, v);
320 }
321 ptr::write(&mut self.data.heap, ManuallyDrop::new(heap));
322 self.on_stack = false;
323 }
324 }
325
326 pub fn clear(&mut self) {
328 if self.on_stack {
329 unsafe { (*self.data.stack).clear() }
330 } else {
331 unsafe { (*self.data.heap).clear() }
332 }
333 self.num_entries = 0;
334 }
335
336 pub fn is_on_stack(&self) -> bool {
338 self.on_stack
339 }
340
341 pub fn contains<Q>(&self, key: &Q) -> bool
343 where
344 K: Borrow<Q>,
345 Q: Hash + Eq + Ord + ?Sized,
346 {
347 if self.on_stack {
348 unsafe { (*self.data.stack).contains(key) }
349 } else {
350 unsafe { (*self.data.heap).contains(key) }
351 }
352 }
353
354 pub fn push(&mut self, key: K, value: V) -> Option<(K, V)> {
357 if self.on_stack {
358 let res = unsafe { (*self.data.stack).push(key, value) };
359 self.num_entries = unsafe { (*self.data.stack).len() };
360 res
361 } else {
362 let res = unsafe { (*self.data.heap).push(key, value) };
363 self.num_entries = unsafe { (*self.data.heap).len() };
364 res
365 }
366 }
367
368 pub fn pop<Q>(&mut self, key: &Q) -> Option<V>
370 where
371 K: Borrow<Q>,
372 Q: Hash + Eq + Ord + ?Sized,
373 {
374 let res = if self.on_stack {
375 unsafe { (*self.data.stack).pop(key) }
376 } else {
377 unsafe { (*self.data.heap).pop(key) }
378 };
379 if res.is_some() {
380 self.num_entries -= 1;
381 }
382 res
383 }
384
385 pub fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
387 where
388 K: Borrow<Q>,
389 Q: Hash + Eq + Ord + ?Sized,
390 {
391 let res = if self.on_stack {
392 unsafe { (*self.data.stack).pop_entry(key) }
393 } else {
394 unsafe { (*self.data.heap).pop_entry(key) }
395 };
396 if res.is_some() {
397 self.num_entries -= 1;
398 }
399 res
400 }
401
402 pub fn pop_lru(&mut self) -> Option<(K, V)> {
404 let res = if self.on_stack {
405 unsafe { (*self.data.stack).pop_lru() }
406 } else {
407 unsafe { (*self.data.heap).pop_lru() }
408 };
409 if res.is_some() {
410 self.num_entries -= 1;
411 }
412 res
413 }
414
415 pub fn promote<Q>(&mut self, key: &Q)
417 where
418 K: Borrow<Q>,
419 Q: Hash + Eq + Ord + ?Sized,
420 {
421 if self.on_stack {
422 unsafe { (*self.data.stack).promote(key) }
423 } else {
424 unsafe { (*self.data.heap).promote(key) }
425 }
426 }
427
428 pub fn demote<Q>(&mut self, key: &Q)
430 where
431 K: Borrow<Q>,
432 Q: Hash + Eq + Ord + ?Sized,
433 {
434 if self.on_stack {
435 unsafe { (*self.data.stack).demote(key) }
436 } else {
437 unsafe { (*self.data.heap).demote(key) }
438 }
439 }
440
441 pub fn peek_lru(&self) -> Option<(&K, &V)> {
443 if self.on_stack {
444 unsafe { (*self.data.stack).peek_lru() }
445 } else {
446 unsafe { (*self.data.heap).peek_lru() }
447 }
448 }
449
450 pub fn resize(&mut self, cap: NonZeroUsize) {
453 if self.on_stack {
454 if cap.get() > N {
455 self.spill_to_heap();
456 unsafe { (*self.data.heap).resize(cap) };
457 }
458 } else {
459 unsafe { (*self.data.heap).resize(cap) };
460 }
461 self.capacity = cap;
462 }
463
464 pub fn get_or_insert<F>(&mut self, key: K, f: F) -> &V
466 where
467 F: FnOnce() -> V,
468 {
469 if self.contains(&key) {
470 self.get(&key).unwrap()
471 } else {
472 self.put(key.clone(), f());
473 self.get(&key).unwrap()
474 }
475 }
476
477 pub fn get_or_insert_mut<F>(&mut self, key: K, f: F) -> &mut V
479 where
480 F: FnOnce() -> V,
481 {
482 if self.contains(&key) {
483 self.get_mut(&key).unwrap()
484 } else {
485 self.put(key.clone(), f());
486 self.get_mut(&key).unwrap()
487 }
488 }
489
490 pub fn iter(&self) -> Iter<'_, K, V, N, I, S>
492 where
493 for<'a> S: LruIteratorSupport<'a, K, V>,
494 {
495 if self.on_stack {
496 Iter::Stack(unsafe { (*self.data.stack).iter() })
497 } else {
498 Iter::Heap(unsafe { (*self.data.heap).iter() })
499 }
500 }
501
502 pub fn iter_mut(&mut self) -> IterMut<'_, K, V, N, I, S>
504 where
505 for<'a> S: LruIteratorSupport<'a, K, V>,
506 {
507 if self.on_stack {
508 IterMut::Stack(unsafe { (*self.data.stack).iter_mut() })
509 } else {
510 IterMut::Heap(unsafe { (*self.data.heap).iter_mut() })
511 }
512 }
513}
514
/// Iterator support for `SmallLruCache`: forwards to the inherent
/// `iter`/`iter_mut`, which dispatch on the active backend.
impl<'a, K, V, const N: usize, I, S> LruIteratorSupport<'a, K, V> for SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone + 'a,
    V: 'a,
    I: IndexType,
    S: AnyLruCache<K, V> + for<'b> LruIteratorSupport<'b, K, V>,
{
    type Iter = Iter<'a, K, V, N, I, S>;
    type IterMut = IterMut<'a, K, V, N, I, S>;
    fn iter(&'a self) -> Self::Iter {
        self.iter()
    }
    fn iter_mut(&'a mut self) -> Self::IterMut {
        self.iter_mut()
    }
}
531
impl<K, V, const N: usize, I, S> SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + Default,
{
    /// Creates an empty cache with logical capacity `cap`, starting on the
    /// inline (stack) backend.
    pub fn new(cap: NonZeroUsize) -> Self {
        // Compile-time guard: a huge `N` inflates the inline backend (and
        // thus the whole struct) silently; fail the build instead.
        const {
            assert!(
                std::mem::size_of::<Self>() <= 16 * 1024,
                "SmallLruCache is too large! The struct size exceeds the 16KB limit. Reduce N."
            );
        }
        Self {
            num_entries: 0,
            capacity: cap,
            on_stack: true,
            // Only the `stack` variant is initialized; `on_stack: true`
            // records that it is the live one.
            data: LruData {
                stack: ManuallyDrop::new(S::default()),
            },
            _phantom_i: PhantomData,
        }
    }
}
557
558impl<K: Hash + Eq + Ord + Clone, V, const N: usize, I: IndexType, S> Default
559 for SmallLruCache<K, V, N, I, S>
560where
561 S: AnyLruCache<K, V> + Default,
562{
563 fn default() -> Self {
564 Self::new(NonZeroUsize::new(N.max(1)).unwrap())
565 }
566}
567
568impl<K, V, const N: usize, I, S> AnyLruCache<K, V> for SmallLruCache<K, V, N, I, S>
569where
570 K: Hash + Eq + Ord + Clone,
571 I: IndexType,
572 S: AnyLruCache<K, V>,
573{
574 fn len(&self) -> usize {
575 self.num_entries
576 }
577 fn cap(&self) -> NonZeroUsize {
578 self.capacity
579 }
580 fn put(&mut self, key: K, value: V) -> Option<V> {
581 self.put(key, value)
582 }
583 fn put_with_cap(
584 &mut self,
585 key: K,
586 value: V,
587 cap: NonZeroUsize,
588 ) -> (Option<V>, Result<(), (K, V)>) {
589 let (old, res) = if self.on_stack {
590 unsafe { (*self.data.stack).put_with_cap(key, value, cap) }
591 } else {
592 (unsafe { (*self.data.heap).put(key, value) }, Ok(()))
593 };
594 if res.is_ok() && old.is_none() {
595 self.num_entries += 1;
596 }
597 (old, res)
598 }
599 fn get<Q>(&mut self, key: &Q) -> Option<&V>
600 where
601 K: Borrow<Q>,
602 Q: Hash + Eq + Ord + ?Sized,
603 {
604 self.get(key)
605 }
606 fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
607 where
608 K: Borrow<Q>,
609 Q: Hash + Eq + Ord + ?Sized,
610 {
611 self.get_mut(key)
612 }
613 fn peek<Q>(&self, key: &Q) -> Option<&V>
614 where
615 K: Borrow<Q>,
616 Q: Hash + Eq + Ord + ?Sized,
617 {
618 self.peek(key)
619 }
620 fn clear(&mut self) {
621 self.clear();
622 }
623 fn pop_lru(&mut self) -> Option<(K, V)> {
624 self.pop_lru()
625 }
626 fn contains<Q>(&self, key: &Q) -> bool
627 where
628 K: Borrow<Q>,
629 Q: Hash + Eq + Ord + ?Sized,
630 {
631 self.contains(key)
632 }
633 fn push(&mut self, key: K, value: V) -> Option<(K, V)> {
634 self.push(key, value)
635 }
636 fn pop<Q>(&mut self, key: &Q) -> Option<V>
637 where
638 K: Borrow<Q>,
639 Q: Hash + Eq + Ord + ?Sized,
640 {
641 self.pop(key)
642 }
643 fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
644 where
645 K: Borrow<Q>,
646 Q: Hash + Eq + Ord + ?Sized,
647 {
648 self.pop_entry(key)
649 }
650 fn promote<Q>(&mut self, key: &Q)
651 where
652 K: Borrow<Q>,
653 Q: Hash + Eq + Ord + ?Sized,
654 {
655 self.promote(key)
656 }
657 fn demote<Q>(&mut self, key: &Q)
658 where
659 K: Borrow<Q>,
660 Q: Hash + Eq + Ord + ?Sized,
661 {
662 self.demote(key)
663 }
664 fn peek_lru(&self) -> Option<(&K, &V)> {
665 self.peek_lru()
666 }
667}
668
/// Manual drop: exactly one union variant is live at any time, so only that
/// one may be dropped.
impl<K, V, const N: usize, I, S> Drop for SmallLruCache<K, V, N, I, S>
where
    I: IndexType,
    S: AnyLruCache<K, V>,
{
    fn drop(&mut self) {
        unsafe {
            if self.on_stack {
                // SAFETY: `on_stack` says the stack variant is live.
                ManuallyDrop::drop(&mut self.data.stack);
            } else {
                // SAFETY: otherwise the heap variant is live.
                ManuallyDrop::drop(&mut self.data.heap);
            }
        }
    }
}
684
/// Deep clone: clones only the live union variant and copies the metadata,
/// so the new cache starts in the same stack/heap mode as the original.
impl<K, V, const N: usize, I, S> Clone for SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    V: Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + Clone,
{
    fn clone(&self) -> Self {
        if self.on_stack {
            Self {
                num_entries: self.num_entries,
                capacity: self.capacity,
                on_stack: true,
                data: LruData {
                    // SAFETY: `on_stack` says the stack variant is live.
                    stack: ManuallyDrop::new(unsafe { (*self.data.stack).clone() }),
                },
                _phantom_i: PhantomData,
            }
        } else {
            Self {
                num_entries: self.num_entries,
                capacity: self.capacity,
                on_stack: false,
                data: LruData {
                    // SAFETY: otherwise the heap variant is live.
                    heap: ManuallyDrop::new(unsafe { (*self.data.heap).clone() }),
                },
                _phantom_i: PhantomData,
            }
        }
    }
}
716
/// Debug output shows only metadata (mode, capacity, count) — entries are
/// intentionally not printed.
impl<K, V, const N: usize, I, S> Debug for SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone + Debug,
    V: Debug,
    I: IndexType,
    S: AnyLruCache<K, V>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SmallLruCache")
            .field("on_stack", &self.on_stack)
            .field("capacity", &self.capacity)
            .field("num_entries", &self.num_entries)
            .finish()
    }
}
732
733impl<K, V, const N: usize, I, S> FromIterator<(K, V)> for SmallLruCache<K, V, N, I, S>
734where
735 K: Hash + Eq + Ord + Clone,
736 I: IndexType,
737 S: AnyLruCache<K, V> + Default,
738{
739 fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
740 let mut cache = Self::default();
741 for (k, v) in iter {
742 cache.put(k, v);
743 }
744 cache
745 }
746}
747
748impl<K, V, const N: usize, I, S> Extend<(K, V)> for SmallLruCache<K, V, N, I, S>
749where
750 K: Hash + Eq + Ord + Clone,
751 I: IndexType,
752 S: AnyLruCache<K, V>,
753{
754 fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
755 for (k, v) in iter {
756 self.put(k, v);
757 }
758 }
759}
760
/// Debug for the borrowing iterator. The heap arm prints a fixed tag because
/// `lru::Iter` does not implement `Debug` here.
impl<'a, K: 'a, V: 'a, const N: usize, I: IndexType, S> Debug for Iter<'a, K, V, N, I, S>
where
    S: AnyLruCache<K, V> + LruIteratorSupport<'a, K, V>,
    S::Iter: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Iter::Stack(it) => f.debug_tuple("Iter::Stack").field(it).finish(),
            Iter::Heap(_) => f.write_str("Iter::Heap(lru::Iter)"),
            // `_Phantom` is never constructed.
            Iter::_Phantom(_) => unreachable!(),
        }
    }
}
774
/// Borrowing iterator over a `SmallLruCache`, wrapping whichever backend
/// iterator is active. Yields entries most-recently-used first.
pub enum Iter<'a, K: 'a, V: 'a, const N: usize, I: IndexType, S>
where
    S: AnyLruCache<K, V> + LruIteratorSupport<'a, K, V>,
{
    /// Iterating the inline (stack) backend.
    Stack(S::Iter),
    /// Iterating the heap-backed `lru::LruCache`.
    Heap(lru::Iter<'a, K, V>),
    /// Never constructed; only carries the otherwise-unused `I` parameter.
    _Phantom(PhantomData<I>),
}
787
impl<'a, K, V, const N: usize, I, S> Iterator for Iter<'a, K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + LruIteratorSupport<'a, K, V>,
{
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        // Forward to whichever backend iterator this variant wraps.
        match self {
            Iter::Stack(it) => it.next(),
            Iter::Heap(it) => it.next(),
            // `_Phantom` is never constructed.
            Iter::_Phantom(_) => unreachable!(),
        }
    }
}
803
/// Debug for the mutable iterator; mirrors `Debug for Iter`.
impl<'a, K: 'a, V: 'a, const N: usize, I: IndexType, S> Debug for IterMut<'a, K, V, N, I, S>
where
    S: AnyLruCache<K, V> + LruIteratorSupport<'a, K, V>,
    S::IterMut: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            IterMut::Stack(it) => f.debug_tuple("IterMut::Stack").field(it).finish(),
            IterMut::Heap(_) => f.write_str("IterMut::Heap(lru::IterMut)"),
            // `_Phantom` is never constructed.
            IterMut::_Phantom(_) => unreachable!(),
        }
    }
}
817
/// Mutable iterator over a `SmallLruCache`, wrapping whichever backend
/// iterator is active.
pub enum IterMut<'a, K: 'a, V: 'a, const N: usize, I: IndexType, S>
where
    S: AnyLruCache<K, V> + LruIteratorSupport<'a, K, V>,
{
    /// Iterating the inline (stack) backend.
    Stack(S::IterMut),
    /// Iterating the heap-backed `lru::LruCache`.
    Heap(lru::IterMut<'a, K, V>),
    /// Never constructed; only carries the otherwise-unused `I` parameter.
    _Phantom(PhantomData<I>),
}
830
impl<'a, K, V, const N: usize, I, S> Iterator for IterMut<'a, K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + LruIteratorSupport<'a, K, V>,
{
    type Item = (&'a K, &'a mut V);
    fn next(&mut self) -> Option<Self::Item> {
        // Forward to whichever backend iterator this variant wraps.
        match self {
            IterMut::Stack(it) => it.next(),
            IterMut::Heap(it) => it.next(),
            // `_Phantom` is never constructed.
            IterMut::_Phantom(_) => unreachable!(),
        }
    }
}
846
/// Consuming iterator over a `SmallLruCache`, yielding owned `(K, V)` pairs
/// from whichever backend was active when `into_iter` was called.
pub enum IntoIter<K, V, const N: usize, I: IndexType, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + IntoIterator<Item = (K, V)>,
{
    /// Consuming the inline (stack) backend.
    Stack(S::IntoIter),
    /// Consuming the heap-backed `lru::LruCache`.
    Heap(lru::IntoIter<K, V>),
    /// Never constructed; only carries the otherwise-unused `I` parameter.
    _Phantom(PhantomData<I>),
}
861
impl<K, V, const N: usize, I, S> Iterator for IntoIter<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + IntoIterator<Item = (K, V)>,
{
    type Item = (K, V);
    fn next(&mut self) -> Option<Self::Item> {
        // Forward to whichever backend iterator this variant wraps.
        match self {
            IntoIter::Stack(it) => it.next(),
            IntoIter::Heap(it) => it.next(),
            // `_Phantom` is never constructed.
            IntoIter::_Phantom(_) => unreachable!(),
        }
    }
}
877
impl<K, V, const N: usize, I, S> IntoIterator for SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + IntoIterator<Item = (K, V)>,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, N, I, S>;
    /// Consumes the cache, yielding owned `(K, V)` pairs.
    fn into_iter(self) -> Self::IntoIter {
        let on_stack = self.on_stack;
        // SAFETY: the union is moved out by bitwise copy and `self` is then
        // forgotten, so `Drop for SmallLruCache` never runs and sole
        // ownership of the live variant passes to `data` (no double-drop).
        let data = unsafe { ptr::read(&self.data) };
        core::mem::forget(self);

        if on_stack {
            // SAFETY: `on_stack` says the stack variant is the live one.
            IntoIter::Stack(unsafe { ManuallyDrop::into_inner(data.stack) }.into_iter())
        } else {
            // SAFETY: otherwise the heap variant is the live one.
            IntoIter::Heap(unsafe { ManuallyDrop::into_inner(data.heap) }.into_iter())
        }
    }
}
900
/// `for (k, v) in &cache` support: borrows entries via the inherent `iter`.
impl<'a, K, V, const N: usize, I, S> IntoIterator for &'a SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + for<'b> LruIteratorSupport<'b, K, V>,
{
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V, N, I, S>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
913
/// `for (k, v) in &mut cache` support: mutable borrow via `iter_mut`.
impl<'a, K, V, const N: usize, I, S> IntoIterator for &'a mut SmallLruCache<K, V, N, I, S>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
    S: AnyLruCache<K, V> + for<'b> LruIteratorSupport<'b, K, V>,
{
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V, N, I, S>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
926
#[cfg(test)]
mod lru_cache_basic_tests {
    use super::*;
    use crate::HeaplessBTreeLruCache;
    use crate::HeaplessLinearLruCache;
    use crate::HeaplessLruCache;

    // Backend-generic smoke test: insert, look up, touch an entry, then
    // verify entries drain in least-recently-used order.
    fn test_basic_lru<S: AnyLruCache<i32, i32> + Default>() {
        let mut cache = S::default();
        cache.put(1, 10);
        cache.put(2, 20);
        cache.put(3, 30);

        assert_eq!(cache.len(), 3);
        assert_eq!(cache.get(&1), Some(&10));
        assert_eq!(cache.get(&2), Some(&20));
        assert_eq!(cache.get(&3), Some(&30));

        // Touch 1 so it becomes most-recently-used; 2 is now the LRU entry.
        cache.get(&1);

        assert_eq!(cache.peek_lru(), Some((&2, &20)));
        assert_eq!(cache.pop_lru(), Some((2, 20)));
        assert_eq!(cache.pop_lru(), Some((3, 30)));
        assert_eq!(cache.pop_lru(), Some((1, 10)));
        assert!(cache.is_empty());
    }

    #[test]
    fn test_heapless_lru_basic() {
        test_basic_lru::<HeaplessLruCache<i32, i32, 8>>();
    }

    #[test]
    fn test_heapless_btree_lru_basic() {
        test_basic_lru::<HeaplessBTreeLruCache<i32, i32, 10>>();
    }

    #[test]
    fn test_heapless_linear_lru_basic() {
        test_basic_lru::<HeaplessLinearLruCache<i32, i32, 10>>();
    }

    // Growing capacity past N and inserting must transparently move the
    // cache to the heap without losing entries.
    #[test]
    fn test_small_lru_spill() {
        let mut cache: SmallLruCache<i32, i32, 2> = SmallLruCache::default();
        cache.put(1, 10);
        cache.put(2, 20);
        assert!(cache.is_on_stack());

        cache.resize(NonZeroUsize::new(5).unwrap());
        cache.put(3, 30);
        assert!(!cache.is_on_stack());
        assert_eq!(cache.len(), 3);
        assert_eq!(cache.get(&1), Some(&10));
        assert_eq!(cache.get(&2), Some(&20));
        assert_eq!(cache.get(&3), Some(&30));
    }

    // At capacity, inserting a new key evicts the least-recently-used one.
    #[test]
    fn test_small_lru_eviction() {
        let mut cache: SmallLruCache<i32, i32, 2> = SmallLruCache::default();
        cache.put(1, 10);
        cache.put(2, 20);
        cache.put(3, 30);

        assert_eq!(cache.len(), 2);
        assert!(!cache.contains(&1));
    }

    // Borrowing and consuming iteration both yield entries MRU-first.
    #[test]
    fn test_iteration_order() {
        let mut cache: SmallLruCache<i32, i32, 5> = SmallLruCache::default();
        cache.put(1, 10);
        cache.put(2, 20);
        cache.put(3, 30);

        let items: Vec<_> = cache.iter().map(|(&k, &v)| (k, v)).collect();
        assert_eq!(items, vec![(3, 30), (2, 20), (1, 10)]);

        let items_into: Vec<_> = cache.into_iter().collect();
        assert_eq!(items_into, vec![(3, 30), (2, 20), (1, 10)]);
    }

    // demote pushes a key to the LRU end; promote pulls it to the MRU end.
    #[test]
    fn test_promote_demote() {
        let mut cache: SmallLruCache<i32, i32, 3> = SmallLruCache::default();
        cache.put(1, 10);
        cache.put(2, 20);
        cache.put(3, 30);

        cache.demote(&3);
        assert_eq!(cache.peek_lru(), Some((&3, &30)));

        cache.promote(&1);
        let items: Vec<_> = cache.iter().map(|(&k, _)| k).collect();
        assert_eq!(items, vec![1, 2, 3]);
    }
}
1027
#[cfg(test)]
mod lru_cache_coverage_tests {
    use super::*;
    use crate::HeaplessBTreeLruCache;
    use crate::HeaplessLinearLruCache;
    use crate::HeaplessLruCache;
    use std::num::NonZeroUsize;

    // Exercises SmallLruCache exclusively through the AnyLruCache trait.
    #[test]
    fn test_any_lru_cache_trait_implementation() {
        let mut cache: SmallLruCache<i32, i32, 2> = SmallLruCache::default();

        fn check<S: AnyLruCache<i32, i32>>(any_cache: &mut S) {
            assert_eq!(any_cache.len(), 0);
            assert_eq!(any_cache.cap(), NonZeroUsize::new(2).unwrap());

            any_cache.put(1, 10);
            assert_eq!(any_cache.len(), 1);
            assert_eq!(any_cache.get(&1), Some(&10));
            assert_eq!(any_cache.peek(&1), Some(&10));
            assert!(any_cache.contains(&1));

            let _ = any_cache.put_with_cap(2, 20, NonZeroUsize::new(2).unwrap());
            assert_eq!(any_cache.len(), 2);

            // promote then demote leaves 1 at the LRU end.
            any_cache.promote(&1);
            any_cache.demote(&1);
            assert_eq!(any_cache.peek_lru(), Some((&1, &10)));

            // Cache is at capacity: pushing a new key evicts the LRU (1).
            any_cache.push(3, 30); assert!(!any_cache.contains(&1));

            any_cache.pop(&2);
            assert_eq!(any_cache.len(), 1);

            any_cache.clear();
            assert!(any_cache.is_empty());
        }

        check(&mut cache);
    }

    // Smoke-check that every backend's Debug impl formats without panicking.
    #[test]
    fn test_debug_implementations() {
        let cache_hash: HeaplessLruCache<i32, i32, 8> = HeaplessLruCache::new();
        let cache_btree: HeaplessBTreeLruCache<i32, i32, 8> = HeaplessBTreeLruCache::new();
        let cache_linear: HeaplessLinearLruCache<i32, i32, 8> = HeaplessLinearLruCache::new();
        let cache_small: SmallLruCache<i32, i32, 8> = SmallLruCache::default();

        println!("{:?}", cache_hash);
        println!("{:?}", cache_btree);
        println!("{:?}", cache_linear);
        println!("{:?}", cache_small);
    }

    #[test]
    fn test_clear_all_backends() {
        fn test_clear<S: AnyLruCache<i32, i32> + Default>() {
            let mut cache = S::default();
            cache.put(1, 10);
            cache.clear();
            assert!(cache.is_empty());
        }

        test_clear::<HeaplessLruCache<i32, i32, 8>>();
        test_clear::<HeaplessBTreeLruCache<i32, i32, 8>>();
        test_clear::<HeaplessLinearLruCache<i32, i32, 8>>();
        test_clear::<SmallLruCache<i32, i32, 8>>();
    }

    // Clones must be independent: mutating the clone leaves the original.
    #[test]
    fn test_clone_all_backends() {
        fn test_clone<S: AnyLruCache<i32, i32> + Default + Clone>() {
            let mut cache = S::default();
            cache.put(1, 10);
            let mut cloned = cache.clone();
            assert_eq!(cloned.len(), 1);
            cloned.put(2, 20);
            assert_eq!(cache.len(), 1);
        }

        test_clone::<HeaplessLruCache<i32, i32, 8>>();
        test_clone::<HeaplessBTreeLruCache<i32, i32, 8>>();
        test_clone::<HeaplessLinearLruCache<i32, i32, 8>>();
        test_clone::<SmallLruCache<i32, i32, 8>>();
    }

    // iter_mut must give writable access to values on every backend.
    #[test]
    fn test_itermut_all_backends() {
        fn test_itermut<
            S: AnyLruCache<i32, i32> + Default + for<'a> LruIteratorSupport<'a, i32, i32>,
        >() {
            let mut cache = S::default();
            cache.put(1, 10);
            cache.put(2, 20);
            for (_, v) in cache.iter_mut() {
                *v += 1;
            }
            assert_eq!(cache.get(&1), Some(&11));
            assert_eq!(cache.get(&2), Some(&21));
        }

        test_itermut::<HeaplessLruCache<i32, i32, 8>>();
        test_itermut::<HeaplessBTreeLruCache<i32, i32, 8>>();
        test_itermut::<HeaplessLinearLruCache<i32, i32, 8>>();
        test_itermut::<SmallLruCache<i32, i32, 8>>();
    }

    // Runs the full API surface against a SmallLruCache that has spilled
    // to the heap (capacity 5 > inline N = 2).
    #[test]
    fn test_small_lru_heap_mode_coverage() {
        let mut cache: SmallLruCache<i32, i32, 2> =
            SmallLruCache::new(NonZeroUsize::new(5).unwrap());
        cache.put(1, 10);
        cache.put(2, 20);
        cache.put(3, 30); assert!(!cache.is_on_stack());

        fn check_heap<S: AnyLruCache<i32, i32>>(any_cache: &mut S) {
            any_cache.get(&1);
        }
        check_heap(&mut cache);

        assert_eq!(cache.len(), 3);
        assert_eq!(cache.get(&1), Some(&10));
        assert_eq!(cache.get_mut(&1), Some(&mut 10));
        assert_eq!(cache.peek(&1), Some(&10));
        assert!(cache.contains(&1));

        cache.promote(&3);
        cache.demote(&3);
        assert_eq!(cache.peek_lru(), Some((&3, &30)));

        cache.push(4, 40);
        cache.pop_lru();
        cache.pop(&1);
        cache.pop_entry(&2);

        cache.clear();
        assert!(cache.is_empty());
    }

    // Exercises the AnyLruCache adapter impl directly on lru::LruCache.
    #[test]
    fn test_lru_cache_backend_direct_coverage() {
        let mut cache = LruCache::new(NonZeroUsize::new(10).unwrap());

        fn check_lru<S: AnyLruCache<i32, i32>>(any: &mut S) {
            any.put(1, 10);
            let _ = any.put_with_cap(2, 20, NonZeroUsize::new(5).unwrap());
            any.get(&1);
            any.get_mut(&1);
            any.peek(&1);
            any.contains(&1);
            any.promote(&1);
            any.demote(&1);
            any.peek_lru();
            any.pop_lru();
            any.push(3, 30);
            any.pop(&3);
            any.pop_entry(&2);
            any.clear();
        }

        check_lru(&mut cache);
    }
    // Deterministic pseudo-random stress of every operation on every
    // backend; asserts nothing beyond "does not panic".
    #[test]
    fn test_lru_cache_fuzz_operations() {
        // Minimal LCG so the test needs no external RNG crate.
        struct SimpleRng(u64);
        impl SimpleRng {
            fn next(&mut self) -> u64 {
                self.0 = self.0.wrapping_mul(6364136223846793005).wrapping_add(1);
                self.0
            }
            fn gen_range(&mut self, min: i32, max: i32) -> i32 {
                let range = (max - min) as u64;
                if range == 0 {
                    return min;
                }
                min + (self.next() % range) as i32
            }
        }

        fn stress<
            S: AnyLruCache<i32, i32>
                + Default
                + Clone
                + for<'a> LruIteratorSupport<'a, i32, i32>
                + Debug
                + IntoIterator<Item = (i32, i32)>
                + Extend<(i32, i32)>,
        >(
            rng: &mut SimpleRng,
        ) {
            let mut cache = S::default();
            let cap = cache.cap().get();

            for _ in 0..500 {
                let op = rng.gen_range(0, 13);
                let key = rng.gen_range(0, cap as i32 * 2);
                let val = rng.gen_range(0, 100);

                match op {
                    0 => {
                        let _ = cache.put(key, val);
                    }
                    1 => {
                        let _ = cache.get(&key);
                    }
                    2 => {
                        let _ = cache.get_mut(&key);
                    }
                    3 => {
                        let _ = cache.peek(&key);
                    }
                    4 => {
                        let _ = cache.contains(&key);
                    }
                    5 => {
                        let _ = cache.pop(&key);
                    }
                    6 => {
                        cache.promote(&key);
                    }
                    7 => {
                        cache.demote(&key);
                    }
                    8 => {
                        let _ = cache.clone();
                    }
                    9 => {
                        let _ = cache.iter().count();
                        let _ = cache.iter_mut().count();
                        let _ = format!("{:?}", cache);
                    }
                    10 => {
                        // Destructive ops run on a clone to keep the main
                        // cache populated for subsequent iterations.
                        let mut c = cache.clone();
                        let _ = c.pop_entry(&key);
                        let _ = c.pop_lru();
                        let _ = c.peek_lru();
                        c.clear();
                    }
                    11 => {
                        let c = cache.clone();
                        let _ = c.into_iter().count();
                    }
                    12 => {
                        let mut c = cache.clone();
                        c.extend(vec![(key, val), (key + 1, val + 1)]);
                    }
                    _ => {}
                }
            }
        }

        let mut rng = SimpleRng(42);
        stress::<HeaplessLruCache<i32, i32, 16>>(&mut rng);
        stress::<HeaplessBTreeLruCache<i32, i32, 16>>(&mut rng);
        stress::<HeaplessLinearLruCache<i32, i32, 16>>(&mut rng);
        stress::<SmallLruCache<i32, i32, 4>>(&mut rng);
    }

    // get_or_insert(_mut) must call the closure only for absent keys.
    #[test]
    fn test_lru_cache_get_or_insert_variants() {
        let mut cache: SmallLruCache<i32, i32, 2> = SmallLruCache::default();

        let val = cache.get_or_insert(1, || 10);
        assert_eq!(*val, 10);

        let val = cache.get_or_insert(1, || 20);
        assert_eq!(*val, 10);

        let val = cache.get_or_insert_mut(2, || 20);
        assert_eq!(*val, 20);

        let val = cache.get_or_insert_mut(2, || 30);
        assert_eq!(*val, 20);
    }

    // Touches a few remaining paths: same-key replacement and the raw
    // lru::LruCache API used as the heap backend.
    #[test]
    fn test_lru_cache_debug_impls_and_raw_heap_ops() {
        let mut cache: SmallLruCache<i32, i32, 2> = SmallLruCache::default();
        let _ = cache.capacity();
        cache.put(1, 10);
        cache.put(1, 11); let mut lru = lru::LruCache::new(NonZeroUsize::new(5).unwrap());
        lru.put(1, 10);
        let _ = lru.iter().count();
        let _ = lru.iter_mut().count();
        let _ = lru.cap();
    }
}