#![cfg(feature = "lru")]
//! A fixed-capacity LRU cache: a `heapless::FnvIndexMap` maps keys to slot
//! indices, and an intrusive doubly-linked list over parallel arrays tracks
//! recency without any heap allocation.

use core::mem::MaybeUninit;
use core::num::NonZeroUsize;
use core::ptr;
use std::borrow::Borrow;
use std::hash::Hash;

use crate::AnyLruCache;
use crate::IndexType;
use heapless::IndexMap;
use heapless::index_map::FnvIndexMap;

pub struct HeaplessLruCache<K, V, const N: usize, I: IndexType = u8> {
    /// Key-to-slot index map.
    pub map: FnvIndexMap<K, I, N>,
    /// Slot storage for keys; only slots reachable from `head` are initialized.
    pub keys: [MaybeUninit<K>; N],
    /// Slot storage for values; mirrors `keys`.
    pub values: [MaybeUninit<V>; N],
    /// Previous links of the recency list (`I::NONE` terminates).
    pub prevs: [I; N],
    /// Next links of the recency list; free slots are also chained through here.
    pub nexts: [I; N],
    /// First slot of the free list, or `I::NONE` when all slots are in use.
    pub free_head: I,
    /// Most-recently-used slot, or `I::NONE` when empty.
    pub head: I,
    /// Least-recently-used slot, or `I::NONE` when empty.
    pub tail: I,
    /// Number of live entries.
    pub num_entries: I,
}

impl<K, V, const N: usize, I: IndexType> HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
{
    pub fn new() -> Self {
        const {
            assert!(
                std::mem::size_of::<Self>() <= 16 * 1024,
                "HeaplessLruCache is too large! The struct size exceeds the 16KB limit. Reduce N."
            );
        }
        // Chain every slot into the free list: slot i links to slot i + 1,
        // and the final link is terminated below.
        let prevs = [I::NONE; N];
        let mut nexts = [I::NONE; N];
        let mut idx = I::ZERO;
        for slot in nexts.iter_mut() {
            idx = idx.inc();
            *slot = idx;
        }
        if N > 0 {
            nexts[N - 1] = I::NONE;
        }

        Self {
            map: IndexMap::new(),
            // SAFETY: an array of `MaybeUninit` is valid in the uninitialized state.
            keys: unsafe { MaybeUninit::uninit().assume_init() },
            values: unsafe { MaybeUninit::uninit().assume_init() },
            prevs,
            nexts,
            free_head: if N > 0 { I::ZERO } else { I::NONE },
            head: I::NONE,
            tail: I::NONE,
            num_entries: I::ZERO,
        }
    }

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.num_entries.as_usize()
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.num_entries.is_zero()
    }

    /// Returns a reference to the value for `key`, promoting the entry to
    /// most-recently-used.
    pub fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(&idx) = self.map.get(key) {
            self.promote_idx(idx);
            Some(unsafe { &*self.values[idx.as_usize()].as_ptr() })
        } else {
            None
        }
    }

    /// Returns a mutable reference to the value for `key`, promoting the
    /// entry to most-recently-used.
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(&idx) = self.map.get(key) {
            self.promote_idx(idx);
            Some(unsafe { &mut *self.values[idx.as_usize()].as_mut_ptr() })
        } else {
            None
        }
    }

    /// Returns a reference to the value for `key` without touching the
    /// recency order.
    pub fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(&idx) = self.map.get(key) {
            Some(unsafe { &*self.values[idx.as_usize()].as_ptr() })
        } else {
            None
        }
    }

    /// Returns the least-recently-used entry without touching the recency
    /// order.
    pub fn peek_lru(&self) -> Option<(&K, &V)> {
        if self.tail != I::NONE {
            let idx = self.tail.as_usize();
            unsafe { Some((&*self.keys[idx].as_ptr(), &*self.values[idx].as_ptr())) }
        } else {
            None
        }
    }

    /// Iterates entries from most to least recently used.
    pub fn iter(&self) -> Iter<'_, K, V, N, I> {
        Iter {
            cache: self,
            curr: self.head,
            remaining: self.num_entries.as_usize(),
        }
    }

    /// Iterates entries mutably from most to least recently used.
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V, N, I> {
        IterMut {
            curr: self.head,
            remaining: self.num_entries.as_usize(),
            keys: &self.keys as *const [MaybeUninit<K>; N],
            values: &mut self.values as *mut [MaybeUninit<V>; N],
            nexts: &self.nexts as *const [I; N],
            _marker: core::marker::PhantomData,
        }
    }

    /// Inserts `key`/`value`, evicting the LRU entry once `cap` is reached.
    /// Returns the replaced value (if the key already existed) and `Err` with
    /// the rejected pair if all `N` slots are occupied.
    pub fn put(&mut self, key: K, value: V, cap: usize) -> (Option<V>, Result<(), (K, V)>) {
        if let Some(&idx) = self.map.get(key.borrow()) {
            let old = unsafe { ptr::replace(self.values[idx.as_usize()].as_mut_ptr(), value) };
            self.promote_idx(idx);
            return (Some(old), Ok(()));
        }

        if self.len() >= cap {
            self.pop_lru_internal();
        }

        if self.len() >= N {
            return (None, Err((key, value)));
        }

        let idx = self.free_head;
        self.free_head = self.nexts[idx.as_usize()];
        self.map.insert(key.clone(), idx).ok();
        unsafe {
            ptr::write(self.keys[idx.as_usize()].as_mut_ptr(), key);
            ptr::write(self.values[idx.as_usize()].as_mut_ptr(), value);
        }
        self.attach_front(idx);
        self.num_entries = self.num_entries.inc();
        (None, Ok(()))
    }

    fn promote_idx(&mut self, idx: I) {
        if idx != self.head {
            self.detach(idx);
            self.attach_front(idx);
        }
    }

    fn detach(&mut self, idx: I) {
        // Unlink `idx`, fixing up head/tail when it sits at either end.
        let (p, n) = (self.prevs[idx.as_usize()], self.nexts[idx.as_usize()]);
        if p != I::NONE {
            self.nexts[p.as_usize()] = n;
        } else {
            self.head = n;
        }
        if n != I::NONE {
            self.prevs[n.as_usize()] = p;
        } else {
            self.tail = p;
        }
    }

    fn attach_front(&mut self, idx: I) {
        self.prevs[idx.as_usize()] = I::NONE;
        self.nexts[idx.as_usize()] = self.head;
        if self.head != I::NONE {
            self.prevs[self.head.as_usize()] = idx;
        } else {
            self.tail = idx;
        }
        self.head = idx;
    }

    fn attach_back(&mut self, idx: I) {
        self.nexts[idx.as_usize()] = I::NONE;
        self.prevs[idx.as_usize()] = self.tail;
        if self.tail != I::NONE {
            self.nexts[self.tail.as_usize()] = idx;
        } else {
            self.head = idx;
        }
        self.tail = idx;
    }

    fn pop_lru_internal(&mut self) -> Option<(K, V)> {
        if self.tail != I::NONE {
            let idx = self.tail;
            let key = unsafe { ptr::read(self.keys[idx.as_usize()].as_ptr()) };
            let val = unsafe { ptr::read(self.values[idx.as_usize()].as_ptr()) };
            self.map.remove(key.borrow());
            self.detach(idx);
            // Recycle the freed slot onto the free list.
            self.nexts[idx.as_usize()] = self.free_head;
            self.free_head = idx;
            self.num_entries = self.num_entries.dec();
            Some((key, val))
        } else {
            None
        }
    }

    fn pop_mru_internal(&mut self) -> Option<(K, V)> {
        if self.head != I::NONE {
            let idx = self.head;
            let key = unsafe { ptr::read(self.keys[idx.as_usize()].as_ptr()) };
            let val = unsafe { ptr::read(self.values[idx.as_usize()].as_ptr()) };
            self.map.remove(key.borrow());
            self.detach(idx);
            self.nexts[idx.as_usize()] = self.free_head;
            self.free_head = idx;
            self.num_entries = self.num_entries.dec();
            Some((key, val))
        } else {
            None
        }
    }
}

impl<K: Hash + Eq + Clone, V, const N: usize, I: IndexType> Default
    for HeaplessLruCache<K, V, N, I>
{
    fn default() -> Self {
        Self::new()
    }
}

impl<K, V, const N: usize, I: IndexType> AnyLruCache<K, V> for HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    fn len(&self) -> usize {
        self.num_entries.as_usize()
    }
    fn cap(&self) -> NonZeroUsize {
        NonZeroUsize::new(N.max(1)).unwrap()
    }
    fn put(&mut self, key: K, value: V) -> Option<V> {
        self.put(key, value, N).0
    }
    fn put_with_cap(
        &mut self,
        key: K,
        value: V,
        cap: NonZeroUsize,
    ) -> (Option<V>, Result<(), (K, V)>) {
        self.put(key, value, cap.get())
    }
    fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get(key)
    }
    fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get_mut(key)
    }
    fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.peek(key)
    }
    fn clear(&mut self) {
        // Drop every live entry by walking the recency list.
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let next = self.nexts[idx];
            unsafe {
                ptr::drop_in_place(self.keys[idx].as_mut_ptr());
                ptr::drop_in_place(self.values[idx].as_mut_ptr());
            }
            curr = next;
        }
        self.map.clear();
        self.head = I::NONE;
        self.tail = I::NONE;
        self.num_entries = I::ZERO;
        // Rebuild the free list exactly as `new` does.
        self.free_head = if N > 0 { I::ZERO } else { I::NONE };
        if N > 0 {
            let mut idx = I::ZERO;
            for slot in self.nexts.iter_mut() {
                idx = idx.inc();
                *slot = idx;
            }
            self.nexts[N - 1] = I::NONE;
        }
    }
    fn pop_lru(&mut self) -> Option<(K, V)> {
        self.pop_lru_internal()
    }
    fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.map.contains_key(key)
    }
    fn push(&mut self, key: K, value: V) -> Option<(K, V)> {
        match self.put(key, value, N) {
            (Some(_v), Ok(())) => None,
            (None, Ok(())) => None,
            (None, Err(item)) => Some(item),
            _ => None,
        }
    }
    fn pop<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(idx) = self.map.remove(key) {
            unsafe {
                // Drop the stored key, move the value out, and recycle the slot.
                ptr::drop_in_place(self.keys[idx.as_usize()].as_mut_ptr());
                let val = ptr::read(self.values[idx.as_usize()].as_ptr());
                self.detach(idx);
                self.nexts[idx.as_usize()] = self.free_head;
                self.free_head = idx;
                self.num_entries = self.num_entries.dec();
                Some(val)
            }
        } else {
            None
        }
    }
    fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(idx) = self.map.remove(key) {
            unsafe {
                let key = ptr::read(self.keys[idx.as_usize()].as_ptr());
                let val = ptr::read(self.values[idx.as_usize()].as_ptr());
                self.detach(idx);
                self.nexts[idx.as_usize()] = self.free_head;
                self.free_head = idx;
                self.num_entries = self.num_entries.dec();
                Some((key, val))
            }
        } else {
            None
        }
    }
    fn promote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(&idx) = self.map.get(key) {
            self.promote_idx(idx);
        }
    }
    fn demote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(&idx) = self.map.get(key) {
            if idx != self.tail {
                self.detach(idx);
                self.attach_back(idx);
            }
        }
    }
    fn peek_lru(&self) -> Option<(&K, &V)> {
        self.peek_lru()
    }
}

impl<'a, K, V, const N: usize, I: IndexType> crate::cache::lru_cache::LruIteratorSupport<'a, K, V>
    for HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone + 'a,
    V: 'a,
{
    type Iter = Iter<'a, K, V, N, I>;
    type IterMut = IterMut<'a, K, V, N, I>;
    fn iter(&'a self) -> Self::Iter {
        self.iter()
    }
    fn iter_mut(&'a mut self) -> Self::IterMut {
        self.iter_mut()
    }
}

#[derive(Debug)]
pub struct Iter<'a, K: 'a, V: 'a, const N: usize, I: IndexType> {
    cache: &'a HeaplessLruCache<K, V, N, I>,
    curr: I,
    remaining: usize,
}

impl<'a, K, V, const N: usize, I: IndexType> Iterator for Iter<'a, K, V, N, I> {
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        if self.curr == I::NONE {
            return None;
        }
        let idx = self.curr.as_usize();
        let (key, val) = unsafe {
            (
                &*self.cache.keys[idx].as_ptr(),
                &*self.cache.values[idx].as_ptr(),
            )
        };
        self.curr = self.cache.nexts[idx];
        self.remaining -= 1;
        Some((key, val))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V, const N: usize, I: IndexType> ExactSizeIterator for Iter<'a, K, V, N, I> {}

#[derive(Debug)]
pub struct IterMut<'a, K: 'a, V: 'a, const N: usize, I: IndexType> {
    curr: I,
    remaining: usize,
    keys: *const [MaybeUninit<K>; N],
    values: *mut [MaybeUninit<V>; N],
    nexts: *const [I; N],
    _marker: core::marker::PhantomData<&'a mut V>,
}

impl<'a, K, V, const N: usize, I: IndexType> Iterator for IterMut<'a, K, V, N, I> {
    type Item = (&'a K, &'a mut V);
    fn next(&mut self) -> Option<Self::Item> {
        if self.curr == I::NONE {
            return None;
        }
        let idx = self.curr.as_usize();
        unsafe {
            let key = &*(*self.keys)[idx].as_ptr();
            let val = &mut *(*self.values)[idx].as_mut_ptr();
            self.curr = (*self.nexts)[idx];
            self.remaining -= 1;
            Some((key, val))
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V, const N: usize, I: IndexType> ExactSizeIterator for IterMut<'a, K, V, N, I> {}

/// Owning iterator that drains entries from most to least recently used.
pub struct IntoIter<K, V, const N: usize, I: IndexType> {
    cache: HeaplessLruCache<K, V, N, I>,
}

impl<K, V, const N: usize, I: IndexType> Iterator for IntoIter<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (K, V);
    fn next(&mut self) -> Option<Self::Item> {
        self.cache.pop_mru_internal()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.cache.len();
        (len, Some(len))
    }
}

impl<K, V, const N: usize, I: IndexType> ExactSizeIterator for IntoIter<K, V, N, I> where
    K: Hash + Eq + Ord + Clone
{
}

impl<K, V, const N: usize, I: IndexType> IntoIterator for HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter { cache: self }
    }
}

impl<'a, K, V, const N: usize, I: IndexType> IntoIterator for &'a HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
{
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, K, V, const N: usize, I: IndexType> IntoIterator for &'a mut HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
{
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<K, V, const N: usize, I> FromIterator<(K, V)> for HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
{
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut cache = Self::default();
        for (k, v) in iter {
            let _ = cache.put(k, v, N);
        }
        cache
    }
}

impl<K, V, const N: usize, I> Extend<(K, V)> for HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
{
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        for (k, v) in iter {
            let _ = self.put(k, v, N);
        }
    }
}

impl<K, V, const N: usize, I: IndexType> Clone for HeaplessLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
    V: Clone,
{
    fn clone(&self) -> Self {
        // SAFETY: arrays of `MaybeUninit` are valid uninitialized; only the
        // slots reachable from `head` are cloned into them below.
        let mut new_keys: [MaybeUninit<K>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let mut new_vals: [MaybeUninit<V>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let k = unsafe { &*self.keys[idx].as_ptr() };
            let v = unsafe { &*self.values[idx].as_ptr() };
            unsafe {
                ptr::write(new_keys[idx].as_mut_ptr(), k.clone());
                ptr::write(new_vals[idx].as_mut_ptr(), v.clone());
            }
            curr = self.nexts[idx];
        }
        Self {
            map: self.map.clone(),
            keys: new_keys,
            values: new_vals,
            prevs: self.prevs,
            nexts: self.nexts,
            free_head: self.free_head,
            head: self.head,
            tail: self.tail,
            num_entries: self.num_entries,
        }
    }
}

impl<K, V, const N: usize, I: IndexType> Drop for HeaplessLruCache<K, V, N, I> {
    fn drop(&mut self) {
        // Walk the live list and drop each initialized key/value in place.
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let next = self.nexts[idx];
            unsafe {
                ptr::drop_in_place(self.keys[idx].as_mut_ptr());
                ptr::drop_in_place(self.values[idx].as_mut_ptr());
            }
            curr = next;
        }
    }
}

impl<K, V, const N: usize, I: IndexType> std::fmt::Debug for HeaplessLruCache<K, V, N, I> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HeaplessLruCache")
            .field("num_entries", &self.num_entries)
            .field("head", &self.head)
            .field("tail", &self.tail)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_capacity_eviction() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        let _ = cache.put(3, 30, 4);
        let _ = cache.put(4, 40, 4);
        assert_eq!(cache.len(), 4);

        // Key 1 is least recently used and gets evicted; touching 2 promotes it.
        let _ = cache.put(5, 50, 4);
        assert_eq!(cache.len(), 4);
        assert!(!cache.map.contains_key(&1));
        assert_eq!(cache.get(&2), Some(&20));

        // With 2 promoted, key 3 is now the LRU entry.
        let _ = cache.put(6, 60, 4);
        assert!(!cache.map.contains_key(&3));
    }
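
    // Exercises the trait-level removal paths (`pop` / `pop_entry` from
    // `AnyLruCache` above); both should free the slot and shrink the cache.
    #[test]
    fn test_pop_and_pop_entry() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        assert_eq!(cache.pop(&1), Some(10));
        assert!(!cache.contains(&1));
        assert_eq!(cache.pop_entry(&2), Some((2, 20)));
        assert!(cache.is_empty());
        assert_eq!(cache.pop(&2), None);
    }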

    #[test]
    fn test_promotion_demote() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        let _ = cache.put(3, 30, 4);

        cache.demote(&3);
        assert_eq!(cache.peek_lru(), Some((&3, &30)));

        cache.promote(&1);
        let keys: Vec<_> = cache.iter().map(|(k, _)| *k).collect();
        assert_eq!(keys, vec![1, 2, 3]);
    }
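
    // `iter_mut` hands out mutable value references in MRU-to-LRU order;
    // this exercises the raw-pointer iterator without reordering entries.
    #[test]
    fn test_iter_mut_updates_values() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        for (_k, v) in cache.iter_mut() {
            *v += 1;
        }
        assert_eq!(cache.peek(&1), Some(&11));
        assert_eq!(cache.peek(&2), Some(&21));
    }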

    #[test]
    fn test_clone_independence() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);

        let mut cloned = cache.clone();
        let _ = cloned.put(3, 30, 4);

        assert_eq!(cloned.len(), 3);
        assert_eq!(cache.len(), 2);
    }
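
    // `IntoIter::next` drains via `pop_mru_internal`, so consuming the cache
    // should yield entries from most to least recently used.
    #[test]
    fn test_into_iter_order() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        let _ = cache.put(3, 30, 4);
        let items: Vec<_> = cache.into_iter().collect();
        assert_eq!(items, vec![(3, 30), (2, 20), (1, 10)]);
    }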

    #[test]
    fn test_clear() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        cache.clear();
        assert!(cache.is_empty());
        assert_eq!(cache.free_head.as_usize(), 0);

        let _ = cache.put(1, 10, 4);
        assert_eq!(cache.len(), 1);
    }
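
    // A sketch of the runtime-capacity path: calling `put` with a `cap`
    // below `N` should evict at `cap` even though free slots remain.
    #[test]
    fn test_put_with_smaller_cap() {
        let mut cache: HeaplessLruCache<i32, i32, 4> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 2);
        let _ = cache.put(2, 20, 2);
        let _ = cache.put(3, 30, 2);
        assert_eq!(cache.len(), 2);
        assert!(!cache.map.contains_key(&1));
        assert!(cache.map.contains_key(&3));
    }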

    #[test]
    fn test_from_iter_and_extend() {
        let mut cache = HeaplessLruCache::<i32, i32, 4>::from_iter(vec![(1, 10), (2, 20)]);
        cache.extend(vec![(3, 30), (4, 40)]);
        assert_eq!(cache.len(), 4);
        assert_eq!(cache.get(&1), Some(&10));
    }
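
    // `peek` must not promote while `get` must: after peeking, key 1 is
    // still the LRU entry and gets evicted; after `get`, key 2 survives.
    #[test]
    fn test_peek_does_not_promote() {
        let mut cache: HeaplessLruCache<i32, i32, 2> = HeaplessLruCache::new();
        let _ = cache.put(1, 10, 2);
        let _ = cache.put(2, 20, 2);
        assert_eq!(cache.peek(&1), Some(&10));
        let _ = cache.put(3, 30, 2);
        assert!(!cache.map.contains_key(&1));
        assert_eq!(cache.get(&2), Some(&20));
        let _ = cache.put(4, 40, 2);
        assert!(cache.map.contains_key(&2));
        assert!(!cache.map.contains_key(&3));
    }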
}