#![cfg(feature = "lru")]

use core::mem::MaybeUninit;
use core::num::NonZeroUsize;
use core::ptr;
use heapless::Vec as HeaplessVec;
use std::borrow::Borrow;
use std::hash::Hash;

use crate::AnyLruCache;
use crate::IndexType;

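/// A fixed-capacity LRU cache with inline (heapless) storage and a sorted
/// side index for `O(log n)` key lookup via binary search.
///
/// Slots are linked into a doubly linked recency list (`head` = most
/// recently used, `tail` = least recently used); unused slots form a singly
/// linked free list threaded through `nexts`.
///
/// A minimal usage sketch (illustrative only; assumes an `IndexType` impl
/// for the default `u8` index):
///
/// ```ignore
/// let mut cache: HeaplessBTreeLruCache<u32, &str, 4> = HeaplessBTreeLruCache::new();
/// let _ = cache.put(1, "one", 4);
/// assert_eq!(cache.get(&1), Some(&"one"));
/// ```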
pub struct HeaplessBTreeLruCache<K, V, const N: usize, I: IndexType = u8> {
    /// Inline key slots; only slots reachable from the recency list are initialized.
    pub keys: [MaybeUninit<K>; N],
    /// Inline value slots, parallel to `keys`.
    pub values: [MaybeUninit<V>; N],
    /// Previous-link per slot in the recency list (`I::NONE` terminates).
    pub prevs: [I; N],
    /// Next-link per slot; free slots reuse this array as a singly linked free list.
    pub nexts: [I; N],
    /// Slot indices kept sorted by key for binary-search lookup.
    pub sorted_indices: HeaplessVec<I, N>,
    /// Most recently used slot, or `I::NONE` when empty.
    pub head: I,
    /// Least recently used slot, or `I::NONE` when empty.
    pub tail: I,
    /// Count of live entries.
    pub num_entries: I,
    /// First free slot, or `I::NONE` when all slots are occupied.
    pub free_head: I,
}

impl<K, V, const N: usize, I: IndexType> HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    pub fn new() -> Self {
        const {
            assert!(
                core::mem::size_of::<Self>() <= 16 * 1024,
                "HeaplessBTreeLruCache is too large! The struct size exceeds the 16KB limit. Reduce N."
            );
        }
        // Thread every slot onto the free list: nexts[i] = i + 1, with the
        // last slot terminating the list.
        let prevs = [I::NONE; N];
        let mut nexts = [I::NONE; N];
        let mut idx = I::ZERO;
        for slot in nexts.iter_mut() {
            idx = idx.inc();
            *slot = idx;
        }
        if N > 0 {
            nexts[N - 1] = I::NONE;
        }

        Self {
            // SAFETY: an uninitialized `[MaybeUninit<T>; N]` is itself a valid
            // value; elements are only read after `ptr::write` initializes them.
            keys: unsafe { MaybeUninit::uninit().assume_init() },
            values: unsafe { MaybeUninit::uninit().assume_init() },
            prevs,
            nexts,
            sorted_indices: HeaplessVec::new(),
            head: I::NONE,
            tail: I::NONE,
            num_entries: I::ZERO,
            free_head: if N > 0 { I::ZERO } else { I::NONE },
        }
    }

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.num_entries.as_usize()
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.num_entries.is_zero()
    }

    /// Returns a reference to the value for `key` and promotes the entry to
    /// most recently used.
    pub fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            let idx = self.sorted_indices[pos];
            self.promote_idx(idx);
            Some(unsafe { &*self.values[idx.as_usize()].as_ptr() })
        } else {
            None
        }
    }

    /// Like [`Self::get`], but returns a mutable reference.
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            let idx = self.sorted_indices[pos];
            self.promote_idx(idx);
            Some(unsafe { &mut *self.values[idx.as_usize()].as_mut_ptr() })
        } else {
            None
        }
    }

    /// Returns a reference to the value for `key` without touching the
    /// recency order.
    pub fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            let idx = self.sorted_indices[pos];
            Some(unsafe { &*self.values[idx.as_usize()].as_ptr() })
        } else {
            None
        }
    }

    /// Returns the least recently used entry without touching the recency order.
    pub fn peek_lru(&self) -> Option<(&K, &V)> {
        if self.tail != I::NONE {
            let idx = self.tail.as_usize();
            unsafe { Some((&*self.keys[idx].as_ptr(), &*self.values[idx].as_ptr())) }
        } else {
            None
        }
    }

    /// Iterates entries from most recently used to least recently used.
    pub fn iter(&self) -> Iter<'_, K, V, N, I> {
        Iter {
            cache: self,
            curr: self.head,
            remaining: self.num_entries.as_usize(),
        }
    }

    /// Mutable variant of [`Self::iter`]. Raw pointers to the backing arrays
    /// let the iterator hand out `&mut V` without holding a borrow of `self`.
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V, N, I> {
        IterMut {
            curr: self.head,
            remaining: self.num_entries.as_usize(),
            keys: &self.keys as *const [MaybeUninit<K>; N],
            values: &mut self.values as *mut [MaybeUninit<V>; N],
            nexts: &self.nexts as *const [I; N],
            _marker: core::marker::PhantomData,
        }
    }

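    /// Inserts `key` / `value`, treating `cap` as the logical capacity and
    /// evicting the least recently used entry once `cap` is reached.
    ///
    /// Returns the displaced value on a key collision; `Err((key, value))`
    /// hands the pair back when no slot is free (possible when `N == 0`, or
    /// when `cap > N` and all `N` slots are occupied).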
    pub fn put(&mut self, key: K, value: V, cap: usize) -> (Option<V>, Result<(), (K, V)>) {
        match self.binary_search(key.borrow()) {
            Ok(pos) => {
                let idx = self.sorted_indices[pos];
                let old = unsafe { ptr::replace(self.values[idx.as_usize()].as_mut_ptr(), value) };
                self.promote_idx(idx);
                (Some(old), Ok(()))
            }
            Err(_insert_pos) => {
                if self.len() >= cap {
                    self.pop_lru_internal();
                }

                if self.len() >= N {
                    return (None, Err((key, value)));
                }

                let idx = self.free_head;
                self.free_head = self.nexts[idx.as_usize()];

                // Re-run the search: the eviction above may have removed an
                // element and shifted positions in `sorted_indices`.
                let final_pos = match self.binary_search(key.borrow()) {
                    Ok(_) => unreachable!(),
                    Err(p) => p,
                };

                // Cannot fail: `len() < N` was checked above, so the heapless
                // vec has a free slot.
                self.sorted_indices.insert(final_pos, idx).ok();
                unsafe {
                    ptr::write(self.keys[idx.as_usize()].as_mut_ptr(), key);
                    ptr::write(self.values[idx.as_usize()].as_mut_ptr(), value);
                }
                self.attach_front(idx);
                self.num_entries = self.num_entries.inc();
                (None, Ok(()))
            }
        }
    }

    fn binary_search<Q>(&self, key: &Q) -> Result<usize, usize>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.sorted_indices.as_slice().binary_search_by(|&idx| {
            let k = unsafe { &*self.keys[idx.as_usize()].as_ptr() };
            k.borrow().cmp(key)
        })
    }

    /// Moves `idx` to the front (most recently used) of the recency list.
    fn promote_idx(&mut self, idx: I) {
        if idx != self.head {
            self.detach(idx);
            self.attach_front(idx);
        }
    }

    /// Unlinks `idx` from the recency list, patching `head`/`tail` as needed.
    fn detach(&mut self, idx: I) {
        let (p, n) = (self.prevs[idx.as_usize()], self.nexts[idx.as_usize()]);
        if p != I::NONE {
            self.nexts[p.as_usize()] = n;
        } else {
            self.head = n;
        }
        if n != I::NONE {
            self.prevs[n.as_usize()] = p;
        } else {
            self.tail = p;
        }
    }

    fn attach_front(&mut self, idx: I) {
        self.prevs[idx.as_usize()] = I::NONE;
        self.nexts[idx.as_usize()] = self.head;
        if self.head != I::NONE {
            self.prevs[self.head.as_usize()] = idx;
        } else {
            self.tail = idx;
        }
        self.head = idx;
    }

    fn attach_back(&mut self, idx: I) {
        self.nexts[idx.as_usize()] = I::NONE;
        self.prevs[idx.as_usize()] = self.tail;
        if self.tail != I::NONE {
            self.nexts[self.tail.as_usize()] = idx;
        } else {
            self.head = idx;
        }
        self.tail = idx;
    }

    fn pop_lru_internal(&mut self) -> Option<(K, V)> {
        if self.tail != I::NONE {
            let idx = self.tail;
            // Locate and remove the sorted-index entry while the key is
            // still logically live, then move the pair out of the slot.
            let pos = {
                let k = unsafe { &*self.keys[idx.as_usize()].as_ptr() };
                self.binary_search(k)
            };
            if let Ok(pos) = pos {
                self.sorted_indices.remove(pos);
            }
            let key = unsafe { ptr::read(self.keys[idx.as_usize()].as_ptr()) };
            let val = unsafe { ptr::read(self.values[idx.as_usize()].as_ptr()) };
            self.detach(idx);
            // Return the slot to the free list.
            self.nexts[idx.as_usize()] = self.free_head;
            self.free_head = idx;
            self.num_entries = self.num_entries.dec();
            Some((key, val))
        } else {
            None
        }
    }

    fn pop_mru_internal(&mut self) -> Option<(K, V)> {
        if self.head != I::NONE {
            let idx = self.head;
            let pos = {
                let k = unsafe { &*self.keys[idx.as_usize()].as_ptr() };
                self.binary_search(k)
            };
            if let Ok(pos) = pos {
                self.sorted_indices.remove(pos);
            }
            let key = unsafe { ptr::read(self.keys[idx.as_usize()].as_ptr()) };
            let val = unsafe { ptr::read(self.values[idx.as_usize()].as_ptr()) };
            self.detach(idx);
            self.nexts[idx.as_usize()] = self.free_head;
            self.free_head = idx;
            self.num_entries = self.num_entries.dec();
            Some((key, val))
        } else {
            None
        }
    }
}

impl<K: Hash + Eq + Ord + Clone, V, const N: usize, I: IndexType> Default
    for HeaplessBTreeLruCache<K, V, N, I>
{
    fn default() -> Self {
        Self::new()
    }
}

impl<K, V, const N: usize, I: IndexType> AnyLruCache<K, V> for HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    fn len(&self) -> usize {
        self.num_entries.as_usize()
    }
    fn cap(&self) -> NonZeroUsize {
        // `NonZeroUsize` cannot express a zero capacity, so an `N == 0`
        // cache reports a capacity of 1.
        NonZeroUsize::new(N.max(1)).unwrap()
    }
    fn put(&mut self, key: K, value: V) -> Option<V> {
        self.put(key, value, N).0
    }
    fn put_with_cap(
        &mut self,
        key: K,
        value: V,
        cap: NonZeroUsize,
    ) -> (Option<V>, Result<(), (K, V)>) {
        self.put(key, value, cap.get())
    }
    fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get(key)
    }
    fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get_mut(key)
    }
    fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.peek(key)
    }
    fn clear(&mut self) {
        // Drop every live entry by walking the recency list.
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let next = self.nexts[idx];
            unsafe {
                ptr::drop_in_place(self.keys[idx].as_mut_ptr());
                ptr::drop_in_place(self.values[idx].as_mut_ptr());
            }
            curr = next;
        }
        self.sorted_indices.clear();
        self.head = I::NONE;
        self.tail = I::NONE;
        self.num_entries = I::ZERO;
        self.free_head = if N > 0 { I::ZERO } else { I::NONE };
        // Rebuild the free list exactly as `new()` does.
        if N > 0 {
            let mut idx = I::ZERO;
            for slot in self.nexts.iter_mut() {
                idx = idx.inc();
                *slot = idx;
            }
            self.nexts[N - 1] = I::NONE;
        }
    }
    fn pop_lru(&mut self) -> Option<(K, V)> {
        self.pop_lru_internal()
    }
    fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.binary_search(key).is_ok()
    }
    fn push(&mut self, key: K, value: V) -> Option<(K, V)> {
        // `put` with `cap == N` only rejects the pair when `N == 0`;
        // otherwise an LRU eviction frees a slot first.
        match self.put(key, value, N) {
            (None, Err(item)) => Some(item),
            _ => None,
        }
    }
    fn pop<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            let idx = self.sorted_indices.remove(pos);
            unsafe {
                // The key is dropped in place; only the value is moved out.
                ptr::drop_in_place(self.keys[idx.as_usize()].as_mut_ptr());
                let val = ptr::read(self.values[idx.as_usize()].as_ptr());
                self.detach(idx);
                self.nexts[idx.as_usize()] = self.free_head;
                self.free_head = idx;
                self.num_entries = self.num_entries.dec();
                Some(val)
            }
        } else {
            None
        }
    }
    fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            let idx = self.sorted_indices.remove(pos);
            unsafe {
                let key = ptr::read(self.keys[idx.as_usize()].as_ptr());
                let val = ptr::read(self.values[idx.as_usize()].as_ptr());
                self.detach(idx);
                self.nexts[idx.as_usize()] = self.free_head;
                self.free_head = idx;
                self.num_entries = self.num_entries.dec();
                Some((key, val))
            }
        } else {
            None
        }
    }
    fn promote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            self.promote_idx(self.sorted_indices[pos]);
        }
    }
    fn demote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Ok(pos) = self.binary_search(key) {
            let idx = self.sorted_indices[pos];
            if idx != self.tail {
                self.detach(idx);
                self.attach_back(idx);
            }
        }
    }
    fn peek_lru(&self) -> Option<(&K, &V)> {
        self.peek_lru()
    }
}

impl<'a, K, V, const N: usize, I: IndexType> crate::cache::lru_cache::LruIteratorSupport<'a, K, V>
    for HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone + 'a,
    V: 'a,
{
    type Iter = Iter<'a, K, V, N, I>;
    type IterMut = IterMut<'a, K, V, N, I>;
    fn iter(&'a self) -> Self::Iter {
        self.iter()
    }
    fn iter_mut(&'a mut self) -> Self::IterMut {
        self.iter_mut()
    }
}

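/// Immutable iterator over cache entries in recency order (most recently
/// used first).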
#[derive(Debug)]
pub struct Iter<'a, K: 'a, V: 'a, const N: usize, I: IndexType> {
    cache: &'a HeaplessBTreeLruCache<K, V, N, I>,
    curr: I,
    remaining: usize,
}

impl<'a, K, V, const N: usize, I: IndexType> Iterator for Iter<'a, K, V, N, I> {
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        if self.curr == I::NONE {
            return None;
        }
        let idx = self.curr.as_usize();
        let (key, val) = unsafe {
            (
                &*self.cache.keys[idx].as_ptr(),
                &*self.cache.values[idx].as_ptr(),
            )
        };
        self.curr = self.cache.nexts[idx];
        self.remaining -= 1;
        Some((key, val))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V, const N: usize, I: IndexType> ExactSizeIterator for Iter<'a, K, V, N, I> {}

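/// Mutable iterator over cache entries in recency order. Raw pointers into
/// the backing arrays (with a `PhantomData<&'a mut V>` marker for the
/// lifetime) let it yield `&'a mut V` without borrowing the whole cache.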
#[derive(Debug)]
pub struct IterMut<'a, K: 'a, V: 'a, const N: usize, I: IndexType> {
    curr: I,
    remaining: usize,
    keys: *const [MaybeUninit<K>; N],
    values: *mut [MaybeUninit<V>; N],
    nexts: *const [I; N],
    _marker: core::marker::PhantomData<&'a mut V>,
}

impl<'a, K, V, const N: usize, I: IndexType> Iterator for IterMut<'a, K, V, N, I> {
    type Item = (&'a K, &'a mut V);
    fn next(&mut self) -> Option<Self::Item> {
        if self.curr == I::NONE {
            return None;
        }
        let idx = self.curr.as_usize();
        unsafe {
            let key = &*(*self.keys)[idx].as_ptr();
            // Each slot is visited at most once, so the `&mut` handed out
            // here never aliases a previously yielded reference.
            let val = &mut *(*self.values)[idx].as_mut_ptr();
            self.curr = (*self.nexts)[idx];
            self.remaining -= 1;
            Some((key, val))
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V, const N: usize, I: IndexType> ExactSizeIterator for IterMut<'a, K, V, N, I> {}

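/// Owning iterator that drains the cache from most recently used to least
/// recently used.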
pub struct IntoIter<K, V, const N: usize, I: IndexType> {
    cache: HeaplessBTreeLruCache<K, V, N, I>,
}

impl<K, V, const N: usize, I: IndexType> Iterator for IntoIter<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (K, V);
    fn next(&mut self) -> Option<Self::Item> {
        self.cache.pop_mru_internal()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.cache.len();
        (len, Some(len))
    }
}

impl<K, V, const N: usize, I: IndexType> ExactSizeIterator for IntoIter<K, V, N, I> where
    K: Hash + Eq + Ord + Clone
{
}

impl<K, V, const N: usize, I: IndexType> IntoIterator for HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter { cache: self }
    }
}

impl<'a, K, V, const N: usize, I: IndexType> IntoIterator for &'a HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, K, V, const N: usize, I: IndexType> IntoIterator
    for &'a mut HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<K, V, const N: usize, I> FromIterator<(K, V)> for HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
{
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut cache = Self::default();
        for (k, v) in iter {
            let _ = cache.put(k, v, N);
        }
        cache
    }
}

impl<K, V, const N: usize, I> Extend<(K, V)> for HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
{
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        for (k, v) in iter {
            let _ = self.put(k, v, N);
        }
    }
}

impl<K, V, const N: usize, I: IndexType> Clone for HeaplessBTreeLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    V: Clone,
{
    fn clone(&self) -> Self {
        // Clone only the live slots (walked via the recency list); the link
        // arrays are copied verbatim, which also preserves the free list.
        let mut new_keys: [MaybeUninit<K>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let mut new_vals: [MaybeUninit<V>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let k = unsafe { &*self.keys[idx].as_ptr() };
            let v = unsafe { &*self.values[idx].as_ptr() };
            unsafe {
                ptr::write(new_keys[idx].as_mut_ptr(), k.clone());
                ptr::write(new_vals[idx].as_mut_ptr(), v.clone());
            }
            curr = self.nexts[idx];
        }
        Self {
            keys: new_keys,
            values: new_vals,
            prevs: self.prevs,
            nexts: self.nexts,
            sorted_indices: self.sorted_indices.clone(),
            head: self.head,
            tail: self.tail,
            num_entries: self.num_entries,
            free_head: self.free_head,
        }
    }
}

impl<K, V, const N: usize, I: IndexType> Drop for HeaplessBTreeLruCache<K, V, N, I> {
    fn drop(&mut self) {
        // Only slots on the recency list hold initialized data; free slots
        // must not be dropped.
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let next = self.nexts[idx];
            unsafe {
                ptr::drop_in_place(self.keys[idx].as_mut_ptr());
                ptr::drop_in_place(self.values[idx].as_mut_ptr());
            }
            curr = next;
        }
    }
}

impl<K, V, const N: usize, I: IndexType> std::fmt::Debug for HeaplessBTreeLruCache<K, V, N, I> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HeaplessBTreeLruCache")
            .field("num_entries", &self.num_entries)
            .field("head", &self.head)
            .field("tail", &self.tail)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_sorted_consistency() {
        let mut cache: HeaplessBTreeLruCache<i32, i32, 4> = HeaplessBTreeLruCache::new();
        let _ = cache.put(4, 40, 4);
        let _ = cache.put(2, 20, 4);
        let _ = cache.put(3, 30, 4);
        let _ = cache.put(1, 10, 4);

        assert_eq!(cache.len(), 4);
        let sorted_keys: Vec<_> = cache
            .sorted_indices
            .iter()
            .map(|&idx| unsafe { *cache.keys[idx.as_usize()].as_ptr() })
            .collect();
        assert_eq!(sorted_keys, vec![1, 2, 3, 4]);
    }

    #[test]
    fn test_eviction_with_sorted_lookup() {
        let mut cache: HeaplessBTreeLruCache<i32, i32, 2> = HeaplessBTreeLruCache::new();
        let _ = cache.put(1, 10, 2);
        let _ = cache.put(2, 20, 2);

        cache.get(&1);

        let _ = cache.put(3, 30, 2);
        assert!(!cache.contains(&2));
        assert!(cache.contains(&1));
        assert!(cache.contains(&3));

        let sorted_keys: Vec<_> = cache
            .sorted_indices
            .iter()
            .map(|&idx| unsafe { *cache.keys[idx.as_usize()].as_ptr() })
            .collect();
        assert_eq!(sorted_keys, vec![1, 3]);
    }

    #[test]
    fn test_clone() {
        let mut cache: HeaplessBTreeLruCache<i32, i32, 4> = HeaplessBTreeLruCache::new();
        let _ = cache.put(1, 10, 4);
        let cloned = cache.clone();
        assert_eq!(cloned.len(), 1);
        assert!(cloned.contains(&1));
    }
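
    // Added sketch: iteration and `peek_lru` should agree on recency order
    // (head = most recently used, tail = least recently used).
    #[test]
    fn test_iter_recency_order() {
        let mut cache: HeaplessBTreeLruCache<i32, i32, 4> = HeaplessBTreeLruCache::new();
        let _ = cache.put(1, 10, 4);
        let _ = cache.put(2, 20, 4);
        cache.get(&1); // promote key 1 to most recently used

        let keys: Vec<_> = cache.iter().map(|(k, _)| *k).collect();
        assert_eq!(keys, vec![1, 2]);
        assert_eq!(cache.peek_lru(), Some((&2, &20)));
    }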
}