#![cfg(feature = "lru")]
use core::mem::MaybeUninit;
use core::num::NonZeroUsize;
use core::ptr;
use std::borrow::Borrow;
use std::hash::Hash;

use crate::AnyLruCache;
use crate::IndexType;

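/// A fixed-capacity LRU cache that stores everything inline (no heap
/// allocation). Entries live in `N` slots; recency is tracked by an intrusive
/// doubly linked list of slot indices, and lookups are linear scans, which
/// keeps the structure simple for small `N`.
///
/// The example below is an illustrative sketch (marked `ignore` because the
/// crate path is not shown here); it only uses methods defined in this module.
///
/// ```ignore
/// let mut cache: HeaplessLinearLruCache<u32, u32, 4> = HeaplessLinearLruCache::new();
/// let _ = cache.put(1, 10, 4); // the inherent `put` takes the capacity explicitly
/// assert_eq!(cache.get(&1), Some(&10));
/// ```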
pub struct HeaplessLinearLruCache<K, V, const N: usize, I: IndexType = u8> {
    /// Key slots; only slots reachable from `head` are initialized.
    pub keys: [MaybeUninit<K>; N],
    /// Value slots, parallel to `keys`.
    pub values: [MaybeUninit<V>; N],
    /// Per-slot index of the previous (more recently used) entry, or `I::NONE`.
    pub prevs: [I; N],
    /// Per-slot index of the next (less recently used) entry; free slots reuse this as a free-list link.
    pub nexts: [I; N],
    /// Index of the most recently used entry, or `I::NONE` when empty.
    pub head: I,
    /// Index of the least recently used entry, or `I::NONE` when empty.
    pub tail: I,
    /// Number of occupied slots.
    pub num_entries: I,
    /// Head of the free list threaded through `nexts`, or `I::NONE` when full.
    pub free_head: I,
}

impl<K, V, const N: usize, I: IndexType> HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
{
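    /// Creates an empty cache with all `N` slots on the free list.
    ///
    /// The `const` block asserts at compile time that the whole struct stays
    /// within 16KB; exceeding it fails the build rather than silently bloating
    /// stack usage.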
    pub fn new() -> Self {
        const {
            assert!(
                std::mem::size_of::<Self>() <= 16 * 1024,
                "HeaplessLinearLruCache is too large! The struct size exceeds the 16KB limit. Reduce N."
            );
        }
        // Thread every slot onto the free list: slot i links to slot i + 1,
        // and the last slot terminates the list.
        let prevs = [I::NONE; N];
        let mut nexts = [I::NONE; N];
        let mut idx = I::ZERO;
        for slot in nexts.iter_mut() {
            idx = idx.inc();
            *slot = idx;
        }
        if N > 0 {
            nexts[N - 1] = I::NONE;
        }

        Self {
            // SAFETY: an array of `MaybeUninit<T>` does not require initialization.
            keys: unsafe { MaybeUninit::uninit().assume_init() },
            values: unsafe { MaybeUninit::uninit().assume_init() },
            prevs,
            nexts,
            head: I::NONE,
            tail: I::NONE,
            num_entries: I::ZERO,
            free_head: if N > 0 { I::ZERO } else { I::NONE },
        }
    }

    /// Returns the number of entries currently in the cache.
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.num_entries.as_usize()
    }

    /// Returns `true` if the cache holds no entries.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.num_entries.is_zero()
    }

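    /// Returns a reference to the value for `key` and marks the entry as most
    /// recently used. The lookup is a linear scan, so it is O(N).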
    pub fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            self.promote_idx(idx);
            Some(unsafe { &*self.values[idx.as_usize()].as_ptr() })
        } else {
            None
        }
    }

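    /// Returns a mutable reference to the value for `key` and marks the entry
    /// as most recently used.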
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            self.promote_idx(idx);
            Some(unsafe { &mut *self.values[idx.as_usize()].as_mut_ptr() })
        } else {
            None
        }
    }

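    /// Returns a reference to the value for `key` without updating the LRU
    /// order.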
    pub fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            Some(unsafe { &*self.values[idx.as_usize()].as_ptr() })
        } else {
            None
        }
    }

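    /// Returns the least recently used entry without changing its position.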
    pub fn peek_lru(&self) -> Option<(&K, &V)> {
        if self.tail != I::NONE {
            let idx = self.tail.as_usize();
            unsafe { Some((&*self.keys[idx].as_ptr(), &*self.values[idx].as_ptr())) }
        } else {
            None
        }
    }

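    /// Returns an iterator over `(&K, &V)` pairs from most to least recently
    /// used.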
    pub fn iter(&self) -> Iter<'_, K, V, N, I> {
        Iter {
            cache: self,
            curr: self.head,
            remaining: self.num_entries.as_usize(),
        }
    }

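    /// Returns an iterator over `(&K, &mut V)` pairs from most to least
    /// recently used. Keys stay shared; only values are mutable.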
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V, N, I> {
        IterMut {
            curr: self.head,
            remaining: self.num_entries.as_usize(),
            keys: &self.keys as *const [MaybeUninit<K>; N],
            values: &mut self.values as *mut [MaybeUninit<V>; N],
            nexts: &self.nexts as *const [I; N],
            _marker: core::marker::PhantomData,
        }
    }

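    /// Inserts `key`/`value`, treating `cap` as the logical capacity.
    ///
    /// If the key already exists its value is replaced and returned, and the
    /// entry becomes most recently used. Otherwise the LRU entry is evicted
    /// when the cache is at `cap`; if no slot is free the pair is handed back
    /// in `Err((key, value))`.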
    pub fn put(&mut self, key: K, value: V, cap: usize) -> (Option<V>, Result<(), (K, V)>) {
        if let Some(idx) = self.find_index(key.borrow()) {
            let old = unsafe { ptr::replace(self.values[idx.as_usize()].as_mut_ptr(), value) };
            self.promote_idx(idx);
            (Some(old), Ok(()))
        } else {
            if self.len() >= cap {
                self.pop_lru_internal();
            }

            if self.len() >= N {
                return (None, Err((key, value)));
            }

            let idx = self.free_head;
            self.free_head = self.nexts[idx.as_usize()];
            unsafe {
                ptr::write(self.keys[idx.as_usize()].as_mut_ptr(), key);
                ptr::write(self.values[idx.as_usize()].as_mut_ptr(), value);
            }
            self.attach_front(idx);
            self.num_entries = self.num_entries.inc();
            (None, Ok(()))
        }
    }

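    /// Walks the linked list from `head` and returns the slot index whose key
    /// matches, if any. This is the O(N) linear lookup the type is named for.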
    fn find_index<Q>(&self, key: &Q) -> Option<I>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let k = unsafe { &*self.keys[idx].as_ptr() };
            if k.borrow() == key {
                return Some(curr);
            }
            curr = self.nexts[idx];
        }
        None
    }

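    /// Moves the slot at `idx` to the front (most recently used position).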
    fn promote_idx(&mut self, idx: I) {
        if idx != self.head {
            self.detach(idx);
            self.attach_front(idx);
        }
    }

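    /// Unlinks the slot at `idx` from the usage list, patching `head`/`tail`
    /// as needed.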
    fn detach(&mut self, idx: I) {
        let (p, n) = (self.prevs[idx.as_usize()], self.nexts[idx.as_usize()]);
        if p != I::NONE {
            self.nexts[p.as_usize()] = n;
        } else {
            self.head = n;
        }
        if n != I::NONE {
            self.prevs[n.as_usize()] = p;
        } else {
            self.tail = p;
        }
    }

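    /// Links the slot at `idx` in as the new head (most recently used).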
    fn attach_front(&mut self, idx: I) {
        self.prevs[idx.as_usize()] = I::NONE;
        self.nexts[idx.as_usize()] = self.head;
        if self.head != I::NONE {
            self.prevs[self.head.as_usize()] = idx;
        } else {
            self.tail = idx;
        }
        self.head = idx;
    }

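    /// Links the slot at `idx` in as the new tail (least recently used).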
    fn attach_back(&mut self, idx: I) {
        self.nexts[idx.as_usize()] = I::NONE;
        self.prevs[idx.as_usize()] = self.tail;
        if self.tail != I::NONE {
            self.nexts[self.tail.as_usize()] = idx;
        } else {
            self.head = idx;
        }
        self.tail = idx;
    }

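    /// Removes and returns the least recently used entry, returning its slot
    /// to the free list.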
    fn pop_lru_internal(&mut self) -> Option<(K, V)> {
        if self.tail != I::NONE {
            let idx = self.tail;
            let key = unsafe { ptr::read(self.keys[idx.as_usize()].as_ptr()) };
            let val = unsafe { ptr::read(self.values[idx.as_usize()].as_ptr()) };
            self.detach(idx);
            self.nexts[idx.as_usize()] = self.free_head;
            self.free_head = idx;
            self.num_entries = self.num_entries.dec();
            Some((key, val))
        } else {
            None
        }
    }

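    /// Removes and returns the most recently used entry, returning its slot to
    /// the free list. Used by the consuming iterator.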
    fn pop_mru_internal(&mut self) -> Option<(K, V)> {
        if self.head != I::NONE {
            let idx = self.head;
            let key = unsafe { ptr::read(self.keys[idx.as_usize()].as_ptr()) };
            let val = unsafe { ptr::read(self.values[idx.as_usize()].as_ptr()) };
            self.detach(idx);
            self.nexts[idx.as_usize()] = self.free_head;
            self.free_head = idx;
            self.num_entries = self.num_entries.dec();
            Some((key, val))
        } else {
            None
        }
    }
}

impl<K: Hash + Eq + Clone, V, const N: usize, I: IndexType> Default
    for HeaplessLinearLruCache<K, V, N, I>
{
    fn default() -> Self {
        Self::new()
    }
}

impl<K, V, const N: usize, I: IndexType> AnyLruCache<K, V> for HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    fn len(&self) -> usize {
        self.num_entries.as_usize()
    }
    fn cap(&self) -> NonZeroUsize {
        NonZeroUsize::new(N.max(1)).unwrap()
    }
    fn put(&mut self, key: K, value: V) -> Option<V> {
        self.put(key, value, N).0
    }
    fn put_with_cap(
        &mut self,
        key: K,
        value: V,
        cap: NonZeroUsize,
    ) -> (Option<V>, Result<(), (K, V)>) {
        self.put(key, value, cap.get())
    }
    fn get<Q>(&mut self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get(key)
    }
    fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.get_mut(key)
    }
    fn peek<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.peek(key)
    }
    fn clear(&mut self) {
        // Drop every live key/value in place, then rebuild the free list so
        // all slots are available again.
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let next = self.nexts[idx];
            unsafe {
                ptr::drop_in_place(self.keys[idx].as_mut_ptr());
                ptr::drop_in_place(self.values[idx].as_mut_ptr());
            }
            curr = next;
        }
        self.head = I::NONE;
        self.tail = I::NONE;
        self.num_entries = I::ZERO;
        self.free_head = if N > 0 { I::ZERO } else { I::NONE };
        if N > 0 {
            let mut idx = I::ZERO;
            for slot in self.nexts.iter_mut() {
                idx = idx.inc();
                *slot = idx;
            }
            self.nexts[N - 1] = I::NONE;
        }
    }
    fn pop_lru(&mut self) -> Option<(K, V)> {
        self.pop_lru_internal()
    }
    fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        self.find_index(key).is_some()
    }
    fn push(&mut self, key: K, value: V) -> Option<(K, V)> {
        // Returns the rejected pair only when no slot could be found for it.
        match self.put(key, value, N) {
            (None, Err(item)) => Some(item),
            _ => None,
        }
    }
    fn pop<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            unsafe {
                // Drop the key in place, move the value out, and recycle the slot.
                ptr::drop_in_place(self.keys[idx.as_usize()].as_mut_ptr());
                let val = ptr::read(self.values[idx.as_usize()].as_ptr());
                self.detach(idx);
                self.nexts[idx.as_usize()] = self.free_head;
                self.free_head = idx;
                self.num_entries = self.num_entries.dec();
                Some(val)
            }
        } else {
            None
        }
    }
    fn pop_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            unsafe {
                let key = ptr::read(self.keys[idx.as_usize()].as_ptr());
                let val = ptr::read(self.values[idx.as_usize()].as_ptr());
                self.detach(idx);
                self.nexts[idx.as_usize()] = self.free_head;
                self.free_head = idx;
                self.num_entries = self.num_entries.dec();
                Some((key, val))
            }
        } else {
            None
        }
    }
    fn promote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            self.promote_idx(idx);
        }
    }
    fn demote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + Ord + ?Sized,
    {
        if let Some(idx) = self.find_index(key) {
            if idx != self.tail {
                self.detach(idx);
                self.attach_back(idx);
            }
        }
    }
    fn peek_lru(&self) -> Option<(&K, &V)> {
        self.peek_lru()
    }
}

impl<'a, K, V, const N: usize, I: IndexType> crate::cache::lru_cache::LruIteratorSupport<'a, K, V>
    for HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone + 'a,
    V: 'a,
{
    type Iter = Iter<'a, K, V, N, I>;
    type IterMut = IterMut<'a, K, V, N, I>;
    fn iter(&'a self) -> Self::Iter {
        self.iter()
    }
    fn iter_mut(&'a mut self) -> Self::IterMut {
        self.iter_mut()
    }
}

/// Consuming iterator returned by `into_iter`, yielding owned entries from
/// most to least recently used.
pub struct IntoIter<K, V, const N: usize, I: IndexType> {
    cache: HeaplessLinearLruCache<K, V, N, I>,
}

/// Borrowing iterator over `(&K, &V)` pairs in most- to least-recently-used
/// order.
#[derive(Debug)]
pub struct Iter<'a, K: 'a, V: 'a, const N: usize, I: IndexType> {
    cache: &'a HeaplessLinearLruCache<K, V, N, I>,
    curr: I,
    remaining: usize,
}

impl<'a, K, V, const N: usize, I: IndexType> Iterator for Iter<'a, K, V, N, I> {
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        if self.curr == I::NONE {
            return None;
        }
        let idx = self.curr.as_usize();
        let (key, val) = unsafe {
            (
                &*self.cache.keys[idx].as_ptr(),
                &*self.cache.values[idx].as_ptr(),
            )
        };
        self.curr = self.cache.nexts[idx];
        self.remaining -= 1;
        Some((key, val))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V, const N: usize, I: IndexType> ExactSizeIterator for Iter<'a, K, V, N, I> {}

/// Mutable iterator over `(&K, &mut V)` pairs in most- to least-recently-used
/// order. Raw pointers are used so `next` can hand out `&'a mut V` borrows
/// that outlive the iterator's own `&mut self` calls.
#[derive(Debug)]
pub struct IterMut<'a, K: 'a, V: 'a, const N: usize, I: IndexType> {
    curr: I,
    remaining: usize,
    keys: *const [MaybeUninit<K>; N],
    values: *mut [MaybeUninit<V>; N],
    nexts: *const [I; N],
    _marker: core::marker::PhantomData<&'a mut V>,
}

impl<'a, K, V, const N: usize, I: IndexType> Iterator for IterMut<'a, K, V, N, I> {
    type Item = (&'a K, &'a mut V);
    fn next(&mut self) -> Option<Self::Item> {
        if self.curr == I::NONE {
            return None;
        }
        let idx = self.curr.as_usize();
        unsafe {
            let key = &*(*self.keys)[idx].as_ptr();
            let val = &mut *(*self.values)[idx].as_mut_ptr();
            self.curr = (*self.nexts)[idx];
            self.remaining -= 1;
            Some((key, val))
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V, const N: usize, I: IndexType> ExactSizeIterator for IterMut<'a, K, V, N, I> {}

impl<K, V, const N: usize, I: IndexType> Iterator for IntoIter<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (K, V);
    fn next(&mut self) -> Option<Self::Item> {
        // Drain from the head, so ownership is yielded in MRU-to-LRU order.
        self.cache.pop_mru_internal()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.cache.len();
        (len, Some(len))
    }
}

impl<K, V, const N: usize, I: IndexType> ExactSizeIterator for IntoIter<K, V, N, I> where
    K: Hash + Eq + Ord + Clone
{
}

impl<K, V, const N: usize, I: IndexType> IntoIterator for HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter { cache: self }
    }
}

impl<'a, K, V, const N: usize, I: IndexType> IntoIterator for &'a HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
{
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, K, V, const N: usize, I: IndexType> IntoIterator
    for &'a mut HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
{
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V, N, I>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<K, V, const N: usize, I> FromIterator<(K, V)> for HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
{
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut cache = Self::default();
        for (k, v) in iter {
            let _ = cache.put(k, v, N);
        }
        cache
    }
}

impl<K, V, const N: usize, I> Extend<(K, V)> for HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Ord + Clone,
    I: IndexType,
{
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        for (k, v) in iter {
            let _ = self.put(k, v, N);
        }
    }
}

impl<K, V, const N: usize, I: IndexType> Clone for HeaplessLinearLruCache<K, V, N, I>
where
    K: Hash + Eq + Clone,
    V: Clone,
{
    fn clone(&self) -> Self {
        // Only the occupied slots are cloned; free slots stay uninitialized,
        // and the link arrays are copied verbatim so both layouts match.
        let mut new_keys: [MaybeUninit<K>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let mut new_vals: [MaybeUninit<V>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let k = unsafe { &*self.keys[idx].as_ptr() };
            let v = unsafe { &*self.values[idx].as_ptr() };
            unsafe {
                ptr::write(new_keys[idx].as_mut_ptr(), k.clone());
                ptr::write(new_vals[idx].as_mut_ptr(), v.clone());
            }
            curr = self.nexts[idx];
        }
        Self {
            keys: new_keys,
            values: new_vals,
            prevs: self.prevs,
            nexts: self.nexts,
            head: self.head,
            tail: self.tail,
            num_entries: self.num_entries,
            free_head: self.free_head,
        }
    }
}

impl<K, V, const N: usize, I: IndexType> Drop for HeaplessLinearLruCache<K, V, N, I> {
    fn drop(&mut self) {
        // Walk the usage list and drop each live key/value; free slots were
        // never initialized and must not be dropped.
        let mut curr = self.head;
        while curr != I::NONE {
            let idx = curr.as_usize();
            let next = self.nexts[idx];
            unsafe {
                ptr::drop_in_place(self.keys[idx].as_mut_ptr());
                ptr::drop_in_place(self.values[idx].as_mut_ptr());
            }
            curr = next;
        }
    }
}

impl<K, V, const N: usize, I: IndexType> std::fmt::Debug for HeaplessLinearLruCache<K, V, N, I> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HeaplessLinearLruCache")
            .field("num_entries", &self.num_entries)
            .field("head", &self.head)
            .field("tail", &self.tail)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_linear_lookup_and_eviction() {
        let mut cache: HeaplessLinearLruCache<i32, i32, 2> = HeaplessLinearLruCache::new();
        let _ = cache.put(1, 10, 2);
        let _ = cache.put(2, 20, 2);

        // Touching key 1 makes it most recently used, so inserting key 3 at
        // capacity 2 evicts key 2 instead.
        assert_eq!(cache.get(&1), Some(&10));
        let _ = cache.put(3, 30, 2);
        assert!(cache.contains(&1));
        assert!(cache.contains(&3));
        assert!(!cache.contains(&2));
    }

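    // Additional coverage sketch for the non-promoting and removing APIs
    // (peek, peek_lru, pop_entry); added alongside the existing tests.
    #[test]
    fn test_peek_and_pop_entry() {
        let mut cache: HeaplessLinearLruCache<i32, i32, 2> = HeaplessLinearLruCache::new();
        let _ = cache.put(1, 10, 2);
        let _ = cache.put(2, 20, 2);

        // peek does not change recency, so key 1 remains the LRU entry.
        assert_eq!(cache.peek(&1), Some(&10));
        assert_eq!(cache.peek_lru(), Some((&1, &10)));

        // pop_entry removes the entry and returns the owned pair.
        assert_eq!(cache.pop_entry(&2), Some((2, 20)));
        assert_eq!(cache.len(), 1);
        assert!(!cache.contains(&2));
    }
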
    #[test]
    fn test_empty_ops() {
        let mut cache: HeaplessLinearLruCache<i32, i32, 2> = HeaplessLinearLruCache::new();
        assert!(cache.is_empty());
        assert_eq!(cache.get(&1), None);
        assert_eq!(cache.pop_lru(), None);
    }
}