intrusive_lru_cache/lib.rs
#![doc = include_str!("../README.md")]
#![no_std]
#![deny(
    missing_docs,
    clippy::missing_safety_doc,
    clippy::undocumented_unsafe_blocks,
    clippy::must_use_candidate,
    clippy::perf,
    clippy::complexity,
    clippy::suspicious
)]

extern crate alloc;

use alloc::borrow::ToOwned;
use alloc::boxed::Box;

use core::borrow::Borrow;
use core::cell::UnsafeCell;
use core::fmt;
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut, Index};
use core::ptr::NonNull;

use intrusive_collections::intrusive_adapter;
use intrusive_collections::rbtree::Entry as RBTreeEntry;
use intrusive_collections::{KeyAdapter, LinkedList, RBTree, UnsafeRef};

pub use intrusive_collections::Bound;

/// Utility function to convert a `Bound<&Q>` to `Bound<&Borrowed<Q>>`.
#[inline(always)]
const fn borrowed_bound<Q: ?Sized>(key: Bound<&Q>) -> Bound<&Borrowed<Q>> {
    match key {
        Bound::Included(key) => Bound::Included(Borrowed::new(key)),
        Bound::Excluded(key) => Bound::Excluded(Borrowed::new(key)),
        Bound::Unbounded => Bound::Unbounded,
    }
}

#[cfg(feature = "atomic")]
use intrusive_collections::{LinkedListAtomicLink as LinkedListLink, RBTreeAtomicLink as RBTreeLink};

#[cfg(not(feature = "atomic"))]
use intrusive_collections::{LinkedListLink, RBTreeLink};

// intrusive-collections does not provide a way to mutably access the value
// in a node, so we need to use `UnsafeCell` to allow for interior mutability.
#[repr(transparent)]
struct Value<V>(UnsafeCell<V>);

impl<V> Value<V> {
    #[inline(always)]
    const fn new(value: V) -> Self {
        Self(UnsafeCell::new(value))
    }

    #[inline(always)]
    const fn get(&self) -> &V {
        // SAFETY: Read-only access to value is safe in conjunction with
        // the guarantees of other methods.
        unsafe { &*self.0.get() }
    }

    // SAFETY: Only use with exclusive access to the Node
    #[allow(clippy::mut_from_ref)]
    #[inline(always)]
    const unsafe fn get_mut(&self) -> &mut V {
        &mut *self.0.get()
    }

    /// SAFETY: Only use with exclusive access to the Node
    #[inline(always)]
    const unsafe fn replace(&self, value: V) -> V {
        core::ptr::replace(self.0.get(), value)
    }

    #[inline(always)]
    fn into_inner(self) -> V {
        self.0.into_inner()
    }
}

// SAFETY: Value is Send/Sync if V is Send/Sync,
// because the `Value<V>` is only accessed with exclusive access to the Node.
unsafe impl<V> Send for Value<V> where V: Send {}

// SAFETY: Value is Send/Sync if V is Send/Sync,
// because the `Value<V>` is only accessed with exclusive access to the Node.
unsafe impl<V> Sync for Value<V> where V: Sync {}

struct Node<K, V> {
    list_link: LinkedListLink,
    tree_link: RBTreeLink,
    key: K,
    value: Value<V>,
}

impl<K, V> Node<K, V> {
    #[inline(always)]
    fn new(key: K, value: V) -> UnsafeRef<Self> {
        UnsafeRef::from_box(Box::new(Self {
            list_link: LinkedListLink::new(),
            tree_link: RBTreeLink::new(),
            key,
            value: Value::new(value),
        }))
    }

    /// Assumes the node is not in any collections, and extracts the key/value
    /// when deallocating it.
    ///
    /// SAFETY: Only use after ensuring the node is not in any collections
    #[inline(always)]
    unsafe fn unwrap(this: UnsafeRef<Self>) -> (K, V) {
        let Node { key, value, .. } = *UnsafeRef::into_box(this);

        (key, value.into_inner())
    }
}

intrusive_adapter!(NodeListAdapter<K, V> = UnsafeRef<Node<K, V>>: Node<K, V> { list_link: LinkedListLink });
intrusive_adapter!(NodeTreeAdapter<K, V> = UnsafeRef<Node<K, V>>: Node<K, V> { tree_link: RBTreeLink });

// Because KeyAdapter returns a reference, and `find` uses the returned type as `K`,
// I ran into issues where `&K: Borrow<Q>` was not satisfied. Therefore, we need
// to convince the compiler that some `Q` can be borrowed from `&K` by using a
// transparent wrapper type for both halves, and casting `&Q` to `&Borrowed<Q>`.

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
struct Key<K: ?Sized>(K);

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
struct Borrowed<Q: ?Sized>(Q);

impl<'a, Q: ?Sized> Borrowed<Q> {
    #[inline(always)]
    const fn new(value: &'a Q) -> &'a Self {
        // SAFETY: &Q == &Borrowed<Q> due to transparent repr
        unsafe { core::mem::transmute(value) }
    }
}

// Magic that allows `&K: Borrow<Q>` to be satisfied
impl<K, Q: ?Sized> Borrow<Borrowed<Q>> for Key<&K>
where
    K: Borrow<Q>,
{
    #[inline(always)]
    fn borrow(&self) -> &Borrowed<Q> {
        Borrowed::new(self.0.borrow())
    }
}

impl<'a, K: 'a, V> KeyAdapter<'a> for NodeTreeAdapter<K, V> {
    type Key = Key<&'a K>; // Allows `Key<&K>: Borrow<Borrowed<Q>>`

    #[inline(always)]
    fn get_key(&self, value: &'a Node<K, V>) -> Self::Key {
        // SAFETY: &K == Key<&K> == &Key<K> due to transparent repr
        unsafe { core::mem::transmute(&value.key) }
    }
}

/// LRU Cache implementation using intrusive collections.
///
/// This cache uses an [`intrusive_collections::LinkedList`] to maintain the LRU order,
/// and an [`intrusive_collections::RBTree`] to allow for efficient lookups by key,
/// while maintaining only one allocation per key-value pair. Unfortunately, this
/// is a linked structure, so cache locality is likely poor, but memory usage
/// and flexibility are improved.
///
/// The cache is unbounded by default, but can be limited to a maximum capacity.
///
/// The `smart_*` methods allow for reading or updating the LRU order at the same time,
/// based on how the value is accessed. The `get` method always updates the LRU order,
/// and the `peek_*` methods allow for reading without updating the LRU order.
180///
181/// An overarching principle here is that the 'Used' in Lease-Recently-Used
182/// is defined by the mutable access to the value. This allows for a more flexible
183/// API, where the LRU order can be updated only when necessary, and not on every
184/// read access.
185///
186/// # Example
187/// ```rust
188/// use intrusive_lru_cache::LRUCache;
189///
190/// let mut lru: LRUCache<&'static str, &'static str> = LRUCache::default();
191///
192/// lru.insert("a", "1");
193/// lru.insert("b", "2");
194/// lru.insert("c", "3");
195///
196/// let _ = lru.get("b"); // updates LRU order
197///
198/// assert_eq!(lru.pop(), Some(("a", "1")));
199/// assert_eq!(lru.pop(), Some(("c", "3")));
200/// assert_eq!(lru.pop(), Some(("b", "2")));
201/// assert_eq!(lru.pop(), None);
202/// ```
203///
204/// # Notes
205/// - Cloning preserves LRU order.
206/// - If the `atomic` crate feature is enabled,
207/// the cache is thread-safe if `K` and `V` are `Send`/`Sync`.
208#[must_use]
209pub struct LRUCache<K, V> {
210 list: LinkedList<NodeListAdapter<K, V>>,
211 tree: RBTree<NodeTreeAdapter<K, V>>,
212 size: usize,
213 max_capacity: usize,
214}
215
216impl<K, V> fmt::Debug for LRUCache<K, V>
217where
218 K: fmt::Debug,
219 V: fmt::Debug,
220{
221 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
222 f.debug_map().entries(self.iter_peek_ord()).finish()
223 }
224}
225
226impl<K, V> LRUCache<K, V> {
227 /// Defines the size of the internal node structure in bytes.
228 ///
229 /// The memory footprint of the entire cache can then be calculated as:
230 /// ```rust,ignore
231 /// LRUCache::<K, V>::NODE_SIZE * cache.len()
232 /// + size_of::<LRUCache<K, V>>() // size of the cache itself
233 /// ```
234 /// or via [`memory_footprint`](Self::memory_footprint).
235 ///
236 /// This is a nice benefit of intrusive collections, as it allows for a single
237 /// allocation per key-value pair.
238 pub const NODE_SIZE: usize = size_of::<Node<K, V>>();
239
240 /// Returns the total bytes consumed by the cache, including
241 /// all allocations, internal structures, and the cache itself.
242 #[must_use]
243 #[inline]
244 pub const fn memory_footprint(&self) -> usize {
245 Self::NODE_SIZE * self.size + size_of::<Self>()
246 }
247
248 /// Creates a new unbounded LRU cache.
249 ///
250 /// This cache has no limit on the number of entries it can hold,
251 /// so entries must be manually removed via [`pop`](Self::pop),
252 /// or you can use [`set_max_capacity`](Self::set_max_capacity) to set a limit.
253 #[inline]
254 pub fn unbounded() -> Self {
255 Self::new(usize::MAX)
256 }
257
258 /// Creates a new LRU cache with a maximum capacity, after which
259 /// old entries will be evicted to make room for new ones.
260 ///
261 /// This does not preallocate any memory, only sets an upper limit.
262 pub fn new(max_capacity: usize) -> Self {
263 Self {
264 list: LinkedList::new(NodeListAdapter::new()),
265 tree: RBTree::new(NodeTreeAdapter::new()),
266 size: 0,
267 max_capacity,
268 }
269 }
270
271 #[inline(always)]
272 const fn get_ptr(&mut self) -> NonNull<Self> {
273 // SAFETY: Taking pointers from references is always guaranteed to be non-null
274 unsafe { NonNull::new_unchecked(&mut *self) }
275 }
276}
277
278impl<K, V> Default for LRUCache<K, V> {
279 #[inline]
280 fn default() -> Self {
281 Self::unbounded()
282 }
283}
284
285impl<K, V> Clone for LRUCache<K, V>
286where
287 K: Clone + Ord + 'static,
288 V: Clone,
289{
290 fn clone(&self) -> Self {
291 let mut new = Self::new(self.max_capacity);
292
293 // preserves the LRU ordering by placing the oldest in first
294 for (key, value) in self.iter_peek_lru().rev() {
295 new.insert(key.clone(), value.clone());
296 }
297
298 new
299 }
300}
301
302/// Bumps a node to the front of the list, only if it's not already there.
303#[inline]
304fn bump<K, V>(list: &mut LinkedList<NodeListAdapter<K, V>>, node: &Node<K, V>) {
305 // SAFETY: The list is guaranteed to be non-empty by virtue of `node` existing
306 let front = unsafe { list.front().get().unwrap_unchecked() };
307
308 // don't bother if it's already at the front
309 if core::ptr::eq(node, front) {
310 return;
311 }
312
313 // SAFETY: Cursor created from a known valid pointer
314 let node = unsafe { list.cursor_mut_from_ptr(node).remove().unwrap_unchecked() };
315
316 list.push_front(node);
317}
318
319/// Drops every element in the list, trying to be as efficient as possible.
320///
321/// Using front.remove() in a loop reconnects the linked list links, which is unnecessary.
322#[inline]
323fn clear_list<K, V>(list: &mut LinkedList<NodeListAdapter<K, V>>) {
324 let mut front = list.front();
325
326 // drop pointer as we iterate through the list, only after we've passed it
327 while let Some(ptr) = front.clone_pointer() {
328 front.move_next();
329
330 // SAFETY: While the node is technically not removed
331 // from the list, it's not accessible anymore after move_next
332 let _ = unsafe { Node::unwrap(ptr) };
333 }
334
335 // ensure LinkedList::drop does nothing
336 list.fast_clear();
337}
338
339impl<K, V> LRUCache<K, V>
340where
341 K: Ord + 'static,
342{
343 /// Returns a reference to the value corresponding to the key,
344 /// without updating the LRU list.
345 ///
346 /// This is an `O(log n)` operation.
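    ///
    /// # Example
    /// A small usage sketch (arbitrary example data); `peek` leaves the LRU order untouched:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    /// lru.insert("b", 2);
    ///
    /// // peeking does not bump "a", so it remains the oldest entry
    /// assert_eq!(lru.peek("a"), Some(&1));
    /// assert_eq!(lru.pop(), Some(("a", 1)));
    /// ```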
    pub fn peek<'a, Q>(&'a self, key: &Q) -> Option<&'a V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.tree
            .find(Borrowed::new(key))
            .get()
            .map(|node| node.value.get())
    }

    /// Returns a reference to the most recently used key-value pair,
    /// without updating the LRU order further.
    ///
    /// This is an `O(1)` operation.
    #[must_use]
    #[inline]
    pub fn peek_newest(&self) -> Option<(&K, &V)> {
        self.list.front().get().map(|node| (&node.key, node.value.get()))
    }

    /// Returns a reference to the least recently used key-value pair,
    /// without updating the LRU order further.
    ///
    /// This is an `O(1)` operation.
    #[must_use]
    #[inline]
    pub fn peek_oldest(&self) -> Option<(&K, &V)> {
        self.list.back().get().map(|node| (&node.key, node.value.get()))
    }

    /// Returns a reference to the most recently used key-value pair.
    ///
    /// Because this is already the most recently used, it's free to be accessed without
    /// any additional cost or additional modification of the LRU order.
    ///
    /// This is an `O(1)` operation.
    #[inline]
    pub fn get_newest(&mut self) -> Option<(&K, &mut V)> {
        self.list
            .front_mut()
            .into_ref()
            // SAFETY: We have `&mut self`
            .map(|node| unsafe { (&node.key, node.value.get_mut()) })
    }

    // SAFETY: The caller must guarantee that the cache is non-empty
    #[inline(always)]
    unsafe fn get_newest_unchecked(&mut self) -> (&K, &mut V) {
        let node = self.list.front_mut().into_ref().unwrap_unchecked();

        (&node.key, node.value.get_mut())
    }

    /// Returns a reference to the value corresponding to the key,
    /// and bumps the key to the front of the LRU list.
    ///
    /// This is an `O(log n)` operation.
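    ///
    /// # Example
    /// A small usage sketch (arbitrary example data); `get` bumps the accessed entry:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    /// lru.insert("b", 2);
    ///
    /// // accessing "a" marks it as the most recently used
    /// *lru.get("a").unwrap() += 10;
    ///
    /// assert_eq!(lru.pop(), Some(("b", 2)));
    /// assert_eq!(lru.pop(), Some(("a", 11)));
    /// ```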
    pub fn get<'a, Q>(&'a mut self, key: &Q) -> Option<&'a mut V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let node = self.tree.find(Borrowed::new(key)).get()?;

        bump(&mut self.list, node);

        // SAFETY: We have `&mut self`
        Some(unsafe { node.value.get_mut() })
    }

    /// If the key is present in the cache, it is bumped to the
    /// front of the LRU list as the most recently used. This
    /// will cause it to be the last to be evicted.
    ///
    /// Has no effect if the key is not present.
    ///
    /// This is an `O(log n)` operation.
    pub fn promote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        if let Some(node) = self.tree.find(Borrowed::new(key)).get() {
            bump(&mut self.list, node);
        }
    }

    /// If the key is present in the cache, it is demoted to the
    /// back of the LRU list as the least recently used. This
    /// will cause it to be evicted first if the cache is full.
    ///
    /// Has no effect if the key is not present.
    ///
    /// This is an `O(log n)` operation.
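    ///
    /// # Example
    /// A small usage sketch (and, symmetrically, [`promote`](Self::promote)
    /// moves an entry to the front instead):
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    /// lru.insert("b", 2);
    ///
    /// // "a" would normally be evicted first; demote "b" instead
    /// lru.demote("b");
    ///
    /// assert_eq!(lru.pop(), Some(("b", 2)));
    /// assert_eq!(lru.pop(), Some(("a", 1)));
    /// ```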
    pub fn demote<Q>(&mut self, key: &Q)
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        if let Some(node) = self.tree.find(Borrowed::new(key)).get() {
            // same logic as bump, but for the back, and only used here

            // SAFETY: The list is guaranteed to be non-empty by virtue of `node` existing
            let back = unsafe { self.list.back().get().unwrap_unchecked() };

            // don't bother if it's already at the back
            if core::ptr::eq(node, back) {
                return;
            }

            // SAFETY: Cursor created from a known valid pointer
            let node = unsafe { self.list.cursor_mut_from_ptr(node).remove().unwrap_unchecked() };

            self.list.push_back(node);
        }
    }

    /// Returns a smart reference to the value corresponding to the key,
    /// allowing for reading and updating at the same time,
    /// only updating the LRU on mutable access.
    ///
    /// This does not immediately update the LRU order; only
    /// when the value is accessed via [`SmartEntry::get`] or
    /// [`SmartEntry::deref_mut`].
    ///
    /// Immutable access via [`SmartEntry::peek`] or [`SmartEntry::deref`]
    /// does not update the LRU order.
    ///
    /// This is an `O(log n)` operation.
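    ///
    /// # Example
    /// A small usage sketch with arbitrary example keys and values:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    /// lru.insert("b", 2);
    ///
    /// let mut entry = lru.smart_get("a").unwrap();
    ///
    /// // immutable access does not bump "a"
    /// assert_eq!(*entry, 1);
    ///
    /// // mutable access bumps "a" to most recently used
    /// *entry += 10;
    /// drop(entry);
    ///
    /// assert_eq!(lru.pop(), Some(("b", 2)));
    /// assert_eq!(lru.pop(), Some(("a", 11)));
    /// ```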
    pub fn smart_get<'a, Q>(&'a mut self, key: &Q) -> Option<SmartEntry<'a, K, V>>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        Some(SmartEntry::new(
            self.get_ptr(),
            self.tree.find(Borrowed::new(key)).get()?,
        ))
    }

    /// Returns a smart reference to the least recently used key-value pair,
    /// allowing for reading and updating at the same time,
    /// only updating the LRU on mutable access.
    ///
    /// This does not immediately update the LRU order; only
    /// when the value is accessed via [`SmartEntry::get`] or
    /// [`SmartEntry::deref_mut`].
    ///
    /// This is an `O(1)` operation.
    pub fn smart_get_oldest(&mut self) -> Option<SmartEntry<'_, K, V>> {
        Some(SmartEntry::new(self.get_ptr(), self.list.back_mut().into_ref()?))
    }

    /// Returns an iterator over the key-value pairs in the cache described by the range,
    /// _without_ updating the LRU order. The order of iteration is dependent on the order
    /// of the keys in the tree via `Ord`.
    ///
    /// The range follows `[min, max)`, where `min` is inclusive and `max` is exclusive.
    ///
    /// This is an `O(log n)` operation, though the iterator itself is `O(1)` to increment.
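    ///
    /// # Example
    /// A small usage sketch over string keys:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    /// lru.insert("b", 2);
    /// lru.insert("c", 3);
    ///
    /// // yields keys in ["a", "c"), i.e. "a" and "b", in key order
    /// let pairs: Vec<_> = lru.peek_range("a", "c").collect();
    /// assert_eq!(pairs, [(&"a", &1), (&"b", &2)]);
    /// ```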
    pub fn peek_range<'a, MIN, MAX>(
        &'a self,
        min: &MIN,
        max: &MAX,
    ) -> impl DoubleEndedIterator<Item = (&'a K, &'a V)>
    where
        K: Borrow<MIN> + Borrow<MAX>,
        MIN: Ord + ?Sized,
        MAX: Ord + ?Sized,
    {
        self.tree
            .range(
                Bound::Included(Borrowed::new(min)),
                Bound::Excluded(Borrowed::new(max)),
            )
            .map(move |node| (&node.key, node.value.get()))
    }

    /// Returns an iterator over the key-value pairs in the cache described by the range,
    /// and **updates the LRU order** as they are yielded. The order of iteration is dependent
    /// on the order of the keys in the tree via `Ord`.
    ///
    /// The range follows `[min, max)`, where `min` is inclusive and `max` is exclusive.
    ///
    /// This is an `O(log n)` operation, though the iterator itself is `O(1)` to increment.
    pub fn range<'a, MIN, MAX>(
        &'a mut self,
        min: &MIN,
        max: &MAX,
    ) -> impl DoubleEndedIterator<Item = (&'a K, &'a mut V)>
    where
        K: Borrow<MIN> + Borrow<MAX>,
        MIN: Ord + ?Sized,
        MAX: Ord + ?Sized,
    {
        let LRUCache { tree, list, .. } = self;

        tree.range(
            Bound::Included(Borrowed::new(min)),
            Bound::Excluded(Borrowed::new(max)),
        )
        .map(move |node| {
            bump(list, node);

            // SAFETY: We have `&mut self`
            (&node.key, unsafe { node.value.get_mut() })
        })
    }

    /// Returns an iterator over the key-value pairs in the cache described by the range,
    /// which allows for reading and updating the LRU order at the same time, only
    /// bumping the LRU order when the value is accessed mutably via either [`SmartEntry::get`]
    /// or [`SmartEntry::deref_mut`].
    ///
    /// This is an `O(log n)` operation, though the iterator itself is `O(1)` to increment.
    pub fn smart_range<'a, MIN, MAX>(
        &'a mut self,
        min: &MIN,
        max: &MAX,
    ) -> impl DoubleEndedIterator<Item = SmartEntry<'a, K, V>>
    where
        K: Borrow<MIN> + Borrow<MAX>,
        MIN: Ord + ?Sized,
        MAX: Ord + ?Sized,
    {
        let cache = self.get_ptr();

        let LRUCache { tree, .. } = self;

        tree.range(
            Bound::Included(Borrowed::new(min)),
            Bound::Excluded(Borrowed::new(max)),
        )
        .map(move |node| SmartEntry::new(cache, node))
    }

    /// Returns a [`SmartEntry`] to the last key-value pair whose key is below
    /// the given bound. This allows for reading and updating the LRU order
    /// at the same time, only bumping the LRU order when the value is accessed
    /// mutably via either [`SmartEntry::get`] or [`SmartEntry::deref_mut`].
    ///
    /// This is an `O(log n)` operation.
    ///
    /// Only a "smart" version of this method is provided, as neither the key nor the value
    /// is exactly known, and the complexity warrants a complete solution.
    pub fn smart_upper_bound<'a, Q>(&'a mut self, key: Bound<&Q>) -> Option<SmartEntry<'a, K, V>>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        Some(SmartEntry::new(
            self.get_ptr(),
            self.tree.upper_bound(borrowed_bound(key)).get()?,
        ))
    }

    /// Returns a [`SmartEntry`] to the first key-value pair whose key is above
    /// the given bound. This allows for reading and updating the LRU order
    /// at the same time, only bumping the LRU order when the value is accessed
    /// mutably via either [`SmartEntry::get`] or [`SmartEntry::deref_mut`].
    ///
    /// This is an `O(log n)` operation.
    ///
    /// Only a "smart" version of this method is provided, as neither the key nor the value
    /// is exactly known, and the complexity warrants a complete solution.
    pub fn smart_lower_bound<'a, Q>(&'a mut self, key: Bound<&Q>) -> Option<SmartEntry<'a, K, V>>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        Some(SmartEntry::new(
            self.get_ptr(),
            self.tree.lower_bound(borrowed_bound(key)).get()?,
        ))
    }

    /// Inserts a key-value pair into the cache, replacing and returning
    /// the existing value if the key was already present.
    /// In both cases, the entry is moved to the front of the LRU list.
    ///
    /// This is an `O(log n)` operation.
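    ///
    /// # Example
    /// A small sketch with arbitrary example data:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    ///
    /// assert_eq!(lru.insert("a", 1), None);
    /// assert_eq!(lru.insert("a", 2), Some(1)); // replaced, old value returned
    /// assert_eq!(lru.len(), 1);
    /// ```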
    pub fn insert(&mut self, key: K, value: V) -> Option<V> {
        match self.tree.entry(Borrowed::new(&key)) {
            // SAFETY: We treat the cursor as a mutable reference, and only use known valid pointers
            RBTreeEntry::Occupied(cursor) => unsafe {
                let node = cursor.get().unwrap_unchecked();

                // NOTE: Treat cursor/node as if it were mutable for value.replace
                // since we can't ever actually acquire a mutable reference to the node
                // as per the restrictions of `intrusive_collections`
                let old_value = node.value.replace(value);

                bump(&mut self.list, node);

                Some(old_value)
            },
            RBTreeEntry::Vacant(cursor) => {
                let node = Node::new(key, value);

                cursor.insert(node.clone());
                self.list.push_front(node);

                self.size += 1;
                self.shrink();

                None
            }
        }
    }

    /// Returns true if the cache contains the key.
    ///
    /// This does not update the LRU order.
    ///
    /// This is an `O(log n)` operation.
    pub fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        !self.tree.find(Borrowed::new(key)).is_null()
    }

    /// Removes the value corresponding to the key from the cache,
    /// returning it if it was present. This has no effect on the order
    /// of other entries in the LRU list.
    ///
    /// This is an `O(log n)` operation.
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let node = self.tree.find_mut(Borrowed::new(key)).remove()?;

        // SAFETY: Cursor created from a known valid pointer
        let _ = unsafe { self.list.cursor_mut_from_ptr(&*node).remove().unwrap_unchecked() };

        self.size -= 1;

        // SAFETY: node is removed from both the tree and list
        Some(unsafe { Node::unwrap(node).1 })
    }

    /// Attempts to get a mutable reference to the value corresponding to the key,
    /// and if it's not present, inserts a new key-value pair with the value returned
    /// by the provided closure. The entry is then moved to the front of the LRU list.
    ///
    /// This is similar to [`insert_or_get`](Self::insert_or_get), but for cases
    /// where the value is likely to be found in the cache already.
    ///
    /// See also [`get_or_insert2`](Self::get_or_insert2) for a version that allows for a borrowed key type.
    ///
    /// This is an `O(log n)` operation.
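    ///
    /// # Example
    /// A small sketch of both outcomes, with arbitrary example data:
    /// ```rust
    /// # use intrusive_lru_cache::{LRUCache, GetOrInsertResult};
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    ///
    /// // not present, so the closure runs and the value is inserted
    /// assert!(matches!(lru.get_or_insert("a", || 1), GetOrInsertResult::Inserted(_)));
    ///
    /// // present, so the unused key is handed back and the closure is not run
    /// assert!(matches!(lru.get_or_insert("a", || 2), GetOrInsertResult::Existed(_, "a")));
    ///
    /// assert_eq!(lru.peek("a"), Some(&1));
    /// ```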
    pub fn get_or_insert<F>(&mut self, key: K, f: F) -> GetOrInsertResult<K, V>
    where
        F: FnOnce() -> V,
    {
        let key = match self.tree.entry(Borrowed::new(&key)) {
            // SAFETY: Cursor is a valid pointer here in both the tree and list
            RBTreeEntry::Occupied(cursor) => unsafe {
                bump(&mut self.list, cursor.get().unwrap_unchecked());

                Some(key)
            },
            RBTreeEntry::Vacant(cursor) => {
                let node = Node::new(key, f());

                cursor.insert(node.clone());
                self.list.push_front(node);

                self.size += 1;
                self.shrink();

                None
            }
        };

        // SAFETY: We have `&mut self` and the list is valid given the above logic
        // the element we want was _just_ repositioned to the front
        let v = unsafe { self.get_newest_unchecked().1 };

        match key {
            Some(key) => GetOrInsertResult::Existed(v, key),
            None => GetOrInsertResult::Inserted(v),
        }
    }

    /// Like [`get_or_insert`](Self::get_or_insert), but allows for a different borrowed key
    /// type, so long as it can be made into an owned key of type `K`.
    ///
    /// Because the key is borrowed initially, we can return `&mut V` instead of
    /// `GetOrInsertResult<K, V>`, as the key is not consumed.
    ///
    /// This is an `O(log n)` operation.
    ///
    /// # Example
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru = LRUCache::<String, String>::unbounded();
    ///
    /// // note that the key is just an `&str`
    /// let v = lru.get_or_insert2("a", || "Hello".to_owned());
    /// v.push_str(", World!");
    ///
    /// assert_eq!(lru.pop().unwrap(), ("a".to_owned(), "Hello, World!".to_owned()));
    /// ```
    pub fn get_or_insert2<Q, F>(&mut self, key: &Q, f: F) -> &mut V
    where
        K: Borrow<Q>,
        Q: ToOwned<Owned = K> + Ord + ?Sized,
        F: FnOnce() -> V,
    {
        match self.tree.entry(Borrowed::new(key)) {
            // SAFETY: Cursor is a valid pointer here in both the tree and list
            RBTreeEntry::Occupied(cursor) => unsafe {
                bump(&mut self.list, cursor.get().unwrap_unchecked());
            },
            RBTreeEntry::Vacant(cursor) => {
                let node = Node::new(key.to_owned(), f());

                cursor.insert(node.clone());
                self.list.push_front(node);

                self.size += 1;
                self.shrink();
            }
        }

        // SAFETY: We have `&mut self` and the list is valid given the above logic
        // the element we want was _just_ repositioned to the front
        unsafe { self.get_newest_unchecked().1 }
    }

    /// Inserts a key-value pair into the cache only if it wasn't already present;
    /// otherwise, updates the LRU order for this element and returns a reference to the value.
    ///
    /// The returned value contains a mutable reference to the value, and if the key already existed,
    /// it also contains the key and value that were passed in.
    ///
    /// This is similar to [`get_or_insert`](Self::get_or_insert), but for cases
    /// where insertion is expected.
    ///
    /// This is an `O(log n)` operation.
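    ///
    /// # Example
    /// A small sketch of both outcomes, with arbitrary example data:
    /// ```rust
    /// # use intrusive_lru_cache::{LRUCache, InsertOrGetResult};
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    ///
    /// assert!(matches!(lru.insert_or_get("a", 1), InsertOrGetResult::Inserted(_)));
    ///
    /// // already present: the key and value are handed back untouched
    /// let res = lru.insert_or_get("a", 2);
    /// assert!(matches!(res, InsertOrGetResult::Existed(_, "a", 2)));
    ///
    /// assert_eq!(lru.peek("a"), Some(&1));
    /// ```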
    pub fn insert_or_get(&mut self, key: K, value: V) -> InsertOrGetResult<'_, K, V> {
        let kv = match self.tree.entry(Borrowed::new(&key)) {
            // SAFETY: Cursor is a valid pointer here in both the tree and list
            RBTreeEntry::Occupied(cursor) => unsafe {
                let node = cursor.get().unwrap_unchecked();

                bump(&mut self.list, node);

                Some((key, value))
            },
            RBTreeEntry::Vacant(cursor) => {
                let node = Node::new(key, value);

                cursor.insert(node.clone());
                self.list.push_front(node);

                self.size += 1;

                self.shrink();

                None
            }
        };

        // SAFETY: We have `&mut self` and the list is valid given the above logic
        // the element we want was _just_ repositioned to the front
        let v = unsafe { self.get_newest_unchecked().1 };

        match kv {
            Some((key, value)) => InsertOrGetResult::Existed(v, key, value),
            None => InsertOrGetResult::Inserted(v),
        }
    }
}

/// The result of [`LRUCache::get_or_insert`](LRUCache::get_or_insert).
///
/// If inserted, it returns a reference to the newly inserted value.
/// If the key already existed, it returns a reference to the existing value, and the provided key.
#[derive(Debug, PartialEq, Eq)]
pub enum GetOrInsertResult<'a, K, V> {
    /// Element was inserted, key and value were consumed.
    Inserted(&'a mut V),

    /// Element already existed at the given key, so a reference
    /// to the existing value is returned, along with the given key.
    Existed(&'a mut V, K),
}

/// The result of [`LRUCache::insert_or_get`](LRUCache::insert_or_get).
///
/// If inserted, it returns a reference to the newly inserted value.
/// If the key already existed, it returns a reference to the existing value, the key and the value.
#[derive(Debug, PartialEq, Eq)]
pub enum InsertOrGetResult<'a, K, V> {
    /// Element was inserted, key and value were consumed.
    Inserted(&'a mut V),

    /// Element already existed at the given key, so a reference
    /// to the existing value is returned, along with the given key and value.
    Existed(&'a mut V, K, V),
}

impl<'a, K, V> GetOrInsertResult<'a, K, V> {
    /// Consumes the result and returns a reference to the value.
    ///
    /// This will drop the key if it existed.
    #[inline(always)]
    pub fn into_inner(self) -> &'a mut V {
        match self {
            Self::Inserted(value) => value,
            Self::Existed(value, _) => value,
        }
    }
}

impl<'a, K, V> InsertOrGetResult<'a, K, V> {
    /// Consumes the result and returns a reference to the value.
    ///
    /// This will drop the key and value if they existed.
    #[inline(always)]
    pub fn into_inner(self) -> &'a mut V {
        match self {
            Self::Inserted(value) => value,
            Self::Existed(value, _, _) => value,
        }
    }
}

impl<K, V> Deref for GetOrInsertResult<'_, K, V> {
    type Target = V;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        match self {
            Self::Inserted(value) => value,
            Self::Existed(value, _) => value,
        }
    }
}

impl<K, V> Deref for InsertOrGetResult<'_, K, V> {
    type Target = V;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        match self {
            Self::Inserted(value) => value,
            Self::Existed(value, _, _) => value,
        }
    }
}

impl<K, V> DerefMut for GetOrInsertResult<'_, K, V> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self {
            Self::Inserted(value) => value,
            Self::Existed(value, _) => value,
        }
    }
}

impl<K, V> DerefMut for InsertOrGetResult<'_, K, V> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self {
            Self::Inserted(value) => value,
            Self::Existed(value, _, _) => value,
        }
    }
}

impl<K, V> LRUCache<K, V> {
    /// Removes and returns the least recently used key-value pair.
    ///
    /// This is an `O(1)` operation.
    pub fn pop(&mut self) -> Option<(K, V)> {
        let node = self.list.pop_back()?;

        // SAFETY: Cursor created from a known valid pointer
        let _ = unsafe { self.tree.cursor_mut_from_ptr(&*node).remove().unwrap_unchecked() };

        self.size -= 1;

        // SAFETY: node is removed from both the tree and list
        Some(unsafe { Node::unwrap(node) })
    }

    // TODO: Add a pop_while function that takes a filter callback and returns an iterator

    /// Removes and returns the highest `Ord` key-value pair.
    ///
    /// This is an `O(1)` operation.
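    ///
    /// # Example
    /// A small sketch (see also [`pop_lowest`](Self::pop_lowest)); entries
    /// are chosen by key order, not recency:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<u32, &'static str> = LRUCache::default();
    /// lru.insert(1, "one");
    /// lru.insert(3, "three");
    /// lru.insert(2, "two");
    ///
    /// assert_eq!(lru.pop_highest(), Some((3, "three")));
    /// assert_eq!(lru.pop_lowest(), Some((1, "one")));
    /// ```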
    pub fn pop_highest(&mut self) -> Option<(K, V)> {
        let node = self.tree.back_mut().remove()?;

        // SAFETY: Cursor created from a known valid pointer
        let _ = unsafe { self.list.cursor_mut_from_ptr(&*node).remove().unwrap_unchecked() };

        self.size -= 1;

        // SAFETY: node is removed from both the tree and list
        Some(unsafe { Node::unwrap(node) })
    }

    /// Removes and returns the lowest `Ord` key-value pair.
    ///
    /// This is an `O(1)` operation.
    pub fn pop_lowest(&mut self) -> Option<(K, V)> {
        let node = self.tree.front_mut().remove()?;

        // SAFETY: Cursor created from a known valid pointer
        let _ = unsafe { self.list.cursor_mut_from_ptr(&*node).remove().unwrap_unchecked() };

        self.size -= 1;

        // SAFETY: node is removed from both the tree and list
        Some(unsafe { Node::unwrap(node) })
    }
}

impl<K, V> LRUCache<K, V> {
    /// Sets the maximum capacity of the cache.
    ///
    /// **This does not remove any entries**, but will cause the cache to evict
    /// entries when inserting new ones if the length exceeds the new capacity.
    ///
    /// Use [`shrink`](Self::shrink) to manually trigger removal of entries
    /// to meet the new capacity.
    #[inline(always)]
    pub fn set_max_capacity(&mut self, max_capacity: usize) {
        self.max_capacity = max_capacity;
    }

    /// Returns the maximum capacity of the cache, which is the
    /// point at which the cache will start evicting older entries.
    #[inline(always)]
    #[must_use]
    pub const fn capacity(&self) -> usize {
        self.max_capacity
    }

    /// Sets the maximum capacity of the cache, and removes the oldest entries
    /// until the length is less than or equal to the new capacity.
    ///
    /// If the new capacity is greater than the current length, no entries are removed.
    #[inline]
    pub fn resize(&mut self, max_capacity: usize) {
        self.set_max_capacity(max_capacity);
        self.shrink();
    }

    /// Clears the cache, removing all key-value pairs.
    pub fn clear(&mut self) {
        self.tree.fast_clear();

        clear_list(&mut self.list);
    }

    /// Removes the oldest entries from the cache until the length is less than or equal to the maximum capacity.
    pub fn shrink(&mut self) {
        while self.size > self.max_capacity {
            let _ = self.pop();
        }
    }

    /// Removes up to `amount` of the oldest entries from the cache.
    pub fn shrink_by(&mut self, amount: usize) {
        for _ in 0..amount {
            if self.pop().is_none() {
                break;
            }
        }
    }

    /// Removes the oldest entries from the cache until the length is less than or equal to the maximum capacity,
    /// and calls the provided closure with the removed key-value pairs.
    ///
    /// # Example
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, &'static str> = LRUCache::default();
    ///
    /// lru.insert("a", "1");
    /// lru.insert("b", "2");
    /// lru.insert("c", "3");
    ///
    /// lru.set_max_capacity(1);
    ///
    /// let mut removed = Vec::new();
    ///
    /// lru.shrink_with(|key, value| {
    ///     removed.push((key, value));
    /// });
    ///
    /// assert_eq!(removed, vec![("a", "1"), ("b", "2")]);
    /// ```
    pub fn shrink_with<F>(&mut self, mut cb: F)
    where
        F: FnMut(K, V),
    {
        while self.size > self.max_capacity {
            let Some((key, value)) = self.pop() else {
                break;
            };

            cb(key, value);
        }
    }

    /// Removes up to `amount` of the oldest entries from the cache,
    /// and calls the provided closure with the removed key-value pairs.
    pub fn shrink_by_with<F>(&mut self, amount: usize, mut cb: F)
    where
        F: FnMut(K, V),
    {
        for _ in 0..amount {
            let Some((key, value)) = self.pop() else {
                break;
            };

            cb(key, value);
        }
    }

    /// Returns the number of key-value pairs in the cache.
    #[inline(always)]
    #[must_use]
    pub const fn len(&self) -> usize {
        self.size
    }

    /// Returns `true` if the cache is empty.
    #[inline(always)]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        debug_assert_eq!(self.size == 0, self.list.is_empty());

        self.size == 0
    }

    /// Returns `true` if the cache is full.
    ///
    /// Attempting to insert a new key-value pair will cause the cache to evict at least
    /// one entry to make room for the new one.
    #[inline(always)]
    #[must_use]
    pub fn is_full(&self) -> bool {
        self.size >= self.max_capacity
    }

    /// Returns an iterator over the keys in the cache,
    /// in key `Ord` order.
    ///
    /// NOTE: This does _not_ update the LRU order.
    #[must_use]
    pub fn keys(&self) -> impl DoubleEndedIterator<Item = &K> {
        self.tree.iter().map(|node| &node.key)
    }

    /// Returns an iterator over immutable key-value pairs in the cache,
    /// in order of most recently used to least recently used.
    ///
    /// NOTE: This does _not_ update the LRU order.
    #[must_use]
    pub fn iter_peek_lru(&self) -> impl DoubleEndedIterator<Item = (&K, &V)> {
        self.list.iter().map(|node| (&node.key, node.value.get()))
    }

    /// Returns an iterator over immutable key-value pairs in the cache,
    /// in key `Ord` order.
    ///
    /// NOTE: This does _not_ update the LRU order.
    #[must_use]
    pub fn iter_peek_ord(&self) -> impl DoubleEndedIterator<Item = (&K, &V)> {
        self.tree.iter().map(|node| (&node.key, node.value.get()))
    }

    /// Returns an iterator yielding key-value pairs in the cache as [`SmartEntry`],
    /// in the order determined by the `Ord` implementation of the keys. This allows for
    /// reading and updating the LRU order at the same time, only updating the LRU order
    /// when the value is accessed mutably.
    ///
    /// Entries yielded do not immediately update the LRU order; only when the value is accessed
    /// via [`SmartEntry::get`] or [`SmartEntry::deref_mut`].
    ///
    /// Note that [`SmartEntry`] instances yielded by this iterator do not support [`SmartEntry::remove`]. It's
    /// likely undefined behavior to remove entries while iterating over them, so we prevent that.
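    ///
    /// # Example
    /// A small sketch: only entries touched mutably are bumped.
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    /// lru.insert("b", 2);
    ///
    /// for mut entry in lru.smart_iter() {
    ///     if *entry.key() == "a" {
    ///         // mutable access bumps "a" to most recently used
    ///         *entry += 100;
    ///     }
    /// }
    ///
    /// assert_eq!(lru.pop(), Some(("b", 2)));
    /// assert_eq!(lru.pop(), Some(("a", 101)));
    /// ```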
    pub fn smart_iter(&mut self) -> impl DoubleEndedIterator<Item = SmartEntry<'_, K, V, sealed::NoRemove>> {
        let cache = self.get_ptr();

        self.tree.iter().map(move |node| SmartEntry::new(cache, node))
    }

    /// Iterates over all key-value pairs in the cache, and calls the provided closure
    /// to determine if the key-value pair should be retained. If the closure returns `false`,
    /// the key-value pair is removed from the cache.
    ///
    /// LRU Order is unchanged.
    ///
    /// This is an `O(n)` operation.
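    ///
    /// # Example
    /// A small sketch with arbitrary example data:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<u32, u32> = LRUCache::default();
    /// lru.extend([(1, 10), (2, 20), (3, 30)]);
    ///
    /// // keep only even keys
    /// lru.retain(|&key, _| key % 2 == 0);
    ///
    /// assert_eq!(lru.len(), 1);
    /// assert!(lru.contains(&2));
    /// ```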
    pub fn retain<F>(&mut self, mut predicate: F)
    where
        F: FnMut(&K, &V) -> bool,
    {
        let mut cursor = self.tree.front_mut();

        while let Some(node) = cursor.get() {
            if predicate(&node.key, node.value.get()) {
                cursor.move_next();
                continue;
            }

            // SAFETY: Cursor created from a known valid node
            unsafe { self.list.cursor_mut_from_ptr(node).remove().unwrap_unchecked() };

            // SAFETY: We were just using the node, but now we own it
            let node = unsafe { cursor.remove().unwrap_unchecked() };

            self.size -= 1;

            // SAFETY: node is removed from both the tree and list
            let _ = unsafe { UnsafeRef::into_box(node) };
        }
    }
}

mod sealed {
    pub struct NoRemove;
    pub struct CanRemove;
}

/// An entry in the cache that can be used for reading or writing,
/// only updating the LRU order when the value is accessed mutably.
1187///
1188/// The `Deref` and `DerefMut` implementations allow for easy access to the value,
1189/// without or with updating the LRU order, respectively. Accessing the value mutably
1190/// via `DerefMut` will update the LRU order.
1191///
1192/// See [`SmartEntry::peek`] and [`SmartEntry::get`] for more information.
1193#[must_use]
1194pub struct SmartEntry<'a, K, V, R = sealed::CanRemove> {
1195 // NOTE: This reference is valid so long as the `UnsafeRef` that holds it is allocated
1196 node: &'a Node<K, V>,
1197 cache: NonNull<LRUCache<K, V>>,
1198 _marker: core::marker::PhantomData<&'a mut LRUCache<K, V>>,
1199 _removal: core::marker::PhantomData<R>,
1200}
1201
1202impl<K, V, R> Deref for SmartEntry<'_, K, V, R> {
1203 type Target = V;
1204
1205 /// Dereferences the value, without updating the LRU order.
1206 #[inline(always)]
1207 fn deref(&self) -> &Self::Target {
1208 self.peek_value()
1209 }
1210}
1211
1212impl<K, V, R> DerefMut for SmartEntry<'_, K, V, R> {
1213 /// Mutably dereferences the value, and updates the LRU order.
1214 #[inline(always)]
1215 fn deref_mut(&mut self) -> &mut Self::Target {
1216 self.get_value()
1217 }
1218}
1219
1220impl<'a, K, V, R> SmartEntry<'a, K, V, R> {
1221 #[inline(always)]
1222 const fn new(cache: NonNull<LRUCache<K, V>>, node: &'a Node<K, V>) -> Self {
1223 Self {
1224 node,
1225 cache,
1226 _marker: PhantomData,
1227 _removal: PhantomData,
1228 }
1229 }
1230}
1231
1232impl<'a, K, V, R> SmartEntry<'a, K, V, R> {
1233 /// With the lifetime of the `LRUCache` itself,
1234 /// access the key-value pair and update the LRU order.
1235 ///
1236 /// It's not possible to have multiple `SmartEntry` instances, nor
1237 /// more than one mutable reference to the cache value at a time,
1238 /// but this allows the reference to outlive the `SmartEntry` itself.
1239 ///
1240 /// See also [`SmartEntry::get`].
1241 ///
1242 /// # Error Example
1243 ///
1244 /// With the returned references being tied to the LRU cache itself,
1245 /// it's not possible to borrow it mutably more than once at a time,
1246 /// even after dropping the `SmartEntry`.
1247 ///
1248 /// ```rust,compile_fail
1249 /// # use intrusive_lru_cache::LRUCache;
1250 /// # let mut lru: LRUCache<u32, u32> = LRUCache::unbounded();
1251 /// let mut entry = lru.smart_get(&1).unwrap();
1252 ///
1253 /// let (key, value) = entry.into_mut();
1254 ///
    /// // error[E0499]: cannot borrow `lru` as mutable more than once at a time
    /// let another_entry = lru.smart_get(&1).unwrap();
    ///
    /// drop((key, value)); // pretend to use the value later
    /// ```
    #[must_use]
    pub fn into_mut(mut self) -> (&'a K, &'a mut V) {
        self.bump();

        // SAFETY: We have exclusive access to the Node for the duration of 'a
        unsafe { (&self.node.key, self.node.value.get_mut()) }
    }

    /// With the lifetime of the `LRUCache` itself,
    /// access the key-value pair without updating the LRU order.
    ///
    /// See also [`SmartEntry::peek`] and [`SmartEntry::into_mut`].
    #[inline(always)]
    #[must_use]
    pub fn into_ref(self) -> (&'a K, &'a V) {
        (&self.node.key, self.node.value.get())
    }
}

impl<K, V, R> SmartEntry<'_, K, V, R> {
    fn bump(&mut self) {
        // SAFETY: We tied the lifetime of the pointer to 'a, the same as the LRUCache,
        // so it will always be valid here. Furthermore, because it's a raw pointer,
        // SmartEntry is not Send/Sync, so as long as the mutability happens right
        // here and now, it's safe, same as an `&mut LinkedList`.
        bump(unsafe { &mut self.cache.as_mut().list }, self.node);
    }

    /// Access the key only, without updating the LRU order.
    #[inline(always)]
    #[must_use]
    pub fn key(&self) -> &K {
        &self.node.key
    }

    /// Access the key-value pair immutably, without updating the LRU order.
    ///
    /// See also [`SmartEntry::into_ref`].
    #[inline(always)]
    #[must_use]
    pub fn peek(&self) -> (&K, &V) {
        (&self.node.key, self.node.value.get())
    }

    /// Access the value immutably, without updating the LRU order.
    ///
    /// This is the same as [`SmartEntry::deref`].
    #[inline(always)]
    #[must_use]
    pub fn peek_value(&self) -> &V {
        self.node.value.get()
    }

    /// Access the key-value pair, and update the LRU order.
    ///
    /// The LRU order is updated every time this method is called,
    /// as it is assumed that the caller is actively using the value.
    ///
    /// See also [`SmartEntry::into_mut`].
    #[must_use]
    pub fn get(&mut self) -> (&K, &mut V) {
        self.bump();

        // SAFETY: We have exclusive access to the Node
        unsafe { (&self.node.key, self.node.value.get_mut()) }
    }

    /// Access the value mutably, and update the LRU order.
    ///
    /// The LRU order is updated every time this method is called,
    /// as it is assumed that the caller is actively using the value.
    ///
    /// The `DerefMut` implementation invokes this method to access the value,
    /// updating the LRU order in the process.
    #[inline(always)]
    pub fn get_value(&mut self) -> &mut V {
        self.bump();

        // SAFETY: We have exclusive access to the Node
        unsafe { self.node.value.get_mut() }
    }

    /// Returns `true` if this entry is the most recently used in the cache.
    #[must_use]
    pub fn is_most_recent(&self) -> bool {
        // SAFETY: This pointer is the same as a mutable reference to the list
        let list = unsafe { &self.cache.as_ref().list };

        // SAFETY: We know the list is non-empty, so front is always valid
        core::ptr::eq(self.node, unsafe { list.front().get().unwrap_unchecked() })
    }
}

impl<K, V> SmartEntry<'_, K, V, sealed::CanRemove> {
    /// Removes the key-value pair from the cache, and returns the key and value.
    ///
    /// This cannot be called on every `SmartEntry`. For example, [`LRUCache::smart_iter`]
    /// does not allow for removal of entries, so the `SmartEntry` it yields will not have
    /// this method available. It can, however, still update the LRU order on mutable access.
    ///
    /// Consider using [`LRUCache::retain`] to remove entries based on a predicate.
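    ///
    /// # Example
    /// A small sketch with arbitrary example data:
    /// ```rust
    /// # use intrusive_lru_cache::LRUCache;
    /// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
    /// lru.insert("a", 1);
    ///
    /// let entry = lru.smart_get("a").unwrap();
    /// assert_eq!(entry.remove(), ("a", 1));
    ///
    /// assert!(lru.is_empty());
    /// ```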
    #[must_use]
    pub fn remove(self) -> (K, V) {
        // SAFETY: node being `&Node<K, V>` is safe here because its lifetime is
        // effectively decoupled from the intrusive structures themselves
        let SmartEntry { node, mut cache, .. } = self;

        // SAFETY: We have exclusive access to the cache given the SmartEntry lifetime
        let cache = unsafe { cache.as_mut() };

        // SAFETY: Removing the node from the tree is safe, as it's a known valid pointer
        unsafe { cache.tree.cursor_mut_from_ptr(node).remove().unwrap_unchecked() };

        // SAFETY: Removing the node from the list is also safe for similar reasons
        let ptr = unsafe { cache.list.cursor_mut_from_ptr(node).remove().unwrap_unchecked() };

        // SAFETY: Node has been removed from both the tree and list
        unsafe { Node::unwrap(ptr) }
    }
}

impl<K, V> Drop for LRUCache<K, V> {
    fn drop(&mut self) {
        self.clear();
    }
}

impl<K, V, Q> Index<&Q> for LRUCache<K, V>
where
    K: Borrow<Q> + Ord + 'static,
    Q: Ord + ?Sized,
{
    type Output = V;

    /// Returns a reference to the value corresponding to the key,
    /// without updating the LRU order.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the cache.
    #[inline]
    fn index(&self, index: &Q) -> &Self::Output {
        self.peek(index).expect("no entry found for key")
    }
}

impl<K, V> Extend<(K, V)> for LRUCache<K, V>
where
    K: Ord + 'static,
{
    fn extend<T>(&mut self, iter: T)
    where
        T: IntoIterator<Item = (K, V)>,
    {
        for (key, value) in iter {
            self.insert(key, value);
        }
    }
}

impl<K, V> FromIterator<(K, V)> for LRUCache<K, V>
where
    K: Ord + 'static,
{
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = (K, V)>,
    {
        let mut cache = Self::unbounded();
        cache.extend(iter);
        cache
    }
}

/// An owning iterator over the key-value pairs in the cache,
/// in order of most recently used to least recently used.
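///
/// A small sketch with arbitrary example data:
/// ```rust
/// # use intrusive_lru_cache::LRUCache;
/// let mut lru: LRUCache<&'static str, u32> = LRUCache::default();
/// lru.insert("a", 1);
/// lru.insert("b", 2);
///
/// // most recently used first
/// let pairs: Vec<_> = lru.into_iter().collect();
/// assert_eq!(pairs, [("b", 2), ("a", 1)]);
/// ```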
pub struct IntoIter<K, V> {
    list: LinkedList<NodeListAdapter<K, V>>,
}

impl<K, V> IntoIterator for LRUCache<K, V>
where
    K: Ord + 'static,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    #[inline]
    fn into_iter(mut self) -> Self::IntoIter {
        self.tree.fast_clear();

        IntoIter {
            list: self.list.take(),
        }
    }
}

impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY: node is removed from the list
        self.list.pop_front().map(|node| unsafe { Node::unwrap(node) })
    }
}

impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        // SAFETY: node is removed from the list
        self.list.pop_back().map(|node| unsafe { Node::unwrap(node) })
    }
}

impl<K, V> Drop for IntoIter<K, V> {
    fn drop(&mut self) {
        clear_list(&mut self.list);
    }
}