weak_list/
lib.rs

#![warn(unsafe_op_in_unsafe_fn)]
use once_cell::sync::Lazy;
use std::any::Any;
use std::collections::HashSet;
use std::mem::MaybeUninit;
use std::ops::ControlFlow;
use std::ops::Deref;
use std::ptr;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::SeqCst;
use std::sync::Arc;
use std::sync::Weak;

/// Doubly linked, heterogeneous list of `Arc`-like nodes that functions as a
/// least-recently-used (LRU) cache.
// The idea is simple: make a linked list using Arc<Node<T>>.
// A Weak<T> is just a pointer to the allocation of an ArcInner<T>. An Arc<T> is the same pointer.
// So the WeakRef is also just a pointer, but instead of pointing to T it points to Node<T>.
// This makes it very cheap to move a node to the front of the list when it is used, which is
// exactly what an LRU cache needs. (A usage sketch follows the requirements below.)
//
// Requirements:
// * Must be able to push a new element as the most recently used.
// * Pushing a new element returns a WeakRef (equivalent to Weak<T>), which can be upgraded to an
//   ArcRef (equivalent to Arc<T>).
// * Upgrading a WeakRef to an ArcRef can fail if the element was removed from the list.
// * upgrade() should return an Option<ArcRef<T>>.
// * Upgrading a WeakRef to an ArcRef must mark the element as recently used.
// * Must be able to remove the least recently used element that is not currently being used by an
//   ArcRef.
// * Must be able to remove all elements that no longer have any WeakRefs or ArcRefs that could
//   use them.
// * Must be able to push heterogeneous types: one WeakList can contain an Arc<String> and an
//   Arc<Vec<u32>>, even an Arc<[u8]>.
// * The WeakList must provide an API that does not perform allocations, but allocations can be
//   used in user code.
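//
// A minimal usage sketch (hypothetical driver code, not part of the API surface; the real
// API is exercised in the tests at the bottom of this file):
//
//     let mut wl = WeakList::new();
//     let weak = wl.push_front(String::from("cached value"));
//     // Upgrading marks the element as most recently used.
//     if let Some(arc) = weak.upgrade(&mut wl) {
//         assert_eq!(*arc, "cached value");
//     }
//     // Evict the least recently used element that has no live ArcRef.
//     let _evicted = wl.pop_lru();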
pub struct WeakList<S: WeakListNodeSet> {
    head: *const RawArcNode,
    tail: *const RawArcNode,
    // Storage used to keep track of all the nodes that belong to this list. This is used to allow
    // safe and fast remove and upgrade functions, because otherwise these functions would need to
    // check if the node belongs to the list by iterating over all the elements, or trust the
    // caller that the node belongs to the list, making almost all the functions unsafe.
    node_set: S,
}

/// `WeakList` storage; it needs to be able to tell whether a node belongs to this list.
///
/// Implementations can assume that:
/// * `insert_ns` will only be called for nodes that do not already belong to the list
/// * `remove_ns` will only be called for nodes that do already belong to the list
///
/// # Safety
///
/// This trait is unsafe because if an implementation returns true for a pointer that is not in the
/// list, many invariants will be broken. Therefore, `contains_ns` must always return the correct
/// value.
pub unsafe trait WeakListNodeSet {
    fn insert_ns(&mut self, ptr: usize);
    fn contains_ns(&self, list: &WeakList<Self>, ptr: usize) -> bool
    where
        Self: Sized;
    fn remove_ns(&mut self, ptr: usize);
}
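
// As an illustration of the contract above, a sketch of a custom storage strategy
// (hypothetical, not part of this crate): a fixed-capacity array that never allocates.
// `contains_ns` must stay truthful or the unsafe contract is violated.
//
//     pub struct ArraySet<const N: usize> {
//         ptrs: [usize; N],
//         len: usize,
//     }
//
//     unsafe impl<const N: usize> WeakListNodeSet for ArraySet<N> {
//         fn insert_ns(&mut self, ptr: usize) {
//             self.ptrs[self.len] = ptr; // panics if the set is full
//             self.len += 1;
//         }
//         fn contains_ns(&self, _list: &WeakList<Self>, ptr: usize) -> bool {
//             self.ptrs[..self.len].contains(&ptr)
//         }
//         fn remove_ns(&mut self, ptr: usize) {
//             if let Some(i) = self.ptrs[..self.len].iter().position(|&p| p == ptr) {
//                 self.len -= 1;
//                 self.ptrs.swap(i, self.len);
//             }
//         }
//     }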

/// Default `WeakList` storage: a hashset of pointer values.
/// O(n) space but O(1) time.
// This is Lazy because HashSet::new is not a const fn.
pub type WeakListHashSet = Lazy<HashSet<usize>>;

unsafe impl WeakListNodeSet for Lazy<HashSet<usize>> {
    fn insert_ns(&mut self, ptr: usize) {
        self.insert(ptr);
    }
    fn contains_ns(&self, _list: &WeakList<Self>, ptr: usize) -> bool {
        self.contains(&ptr)
    }
    fn remove_ns(&mut self, ptr: usize) {
        self.remove(&ptr);
    }
}

/// To check if the list contains a node, iterate over all the nodes in the list until we find the
/// target one. O(1) space but O(n) time.
#[derive(Default)]
pub struct LinearSearch;

unsafe impl WeakListNodeSet for LinearSearch {
    fn insert_ns(&mut self, _ptr: usize) {}
    fn contains_ns(&self, list: &WeakList<Self>, ptr: usize) -> bool {
        // Iterate over list, check if any node address matches ptr
        let mut node = list.head;
        while !node.is_null() {
            if node as usize == ptr {
                return true;
            }
            unsafe { node = read_raw_node_next(node) };
        }

        false
    }
    fn remove_ns(&mut self, _ptr: usize) {}
}
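
// Choosing the zero-space strategy is just a type annotation (sketch; LinearSearch trades
// the O(1) hashset lookups for an O(n) scan, which may be fine for short lists):
//
//     let mut wl: WeakList<LinearSearch> = WeakList::default();
//     let weak = wl.push_front(42u32);
//     assert!(weak.upgrade(&mut wl).is_some());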

unsafe impl<S: Send + WeakListNodeSet> Send for WeakList<S> {}
unsafe impl<S: Sync + WeakListNodeSet> Sync for WeakList<S> {}

impl<S: Default + WeakListNodeSet> Default for WeakList<S> {
    fn default() -> Self {
        Self {
            head: ptr::null(),
            tail: ptr::null(),
            node_set: S::default(),
        }
    }
}

impl<S: WeakListNodeSet> Drop for WeakList<S> {
    fn drop(&mut self) {
        self.clear();
    }
}

impl WeakList<Lazy<HashSet<usize>>> {
    pub const fn new() -> Self {
        Self {
            head: ptr::null(),
            tail: ptr::null(),
            node_set: Lazy::new(HashSet::new),
        }
    }

    pub fn realloc_hashset_if_needed_no_alloc(
        &mut self,
        bigger_hashset: Option<&mut AllocHashSet>,
    ) {
        // Only reallocate if the hashset is full
        if self.node_set.len() != self.node_set.capacity() {
            return;
        }

        match bigger_hashset {
            Some(new_hs) => {
                if new_hs.capacity() <= self.node_set.capacity() {
                    panic!("New AllocHashSet capacity must be greater than the current capacity but {} <= {}", new_hs.capacity(), self.node_set.capacity());
                }
                new_hs.0.extend(self.node_set.drain());
                std::mem::swap(&mut *self.node_set, &mut new_hs.0);
                // Now bigger_hashset contains the smaller hashset
            }
            None => {
                // If the user did not provide a bigger_hashset, assume they don't mind allocating
                // memory, so don't realloc here
            }
        }
    }

    /// Returns the capacity of the hashset used to keep track of the active nodes. This capacity
    /// can be used to ensure that the `AllocHashSet::with_capacity` passed to
    /// `realloc_hashset_if_needed_no_alloc` has a greater capacity.
    pub fn hashset_capacity(&self) -> usize {
        self.node_set.capacity()
    }
}
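
// Sketch of the intended allocation-free push pattern (hypothetical caller code; the
// doubling is an assumption, the only hard requirement is that the spare hashset is
// strictly bigger than the current one):
//
//     let mut wl = WeakList::new();
//     // Pre-allocate outside the hot path.
//     let mut spare = AllocHashSet::with_capacity(wl.hashset_capacity() * 2 + 1);
//     let mem = AllocMem::default();
//     // Hot path: no allocations from here on.
//     wl.realloc_hashset_if_needed_no_alloc(Some(&mut spare));
//     let _weak = wl.push_front_no_alloc(1u32, mem);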

impl<S: WeakListNodeSet> WeakList<S> {
    /// Push element to the front of the list. This makes this new element the most recently used.
    ///
    /// Returns a `WeakRef<T>` that can be upgraded to an `ArcRef<T>`, similarly to the
    /// `Weak`/`Arc` std types.
    // This requires that T is Send+Sync because that's the requirement to make Arc<T> Send+Sync.
    pub fn push_front<T: Send + Sync + 'static>(&mut self, elem: T) -> WeakRef<T> {
        self.push_front_no_alloc(elem, AllocMem::default())
    }

    /// Same as `push_front`, but does not allocate here: the caller pre-allocates by passing
    /// `AllocMem::default()` as the `memory` argument, and (for the hashset storage) by calling
    /// `realloc_hashset_if_needed_no_alloc` with an `AllocHashSet::with_capacity(cap)` beforehand.
    // This requires that T is Send+Sync because that's the requirement to make Arc<T> Send+Sync.
    pub fn push_front_no_alloc<T: Send + Sync + 'static>(
        &mut self,
        elem: T,
        memory: AllocMem<T>,
    ) -> WeakRef<T> {
        let f_move_out = arc_from_raw_to_arc_any::<T>;
        let meta = RawArcNode {
            prev: AtomicPtr::new(ptr::null_mut()),
            next: AtomicPtr::new(ptr::null_mut()),
            f_move_out,
        };
        let mut uninit_arc: Arc<MaybeUninit<Node<T>>> = memory.0;
        let node = Node { meta, elem };
        Arc::get_mut(&mut uninit_arc).unwrap().write(node);
        let arc_node: Arc<Node<T>> =
            unsafe { Arc::from_raw(Arc::into_raw(uninit_arc) as *const Node<T>) };
        let weak_ref = WeakRef {
            weak: Arc::downgrade(&arc_node),
        };
        let raw_node_ptr = Arc::into_raw(arc_node) as *const RawArcNode;
        unsafe { self.push_front_node(raw_node_ptr) }

        weak_ref
    }

    // I gave up here, but an unsized push should be possible to implement.
    // Note that it won't be useful for dynamically sized arrays, because a `Node<[u8]>` does
    // not own any heap memory, so dropping one of those nodes from the list will not free any
    // extra memory. But it may be useful to store a `dyn Trait` for some use cases.
    /*
    pub fn push_front_unsized<T: ?Sized + Send + Sync + 'static, F>(&mut self, f: F) -> WeakRef<T>
    where F: FnOnce(OpaqueMeta) -> Arc<UnsizedNode<T>>
    {
        let meta = RawArcNode {
            prev: AtomicPtr::new(ptr::null_mut()),
            next: AtomicPtr::new(ptr::null_mut()),
            list: self as *const Self,
            f_move_out: |_| todo!(),
        };
        let elem = f(OpaqueMeta(meta));
        let arc_node: Arc<Node<T>> = unsafe { Arc::from_raw(Arc::into_raw(elem) as *const Node<T>) };
        let weak_ref = WeakRef { weak: Arc::downgrade(&arc_node) };
        let raw_node_ptr = Arc::into_raw(arc_node) as *const RawArcNode;
        unsafe { self.push_front_node(raw_node_ptr) };

        weak_ref
    }
    */

    // Push node to the head of the list.
    unsafe fn push_front_node(&mut self, raw_node_ptr: *const RawArcNode) {
        match (self.head.is_null(), self.tail.is_null()) {
            (true, true) => {
                self.head = raw_node_ptr;
                self.tail = self.head;
            }
            (false, false) => {
                unsafe {
                    raw_nodes_link(raw_node_ptr, self.head);
                }
                self.head = raw_node_ptr;
            }
            _ => unreachable!("head and tail must both be null or both be not null"),
        }
        self.node_set.insert_ns(raw_node_ptr as usize);
    }

    fn contains_node(&self, raw_node_ptr: *const RawArcNode) -> bool {
        self.node_set.contains_ns(self, raw_node_ptr as usize)
    }

    // Remove node assuming that it belongs to this list
    unsafe fn remove_node(&mut self, raw_node_ptr: *const RawArcNode) {
        unsafe {
            // First update head and tail pointers
            if self.head == raw_node_ptr {
                self.head = read_raw_node_next(raw_node_ptr);
            }
            if self.tail == raw_node_ptr {
                self.tail = read_raw_node_prev(raw_node_ptr);
            }
            // Then update neighbors of node
            remove_node_from_list(raw_node_ptr);
            self.node_set.remove_ns(raw_node_ptr as usize);
            // Note that this function does not drop the node!
        }
    }

    // Move node to the front of the list assuming that it belongs to this list
    unsafe fn move_node_to_front(&mut self, raw_node_ptr: *const RawArcNode) {
        // Inlined from
        //self.remove_node(raw_node_ptr);
        // If the node is already at the front of the list, we are done
        if self.head == raw_node_ptr {
            return;
        }
        unsafe {
            // Update tail
            if self.tail == raw_node_ptr {
                self.tail = read_raw_node_prev(raw_node_ptr);
            }
            // Update neighbors of node
            remove_node_from_list(raw_node_ptr);
        }

        // Inlined from
        //self.push_front_node(raw_node_ptr);
        unsafe {
            raw_nodes_link(raw_node_ptr, self.head);
        }
        self.head = raw_node_ptr;
    }

    /// Remove the least recently used element. This does not check whether that element is
    /// currently being used, so prefer using `pop_lru` instead.
    pub fn pop_back(&mut self) -> Option<Arc<dyn Any + Send + Sync + 'static>> {
        if self.tail.is_null() {
            return None;
        }

        unsafe {
            let tail = self.tail;
            self.remove_node(tail);
            let arc_any = move_out_of_raw_node(tail);

            Some(arc_any)
        }
    }

    /// Remove the least recently used element that is not currently being used: it does not have
    /// any active `ArcRef`s referencing it.
    pub fn pop_lru(&mut self) -> Option<Arc<dyn Any + Send + Sync + 'static>> {
        // Find the last element with strong count 1
        let mut node = self.tail;

        while !node.is_null() {
            unsafe {
                let strong_count = read_raw_node_strong_count(node);
                if strong_count == 1 {
                    // When removing the node, move node.next to the head of the list.
                    // This is done because elements with strong_count > 1 can be considered
                    // active, so we can move them to the front of the queue to speed up the
                    // next call to pop_lru. Let's say these are the strong counts of the nodes:
                    // n0 n1 n2 n3 n4
                    //  2  1  1  2  2
                    // If n2 is the removed node, the resulting list should look like:
                    // n3 n4 n0 n1 n2
                    //  2  2  2  1  1
                    // And then n2 can be immediately removed. Then the next call to pop_lru will
                    // check n1 then n0, and then n4 and n3. n4 and n3 used to have a strong count
                    // of 2, so it is unlikely that they will be removed, so it is better to have
                    // them near the front of the list.
                    //
                    // So:
                    // * cut n2-n3
                    // * link n4-n0
                    // * head = n3
                    // * tail = n2
                    // * self.pop_back()
                    let n2 = node;
                    let n3 = read_raw_node_next(n2);
                    if !n3.is_null() {
                        raw_nodes_cut(n2, n3);
                        let n4 = self.tail;
                        let n0 = self.head;
                        raw_nodes_link(n4, n0);
                        self.head = n3;
                        self.tail = n2;
                    }
                    // If n2.next was null then n2 was already the tail

                    return self.pop_back();
                }

                // Visit the next node, towards the head
                node = read_raw_node_prev(node);
            }
        }

        None
    }

    /// Remove all the unreachable elements that do not have any `WeakRef` or `ArcRef`, so they
    /// will never be used again. Returns a `Vec` of the removed elements.
    pub fn remove_unreachable(&mut self) -> Vec<Arc<dyn Any + Send + Sync + 'static>> {
        let mut v = vec![];

        self.remove_unreachable_into_f(|arc_any| {
            v.push(arc_any);

            ControlFlow::Continue(())
        });

        v
    }

    /// Same as `remove_unreachable`, but insert the removed elements into the provided buffer.
    /// This can be used to manually deallocate the elements later if needed, and also if it is
    /// important to avoid allocations. See the sketch after this function.
    ///
    /// Note: this function returns immediately if the buffer fills up, so if the returned length
    /// is equal to the buffer size it means that there may still be some unreachable elements
    /// left.
    // TODO: this could return a cursor to continue iteration or something
    pub fn remove_unreachable_into_buf(
        &mut self,
        buf: &mut [Option<Arc<dyn Any + Send + Sync + 'static>>],
    ) -> usize {
        if buf.is_empty() {
            return 0;
        }

        let mut count = 0;

        self.remove_unreachable_into_f(|arc_any| {
            buf[count] = Some(arc_any);
            count += 1;

            if count == buf.len() {
                ControlFlow::Break(())
            } else {
                ControlFlow::Continue(())
            }
        });

        count
    }
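
    // Sketch: draining unreachable elements into a fixed buffer without allocating
    // (hypothetical caller code; spelled-out `[None, None]` because `Option<Arc<_>>` is
    // not `Copy`):
    //
    //     let mut buf: [Option<Arc<dyn Any + Send + Sync>>; 2] = [None, None];
    //     let n = wl.remove_unreachable_into_buf(&mut buf);
    //     for slot in buf.iter_mut().take(n) {
    //         let arc_any = slot.take().unwrap();
    //         // Downcast if the concrete type is known:
    //         let _ = arc_dyn_any_to_arc_ref_t::<String>(arc_any);
    //     }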

    /// Same as `remove_unreachable`, but pass ownership of each removed element to the provided
    /// callback. This can be used to manually deallocate the elements later if needed.
    pub fn remove_unreachable_into_f<F>(&mut self, mut f: F)
    where
        F: FnMut(Arc<dyn Any + Send + Sync + 'static>) -> ControlFlow<()>,
    {
        // Remove all elements with strong count 1 and weak count 0
        // (these can never be upgraded to an ArcRef, so they can be safely removed)
        let mut node = self.tail;

        while !node.is_null() {
            unsafe {
                let next_node = read_raw_node_prev(node);
                if raw_node_arc_is_unique(node) {
                    self.remove_node(node);
                    let arc_any = move_out_of_raw_node(node);
                    if f(arc_any).is_break() {
                        break;
                    }
                }
                // Visit the next node, towards the head
                node = next_node;
            }
        }
    }

    /// Remove an element from the list.
    pub fn remove<T>(&mut self, weak_ref: &WeakRef<T>) -> Option<ArcRef<T>> {
        // Check if weak_ref belongs to this list
        let raw_node_ptr = weak_ref.weak.as_ptr() as *const RawArcNode;
        if !self.contains_node(raw_node_ptr) {
            return None;
        }
        // SAFETY: we just checked that the list contains this node
        unsafe { self.remove_node(raw_node_ptr) };
        // No need to use the move_out_of_raw_node function because we know T
        let arc: Arc<Node<T>> = unsafe { Arc::from_raw(raw_node_ptr as *const Node<T>) };

        Some(ArcRef { arc })
    }

    /// Remove all the elements from the list, regardless of whether they are being actively used
    /// or not.
    pub fn clear(&mut self) {
        while let Some(arc) = self.pop_back() {
            drop(arc);
        }
    }
}

pub struct AllocMem<T>(Arc<MaybeUninit<Node<T>>>);

impl<T> Default for AllocMem<T> {
    fn default() -> Self {
        Self(Arc::new(MaybeUninit::uninit()))
    }
}

pub struct AllocHashSet(HashSet<usize>);

unsafe impl Send for AllocHashSet {}
unsafe impl Sync for AllocHashSet {}

impl AllocHashSet {
    // We should not implement Default, because users may incorrectly pass an empty
    // AllocHashSet::default() to realloc_hashset_if_needed_no_alloc, and that will not work.
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        Self(HashSet::new())
    }

    pub fn with_capacity(capacity: usize) -> Self {
        Self(HashSet::with_capacity(capacity))
    }

    pub fn capacity(&self) -> usize {
        self.0.capacity()
    }

    /// Reserve capacity such that the new capacity is at least `target_cap`. If `target_cap` is
    /// less than the current capacity, this function may not actually reallocate.
    pub fn allocate_capacity(&mut self, target_cap: usize) {
        self.0.reserve(target_cap.saturating_sub(self.0.len()))
    }
}

/// Recover an `Arc<dyn Any>` back from a `*const RawArcNode`. This is needed to run the destructor
/// of `T` when nodes are removed from the list.
unsafe fn arc_from_raw_to_arc_any<T: Send + Sync + 'static>(
    raw_node_ptr: *const RawArcNode,
) -> Arc<dyn Any + Send + Sync + 'static> {
    // This Arc<dyn Any> cannot be used to recover the Arc<T> because we do not have an Arc<T>, we
    // have an Arc<Node<T>>, and users cannot use the Node type. But users can recover an ArcRef<T>
    // using the function `arc_dyn_any_to_arc_ref_t` (that name will probably change), and then use
    // ArcRef::get_mut to get a mutable reference. See the test push_pop_back_recover_type_from_pop
    // for an example.
    unsafe { Arc::<Node<T>>::from_raw(raw_node_ptr as *const Node<T>) }
}

unsafe fn read_raw_node_prev(p: *const RawArcNode) -> *const RawArcNode {
    unsafe { &(*p).prev }.load(SeqCst)
}

unsafe fn read_raw_node_next(p: *const RawArcNode) -> *const RawArcNode {
    unsafe { &(*p).next }.load(SeqCst)
}

unsafe fn update_raw_node_prev(p: *const RawArcNode, prev: *const RawArcNode) {
    unsafe { &(*p).prev }.store(prev as *mut RawArcNode, SeqCst);
}

unsafe fn update_raw_node_next(p: *const RawArcNode, next: *const RawArcNode) {
    unsafe { &(*p).next }.store(next as *mut RawArcNode, SeqCst);
}

unsafe fn raw_nodes_cut(p0: *const RawArcNode, p1: *const RawArcNode) {
    unsafe {
        update_raw_node_next(p0, ptr::null());
        update_raw_node_prev(p1, ptr::null());
    }
}

unsafe fn raw_nodes_link(p0: *const RawArcNode, p1: *const RawArcNode) {
    unsafe {
        update_raw_node_next(p0, p1);
        update_raw_node_prev(p1, p0);
    }
}

unsafe fn remove_node_from_list(p: *const RawArcNode) {
    // [n0 n1 n2]
    // [n0 n2]
    // n0 = n1.prev
    // n2 = n1.next
    // n0.next = n2
    // n2.prev = n0

    unsafe {
        let prev = read_raw_node_prev(p);
        let next = read_raw_node_next(p);

        if !prev.is_null() {
            update_raw_node_next(prev, next);
        }
        if !next.is_null() {
            update_raw_node_prev(next, prev);
        }

        update_raw_node_prev(p, ptr::null());
        update_raw_node_next(p, ptr::null());
    }
}

/// Returns `strong_count == 1 && weak_count == 0`
unsafe fn raw_node_arc_is_unique(p: *const RawArcNode) -> bool {
    let mut dummy_arc: Arc<Node<()>> = unsafe { Arc::from_raw(p as *const Node<()>) };

    // This function cannot read strong_count and weak_count directly because of possible race
    // conditions. There is an Arc::is_unique method but it is private. Arc::get_mut is implemented
    // as "if Arc::is_unique { Some } else { None }", so let's use that here.
    let is_unique = Arc::get_mut(&mut dummy_arc).is_some();
    std::mem::forget(dummy_arc);

    // Race conditions are not possible here because if the Arc is unique, the only place where we
    // can add new Arcs or Refs is inside this WeakList. So if `is_unique` is true, it will stay
    // true, but if `is_unique` is false it may become true. That is not a problem because we only
    // remove the node if `is_unique` is true.
    is_unique
}

unsafe fn read_raw_node_strong_count(p: *const RawArcNode) -> usize {
    let dummy_arc: Arc<Node<()>> = unsafe { Arc::from_raw(p as *const Node<()>) };
    let strong_count = Arc::strong_count(&dummy_arc);
    std::mem::forget(dummy_arc);

    strong_count
}

unsafe fn move_out_of_raw_node(node: *const RawArcNode) -> Arc<dyn Any + Send + Sync + 'static> {
    let arc_from_raw_to_arc_any = unsafe { (*node).f_move_out };
    unsafe { arc_from_raw_to_arc_any(node) }
}

/// Arc<Node<T>> is just a pointer to an ArcInner<Node<T>>. This struct represents the Node<T>
/// part. The memory layout of the ArcInner<Node<T>> is:
/// * strong_count: AtomicUsize
/// * weak_count: AtomicUsize
/// * prev
/// * next
/// * f_move_out
/// * T
///
/// Mutating the inner fields of the Node<T> requires holding a `&mut WeakList`, so it should be
/// safe to just write to them directly. But to be conservative we use atomic fields to implement
/// mutability.
///
/// The reason the atomics should not be needed is that these fields are private, so user code can
/// only access T through a &T. Technically a reference &T is also a borrow of the `Node<T>`, and
/// mutating the other fields would require creating a `&mut Node<T>`; creating a `&mut` reference
/// while there is another `&` reference is undefined behavior. So just to be sure we avoid ever
/// creating a `&mut Node<T>` or a `&mut RawArcNode`.
#[repr(C)]
struct RawArcNode {
    prev: AtomicPtr<RawArcNode>,
    next: AtomicPtr<RawArcNode>,
    f_move_out: unsafe fn(*const RawArcNode) -> Arc<dyn Any + Send + Sync + 'static>,
}

// repr(C) guarantees that it is safe to cast *const Node to *const RawArcNode
// TODO: but does RawArcNode need repr(C) as well?
#[repr(C)]
struct Node<T: ?Sized> {
    meta: RawArcNode,
    elem: T,
}

// Failed attempt to implement push_unsized
/*
#[repr(C)]
pub struct UnsizedNode<T: ?Sized> {
    pub meta: OpaqueMeta,
    pub elem: T,
}

pub struct OpaqueMeta(RawArcNode);

impl Default for OpaqueMeta {
    fn default() -> Self {
        Self(RawArcNode {
            prev: AtomicPtr::new(ptr::null_mut()),
            next: AtomicPtr::new(ptr::null_mut()),
            list: ptr::null(),
            f_move_out: |_| { unreachable!() },
        })
    }
}

impl OpaqueMeta {
    pub fn set_metadata<T: Send + Sync + 'static>(&mut self, _arc: &Arc<UnsizedNode<T>>) {
        self.0.f_move_out = arc_from_raw_to_arc_any::<T>;
    }
}
*/

// TODO: the destructor of WeakRef could check the strong/weak counts, and if they are 1/1 this
// means that this was the last WeakRef to the value, and the value can be removed from the list.
// But we cannot remove items from the list because we don't have a mutable reference to the list
// inside the drop function. The same applies to ArcRef.
pub struct WeakRef<T: ?Sized> {
    weak: Weak<Node<T>>,
}

impl<T: ?Sized> Clone for WeakRef<T> {
    fn clone(&self) -> Self {
        Self {
            weak: self.weak.clone(),
        }
    }
}

impl<T: ?Sized + Send + Sync + 'static> WeakRef<T> {
    pub fn upgrade<S: WeakListNodeSet>(&self, list: &mut WeakList<S>) -> Option<ArcRef<T>> {
        // First, upgrade to prevent WeakList::pop_lru from removing the element (although that's
        // impossible because we hold a `&mut WeakList`).
        let ret = self.upgrade_quietly()?;

        // If the list does not contain this node (it was removed, or it belongs to another list),
        // fail the upgrade
        let raw_node_ptr = self.weak.as_ptr() as *const RawArcNode;
        if !list.contains_node(raw_node_ptr) {
            return None;
        }
        // Mark node as recently used by moving it to the front of the list.
        // SAFETY: we just checked that the list contains this node
        unsafe {
            list.move_node_to_front(raw_node_ptr);
        }

        Some(ret)
    }

    /// Upgrade the `WeakRef` to an `ArcRef`, but do not mark it as recently used in its parent
    /// `WeakList`.
    ///
    /// This can be used to check whether a `WeakRef` can still be upgraded. It can also be used to
    /// upgrade without a reference to the `WeakList`, but that is not recommended because it makes
    /// the `WeakList` less useful.
    pub fn upgrade_quietly(&self) -> Option<ArcRef<T>> {
        self.weak.upgrade().map(|arc| ArcRef { arc })
    }
}
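
// Sketch: checking liveness without disturbing the LRU order (hypothetical caller code):
//
//     if weak_ref.upgrade_quietly().is_some() {
//         // Still alive, but its position in the list is unchanged, so this check does
//         // not make it any less likely to be evicted by a later pop_lru.
//     }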

pub struct ArcRef<T: ?Sized> {
    arc: Arc<Node<T>>,
}

impl<T: ?Sized> Clone for ArcRef<T> {
    fn clone(&self) -> Self {
        Self {
            arc: self.arc.clone(),
        }
    }
}

impl<T: ?Sized> Deref for ArcRef<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.arc.elem
    }
}

impl<T: ?Sized> ArcRef<T> {
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        Arc::get_mut(&mut this.arc).map(|x| &mut x.elem)
    }

    pub fn downgrade(this: &Self) -> WeakRef<T> {
        WeakRef {
            weak: Arc::downgrade(&this.arc),
        }
    }
}

/// Try to downcast the result of `WeakList::pop_lru` back into an `ArcRef<T>`. Returns the passed
/// `Arc` back in case of error.
pub fn arc_dyn_any_to_arc_ref_t<T: Send + Sync + 'static>(
    aa: Arc<dyn Any + Send + Sync + 'static>,
) -> Result<ArcRef<T>, Arc<dyn Any + Send + Sync + 'static>> {
    // Try to downcast Arc<dyn Any> to Arc<Node<T>>
    aa.downcast::<Node<T>>().map(|node| ArcRef { arc: node })
}

/*
pub trait ArcNodeDowncast {
    fn get_arc_ref<T: ?Sized>(&self) -> Option<ArcRef<T>>;
}

impl<T> ArcNodeDowncast for Arc<dyn ArcNodeDowncast {
    fn get_arc_ref<T: ?Sized>(&self
}
*/

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn push_elem() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let s1 = ws1.upgrade(&mut wl).expect("s1 died");

        assert_eq!(*s1, format!("string1"));
    }

    #[test]
    fn push_pop_back() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        wl.pop_back();

        assert!(ws1.upgrade(&mut wl).is_none());
    }

    #[test]
    fn push_pop_lru() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        assert!(wl.pop_lru().is_some());

        assert!(ws1.upgrade(&mut wl).is_none());
    }

    #[test]
    fn push_pop_lru_while_upgraded() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let _s1 = ws1.upgrade(&mut wl).unwrap();

        assert!(wl.pop_lru().is_none());
    }

    #[test]
    fn push_pop_lru_moves_upgraded_tail_to_front() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));
        let s1 = ws1.upgrade(&mut wl).unwrap();
        let _ws2 = wl.push_front(format!("string2"));
        let _ws3 = wl.push_front(format!("string3"));

        // ws1 is the least recently used, but we hold an ArcRef so it will not be removed by
        // pop_lru. ws2 will be removed instead, and ws1 will be moved to the front.
        assert!(wl.pop_lru().is_some());

        // Release the ArcRef; ws1 is still at the front, so ws3 will be removed next
        drop(s1);

        assert!(wl.pop_lru().is_some());

        // If ws1 was not removed, we can upgrade it again
        let _s1 = ws1.upgrade(&mut wl).unwrap();
    }

    #[test]
    fn push_push_upgrade_pop() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));
        let ws2 = wl.push_front(format!("string2"));

        assert!(ws1.upgrade(&mut wl).is_some());

        wl.pop_lru();

        assert!(ws1.upgrade(&mut wl).is_some());
        assert!(ws2.upgrade(&mut wl).is_none());
    }

    #[test]
    fn remove_unreachable() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));
        let ws2 = wl.push_front(format!("string2"));
        let ws3 = wl.push_front(format!("string3"));

        drop(ws1);
        drop(ws3);

        // Removes ws1 and ws3
        assert_eq!(wl.remove_unreachable().len(), 2);
        assert!(ws2.upgrade(&mut wl).is_some());
        // Removes ws2
        assert!(wl.pop_back().is_some());
        assert!(ws2.upgrade(&mut wl).is_none());
        // List is now empty
        assert!(wl.pop_back().is_none());
    }

    #[test]
    fn remove_unreachable_empty_buf() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        assert_eq!(wl.remove_unreachable_into_buf(&mut []), 0);

        // ws1 was not removed from the list
        let _s1 = ws1.upgrade(&mut wl).unwrap();
    }

    #[test]
    fn remove_unreachable_big_buf() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));
        drop(ws1);

        let mut buf = vec![None; 10];

        assert_eq!(wl.remove_unreachable_into_buf(&mut buf), 1);
        // List is now empty
        assert!(wl.pop_back().is_none());
    }

    #[test]
    fn heterogeneous_list() {
        let mut wl = WeakList::new();

        let _ws1 = wl.push_front(format!("string1"));
        let _ws2 = wl.push_front(8u32);
        let _ws3 = wl.push_front(vec!["a", "b", "c"]);
    }

    #[test]
    fn weak_list_is_sync_and_send() {
        fn assert_is_sync_and_send<T: Send + Sync>(_x: &T) {}

        assert_is_sync_and_send(&WeakList::new());
    }

    /*
    #[test]
    fn weak_list_of_rc_should_not_compile() {
        let mut wl = WeakList::new();

        let wsrc = wl.push_front(std::rc::Rc::new(1u8));
    }
    */

    #[test]
    fn upgrade_node_with_another_list() {
        let mut wl1 = WeakList::new();
        let _ws1 = wl1.push_front(format!("string1"));
        let mut wl2 = WeakList::new();
        let ws2 = wl2.push_front(format!("string2"));

        assert!(ws2.upgrade(&mut wl1).is_none());
    }

    #[test]
    fn push_pop_back_updates_head() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        wl.pop_back();

        assert!(ws1.upgrade(&mut wl).is_none());

        // If wl.head still points to ws1, this will fail
        let ws2 = wl.push_front(format!("string2"));
        assert!(ws2.upgrade(&mut wl).is_some());
    }

    #[test]
    fn remove_node_twice() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let _s1 = wl.remove(&ws1).unwrap();
        assert!(wl.remove(&ws1).is_none());
    }

    #[test]
    fn remove_node_after_moving_list() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let mut wl2 = WeakList::new();
        std::mem::swap(&mut wl, &mut wl2);

        let _s1 = wl2.remove(&ws1).unwrap();
    }

    #[test]
    fn upgrade_node_after_moving_list() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let mut wl2 = WeakList::new();
        std::mem::swap(&mut wl, &mut wl2);

        let _s1 = ws1.upgrade(&mut wl2).unwrap();
    }

    #[test]
    fn remove_node_from_another_list() {
        let mut wl1 = WeakList::new();
        let ws1 = wl1.push_front(format!("string1"));
        let mut wl2 = WeakList::new();
        let _ws2 = wl2.push_front(format!("string2"));

        assert!(wl2.remove(&ws1).is_none());
    }

    #[test]
    fn list_cleared_on_drop() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        drop(wl);

        assert_eq!(ws1.upgrade_quietly().as_deref(), None);
    }

    /*
    #[test]
    fn unsized_push() {
        let a1: Arc<[u8]> = Arc::new([1]);

        let mut wl = WeakList::new();

        wl.push_front_unsized(|mut meta| {
            let an1 = Arc::new(UnsizedNode { meta, elem: [1] });
            meta.set_metadata(&an1);
            an1
        });
    }
    */

    #[test]
    fn upgrade_node_after_removing_from_list() {
        // This test used to segfault when dropping the WeakList after the upgrade.
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let _arc_s1 = wl.pop_back().unwrap();

        // This currently returns None, but it would also be fine if it returned Some
        assert!(ws1.upgrade(&mut wl).is_none());
    }

    #[test]
    fn push_pop_back_recover_type_from_pop() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let arc_dyn_any = wl.pop_back().unwrap();

        drop(ws1);

        let mut as1: ArcRef<String> = arc_dyn_any_to_arc_ref_t(arc_dyn_any).unwrap();

        let s1_ref = ArcRef::get_mut(&mut as1).unwrap();
        let s1 = std::mem::take(s1_ref);

        assert_eq!(s1, format!("string1"));
    }

    #[test]
    fn push_pop_back_recover_type_from_weak_ref() {
        let mut wl = WeakList::new();

        let ws1 = wl.push_front(format!("string1"));

        let _arc_s1 = wl.pop_back().unwrap();

        // It is impossible to recover an ArcRef after the item has been removed from the list
        assert!(ws1.upgrade(&mut wl).is_none());
    }

    #[test]
    fn fuzz_1() {
        // [Give { size: 91 }, Give { size: 72057594037927936 }, Upgrade { index: 0 }, Give { size: 0 }, Clear, Clear, Upgrade { index: 0 }]
        let mut wl = WeakList::new();
        let mut weaks: Vec<Option<WeakRef<Vec<u8>>>> = vec![];
        let mut upgrades = vec![];

        weaks.push(Some(wl.push_front(Vec::with_capacity(91))));
        upgrades.push(weaks[0].as_ref().unwrap().upgrade(&mut wl));
        weaks.push(Some(wl.push_front(Vec::with_capacity(0))));
        wl.clear();
        //wl.clear();
        upgrades.push(weaks[0].as_ref().unwrap().upgrade(&mut wl));
    }

    #[test]
    fn variance_remove() {
        let mut wl = WeakList::new();

        let ws1: WeakRef<&'static str> = wl.push_front("string1");

        fn shorten_lifetime<'a, T: ?Sized>(
            weak_ref: WeakRef<&'a T>,
            _lifetime: &'a T,
        ) -> WeakRef<&'a T> {
            weak_ref
        }

        let stack_str: &str = &format!("hi");
        let shorter_ws1 = shorten_lifetime(ws1, stack_str);
        // Calling remove with a WeakRef<&'a str> instead of a WeakRef<&'static str> is valid and
        // harmless
        let s1: ArcRef<&str> = wl.remove(&shorter_ws1).unwrap();

        assert_eq!(&*s1, &"string1");
    }

    #[test]
    fn default_impl_lazy() {
        // Lazy::default calls HashSet::default, so this works as expected
        let mut wl: WeakList<WeakListHashSet> = WeakList::default();
        let ws1 = wl.push_front(format!("string1"));
        let s1 = ws1.upgrade(&mut wl).expect("s1 died");
        assert_eq!(*s1, format!("string1"));
    }
}