//! sdd 4.8.3
//!
//! Scalable lock-free delayed memory reclaimer.
use std::alloc::{Layout, alloc, dealloc};
use std::mem::forget;
use std::panic::catch_unwind;
use std::ptr::{null, null_mut};
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::{AcqRel, Acquire, Relaxed, Release};

use super::collector::Collector;
use super::link::Link;
use super::ref_counted::RefCounted;
use super::{Guard, Owned, Shared};

/// Private garbage collector to limit the lifetime of allocated memory chunks.
///
/// When the [`PrivateCollector`] is dropped, it performs an aggressive reclamation; it deallocates
/// all remaining memory chunks that it has collected in its lifetime without ensuring that there
/// are no active [`Ptr`](super::Ptr) or [`RawPtr`](super::RawPtr) pointing to any of the memory
/// chunks.
///
/// # Safety
///
/// Using [`PrivateCollector`] for types that do not implement [`Send`] is unsafe, as their
/// instances may be dropped on an arbitrary thread. See [`collect_owned`](Self::collect_owned) and
/// [`collect_shared`](Self::collect_shared) for more safety notes.
pub struct PrivateCollector {
    /// [`SharedGarbageBag`] that is shared among multiple threads.
    ///
    /// While the thread-local [`Collector`] handles most retirements, the [`SharedGarbageBag`] acts
    /// as a secondary storage for memory chunks that failed to be collected locally, e.g., due to
    /// memory allocation failure.
    ///
    /// Lazily heap-allocated on first use; remains null until then.
    shared_garbage_bag: AtomicPtr<SharedGarbageBag>,
    /// Backup garbage bag in case heap allocation fails.
    ///
    /// Head of an intrusive singly-linked list of [`Link`], used only while no
    /// [`SharedGarbageBag`] is available.
    backup_garbage_bag: AtomicPtr<Link>,
}

/// A lock-free, singly-linked list of [`Link`] used to temporarily hold retired memory chunks.
///
/// The contained [`AtomicPtr`] is the list head; entries are chained intrusively through
/// `Link::set_next_ptr` / `Link::next_ptr`.
pub(super) struct SharedGarbageBag(AtomicPtr<Link>);

impl PrivateCollector {
    /// Creates a new [`PrivateCollector`].
    ///
    /// The [`SharedGarbageBag`] is allocated lazily on first use, so construction never
    /// allocates and can be `const`.
    ///
    /// # Examples
    ///
    /// ```
    /// use sdd::PrivateCollector;
    ///
    /// let private_collector = PrivateCollector::new();
    /// ```
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        Self {
            shared_garbage_bag: AtomicPtr::new(null_mut()),
            backup_garbage_bag: AtomicPtr::new(null_mut()),
        }
    }

    /// Collects the memory owned by the supplied [`Owned`].
    ///
    /// # Safety
    ///
    /// This method is unsafe because it manually manages the lifetime of the underlying memory. The
    /// caller must ensure that the [`PrivateCollector`] is not dropped while any
    /// [`Ptr`](super::Ptr) or [`RawPtr`](super::RawPtr) still point to the memory held by the
    /// [`Owned`].
    ///
    /// Additionally, `T` must implement [`Send`] because its instances may be dropped on an
    /// arbitrary thread: for example, when the [`PrivateCollector`] is moved across threads or
    /// shared among multiple threads. The [`Send`] constraint is intentionally not enforced at
    /// compile time for more flexibility.
    ///
    /// # Examples
    ///
    /// ```
    /// use sdd::{Guard, Owned, PrivateCollector};
    ///
    /// let private_collector = PrivateCollector::new();
    /// let owned = Owned::new(10);
    ///
    /// unsafe { private_collector.collect_owned(owned, &Guard::new()); }
    /// ```
    #[inline]
    pub unsafe fn collect_owned<T>(&self, owned: Owned<T>, guard: &Guard) {
        // Extract the intrusive `Link` embedded in the underlying `RefCounted` instance.
        let ptr = RefCounted::link_ptr(owned.underlying_ptr()).cast_mut();
        // Ownership is transferred to the collector; suppress `Owned::drop`.
        forget(owned);
        self.push(ptr, guard);
    }

    /// Collects the supplied [`Shared`].
    ///
    /// Returns `true` if the last reference was dropped and the memory was successfully collected.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it manually manages the lifetime of the underlying memory. The
    /// caller must ensure that the [`PrivateCollector`] is not dropped while any
    /// [`Ptr`](super::Ptr) or [`RawPtr`](super::RawPtr) still point to the memory held by the
    /// [`Shared`].
    ///
    /// Additionally, `T` must implement [`Send`] because its instances may be dropped on an
    /// arbitrary thread: for example, when the [`PrivateCollector`] is moved across threads or
    /// shared among multiple threads. The [`Send`] constraint is intentionally not enforced at
    /// compile time for more flexibility.
    ///
    /// # Examples
    ///
    /// ```
    /// use sdd::{Guard, PrivateCollector, Shared};
    ///
    /// let private_collector = PrivateCollector::new();
    /// let shared = Shared::new(10);
    ///
    /// let collected = unsafe { private_collector.collect_shared(shared, &Guard::new()) };
    /// assert!(collected);
    /// ```
    #[inline]
    #[must_use]
    pub unsafe fn collect_shared<T>(&self, shared: Shared<T>, guard: &Guard) -> bool {
        let ptr = shared.underlying_ptr();
        // The reference held by `shared` is transferred to this method; suppress `Shared::drop`.
        forget(shared);

        // Only collect the memory when the last strong reference has just been dropped.
        if unsafe { (*ptr).drop_ref() } {
            self.push(RefCounted::link_ptr(ptr).cast_mut(), guard);
            true
        } else {
            false
        }
    }

    /// Pushes a [`Link`] into its [`SharedGarbageBag`].
    ///
    /// Falls back to [`Self::fallback`] if no [`SharedGarbageBag`] could be allocated or if the
    /// collection attempt panicked.
    #[inline]
    fn push(&self, ptr: *mut Link, guard: &Guard) {
        let mut shared_garbage_bag = self.shared_garbage_bag.load(Acquire).cast_const();
        if shared_garbage_bag.is_null() {
            // Lazily allocate the shared bag; `alloc` returns null if allocation panicked.
            shared_garbage_bag = self.alloc();
        }
        // `catch_unwind` ensures a panic inside `collect` does not leak `ptr`: on failure the
        // pointer is still pushed into a fallback bag below.
        if !shared_garbage_bag.is_null()
            && catch_unwind(|| guard.collect(ptr, shared_garbage_bag)).is_ok()
        {
            return;
        }
        self.fallback(ptr);
    }

    /// Allocates a [`SharedGarbageBag`].
    ///
    /// Returns the already installed bag if another thread won the race, or null if allocation
    /// panicked.
    #[inline(never)]
    fn alloc(&self) -> *const SharedGarbageBag {
        let Ok(allocated) = catch_unwind(|| {
            // Re-check under `Acquire`: another thread may have installed a bag already.
            let shared_garbage_bag = self.shared_garbage_bag.load(Acquire);
            if !shared_garbage_bag.is_null() {
                return shared_garbage_bag;
            }

            #[allow(clippy::cast_ptr_alignment)]
            let allocated =
                unsafe { alloc(Layout::new::<SharedGarbageBag>()).cast::<SharedGarbageBag>() };
            // The panic is caught by the surrounding `catch_unwind`, yielding a null return.
            assert!(!allocated.is_null(), "Memory allocation failed");
            // SAFETY: `allocated` is non-null and was just allocated with the layout of
            // `SharedGarbageBag`, so writing an initial value into it is sound.
            unsafe {
                allocated.write(SharedGarbageBag(AtomicPtr::new(null_mut())));
            }

            // Publish the new bag; on a lost race, free ours and adopt the winner's.
            match self
                .shared_garbage_bag
                .compare_exchange(null_mut(), allocated, AcqRel, Acquire)
            {
                Ok(_) => allocated,
                Err(actual) => {
                    unsafe {
                        dealloc(allocated.cast::<u8>(), Layout::new::<SharedGarbageBag>());
                    }
                    actual
                }
            }
        }) else {
            // Allocation panicked; the caller is expected to fall back to the backup bag.
            return null();
        };

        // Move any memory chunks stored in the backup garbage bag to the shared bag.
        let head = self.backup_garbage_bag.swap(null_mut(), Acquire);
        let mut tail = head;
        if !tail.is_null() {
            // Find the tail of the detached backup list; it is exclusively owned after the swap.
            loop {
                let next = unsafe { (*tail).next_ptr(Relaxed) };
                if next.is_null() {
                    break;
                }
                tail = next;
            }
            // Prepend the whole backup list onto the shared bag's list in one atomic update; the
            // closure may run multiple times, re-linking the tail to the latest observed head.
            let result = unsafe {
                (*allocated)
                    .0
                    .fetch_update(Release, Relaxed, |head_ptr| {
                        (*tail).set_next_ptr(head_ptr, Relaxed);
                        Some(head)
                    })
                    .is_ok()
            };
            // `fetch_update` only fails when the closure returns `None`, which it never does.
            debug_assert!(result);
        }

        allocated
    }

    /// Falls back to using a shared/backup garbage bag.
    ///
    /// Pushes `ptr` directly onto the shared bag's list if one exists, or onto the backup list
    /// otherwise.
    #[cold]
    #[inline(never)]
    fn fallback(&self, ptr: *mut Link) {
        let shared_garbage_bag = self.shared_garbage_bag.load(Acquire);
        let garbage_bag = if shared_garbage_bag.is_null() {
            &self.backup_garbage_bag
        } else {
            unsafe { &(*shared_garbage_bag).0 }
        };
        let result = garbage_bag
            .fetch_update(Release, Relaxed, |head_ptr| unsafe {
                (*ptr).set_next_ptr(head_ptr, Relaxed);
                Some(ptr)
            })
            .is_ok();
        // The closure never returns `None`, so `fetch_update` cannot fail.
        debug_assert!(result);

        // Call `alloc` again in case a `SharedGarbageBag` was allocated right before pushing the
        // pointer into the backup bag.
        self.alloc();
    }
}

impl Default for PrivateCollector {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl Drop for PrivateCollector {
    #[inline]
    fn drop(&mut self) {
        // Aggressive reclamation: everything collected so far is freed here, regardless of any
        // remaining `Ptr`/`RawPtr` — see the type-level safety notes.
        let shared_garbage_bag = self.shared_garbage_bag.swap(null_mut(), Acquire);
        if !shared_garbage_bag.is_null() {
            let guard = Guard::new();
            // Purge memory chunks associated with this bag through a fresh guard.
            guard.purge(shared_garbage_bag);
            // Deallocate whatever is still queued inside the bag itself.
            let link = unsafe { (*shared_garbage_bag).0.swap(null_mut(), Acquire) };
            Collector::dealloc(link);
            // SAFETY: the bag was heap-allocated in `alloc` with this exact layout, and it has
            // been detached above, so this is the sole remaining owner of the allocation.
            unsafe {
                shared_garbage_bag.drop_in_place();
                dealloc(
                    shared_garbage_bag.cast::<u8>(),
                    Layout::new::<SharedGarbageBag>(),
                );
            }
        }
        // Deallocate anything left in the backup list as well.
        let link = self.backup_garbage_bag.swap(null_mut(), Acquire);
        Collector::dealloc(link);
    }
}

impl SharedGarbageBag {
    /// Exports memory chunks.
    ///
    /// Atomically detaches the entire linked list from the bag, leaving it empty, and then walks
    /// the detached list once to find its tail and length.
    ///
    /// Returns the head, tail pointers and the length of the linked list.
    #[inline]
    pub(super) fn export(&self) -> (*mut Link, *mut Link, usize) {
        // Swap the head for null only when the list is non-empty; returning `None` from the
        // closure makes `fetch_update` fail, signalling an empty bag.
        let Ok(head) = self.0.fetch_update(Relaxed, Acquire, |ptr| {
            if ptr.is_null() {
                None
            } else {
                Some(null_mut())
            }
        }) else {
            return (null_mut(), null_mut(), 0);
        };
        // The detached list is now exclusively owned; traverse to the tail while counting links.
        let mut tail = head;
        let mut link_len = 1;
        loop {
            let next = unsafe { (*tail).next_ptr(Relaxed) };
            if next.is_null() {
                break;
            }
            link_len += 1;
            tail = next;
        }
        (head, tail, link_len)
    }
}