// potential-well 1.0.0
//
// Atomic boxes — see the module documentation below.
//! Atomic containers.
use core::{
    fmt,
    mem::ManuallyDrop,
    ops::Deref,
    pin::Pin,
    ptr::{NonNull, null_mut},
    sync::atomic::{AtomicPtr, Ordering, fence},
};

use crate::traits::{Well, WellMut};

/// Potentially empty atomic potential well.
///
/// Internally, this just wraps a pointer to `<T as Deref>::Target` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
///
/// A null pointer encodes the empty (`None`) state; any non-null pointer is *owned* by the
/// atomic and was obtained from [`Well::remove`].
pub struct AtomicOption<T: Well>(AtomicPtr<<T as Deref>::Target>);

/// An empty `AtomicOption` is the default.
impl<T: Well> Default for AtomicOption<T> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn default() -> Self {
        Self::none()
    }
}
impl<T: Well> AtomicOption<T> {
    /// Creates atomic without anything inside.
    #[inline]
    pub fn none() -> AtomicOption<T> {
        // A null pointer encodes the empty state.
        AtomicOption(AtomicPtr::new(null_mut()))
    }

    /// Creates atomic with something inside.
    #[inline]
    pub fn some(well: T) -> AtomicOption<T> {
        // `remove` transfers ownership of the allocation to this atomic; it is only
        // ever reconstituted into a `T` via `Well::insert` before being dropped.
        AtomicOption(AtomicPtr::new(well.remove().as_ptr()))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
    /// out, you must put a different pointer in its place first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<<T as Deref>::Target> {
        &self.0
    }

    /// Loads the inner data as a value.
    ///
    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
    /// while the atomic is still in use.
    #[inline]
    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<Option<T>> {
        let loaded = self.0.load(ordering);

        // SAFETY: Data was originally `remove`d from a well.
        ManuallyDrop::new(NonNull::new(loaded).map(|ptr| unsafe { T::insert(ptr) }))
    }

    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> Option<&<T as Deref>::Target> {
        // NOTE(review): the returned borrow is tied to `&self`, yet `take`/`swap` also
        // operate through `&self`, so another thread could drop the pointee while this
        // reference is live. Confirm that the `Well` contract (or a crate-level
        // invariant not visible here) rules that out.
        let loaded = self.0.load(ordering);
        match NonNull::new(loaded) {
            // SAFETY: We can read the data from a `Well`.
            Some(ptr) => Some(unsafe { ptr.as_ref() }),
            None => None,
        }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: T, ordering: Ordering) -> Option<T> {
        // Ownership of `well`'s allocation moves into the atomic; ownership of the
        // previous allocation (if any) moves out to the returned `T`.
        let old = self.0.swap(well.remove().as_ptr(), ordering);

        // SAFETY: Data was originally `remove`d from a well.
        NonNull::new(old).map(|old| unsafe { T::insert(old) })
    }

    /// Takes the data out of the well.
    ///
    /// This is equivalent to an atomic [`swap`] with a null pointer.
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn take(&self, ordering: Ordering) -> Option<T> {
        // Leave null (the empty state) behind; the caller now owns the old value.
        let old = self.0.swap(null_mut(), ordering);

        // SAFETY: Data was originally `remove`d from a well.
        NonNull::new(old).map(|old| unsafe { T::insert(old) })
    }

    /// Inserts data into the well.
    ///
    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
    ///
    /// [`insert_weak`]: AtomicOption::insert_weak
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert(&self, well: T, success: Ordering, failure: Ordering) -> Result<(), T> {
        let ptr = well.remove();
        // The CAS only succeeds when the slot is currently empty (null); on failure
        // the well is reconstituted and handed back to the caller untouched.
        if self
            .0
            .compare_exchange(null_mut(), ptr.as_ptr(), success, failure)
            .is_err()
        {
            // SAFETY: This is just the `Well` we passed in.
            Err(unsafe { T::insert(ptr) })
        } else {
            Ok(())
        }
    }
    /// Inserts data into the well, sometimes failing spuriously.
    ///
    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
    ///
    /// [`insert`]: AtomicOption::insert
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert_weak(&self, well: T, success: Ordering, failure: Ordering) -> Result<(), T> {
        let ptr = well.remove();
        // Same protocol as `insert`, but the weak CAS may fail even when the slot is
        // empty; callers are expected to retry in a loop if they need certainty.
        if self
            .0
            .compare_exchange_weak(null_mut(), ptr.as_ptr(), success, failure)
            .is_err()
        {
            // SAFETY: This is just the `Well` we passed in.
            Err(unsafe { T::insert(ptr) })
        } else {
            Ok(())
        }
    }
}
impl<T: WellMut<Target: Unpin>> AtomicOption<T> {
    /// Loads the inner data as a mutable reference.
    ///
    /// This performs a non-atomic access since the atomic is mutably borrowed.
    #[inline]
    pub fn load_mut(&mut self) -> Option<&mut <T as Deref>::Target> {
        // `get_mut` gives plain access: `&mut self` proves no other thread can touch it.
        let raw = *self.0.get_mut();

        // SAFETY: We can mutate the data from a `WellMut`.
        NonNull::new(raw).map(|mut ptr| unsafe { ptr.as_mut() })
    }
}
impl<T: WellMut> AtomicOption<Pin<T>>
where
    Pin<T>: Well,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: AtomicOption::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut <Pin<T> as Deref>::Target>> {
        // `get_mut` gives plain (non-atomic) access because `&mut self` is exclusive.
        let loaded = self.0.get_mut();
        match NonNull::new(*loaded) {
            // SAFETY: We can pull out a pinned pointer safely. The value was stored from a
            // `Pin<T>`, so re-pinning the reference upholds the pinning invariant.
            Some(mut ptr) => Some(unsafe { Pin::new_unchecked(ptr.as_mut()) }),
            None => None,
        }
    }
}
impl<T: Well> From<T> for AtomicOption<T> {
    /// Wraps the given well in a full `AtomicOption`.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn from(well: T) -> Self {
        Self::some(well)
    }
}
impl<T: Well> From<Option<T>> for AtomicOption<T> {
    /// Converts an `Option` into the equivalent atomic container:
    /// `Some` becomes a full atomic, `None` an empty one.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn from(well: Option<T>) -> Self {
        well.map_or_else(AtomicOption::none, AtomicOption::some)
    }
}
impl<T: Well + Clone> AtomicOption<T> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<T> {
        // `load_value` wraps the temporary `T` in `ManuallyDrop`, so only the clone is
        // returned and the atomic keeps ownership of the original allocation.
        (*self.load_value(ordering)).clone()
    }
}
impl<T: Well> Drop for AtomicOption<T> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with accesses made through the atomic on other threads before
        // reclaiming the value. NOTE(review): a weaker (`Acquire`) fence is presumably
        // sufficient here — confirm before relaxing.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop. `load_value` reconstitutes the
        // owning `T` (or `None` if empty), and dropping the `ManuallyDrop` frees it.
        unsafe {
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
impl<T: Well<Target: fmt::Debug>> fmt::Debug for AtomicOption<T> {
    /// Formats the currently stored target (if any) via a relaxed load.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let snapshot = self.load(Ordering::Relaxed);
        fmt::Debug::fmt(&snapshot, f)
    }
}

/// Atomic potential well.
///
/// Internally, this just wraps a pointer to `<T as Deref>::Target` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
///
/// Unlike [`AtomicOption`], the stored pointer is never null: it always originates from
/// [`Well::remove`].
pub struct Atomic<T: Well>(AtomicPtr<<T as Deref>::Target>);
impl<T: Well + Default> Default for Atomic<T> {
    /// Creates an atomic holding `T::default()`.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn default() -> Self {
        Self::new(T::default())
    }
}
impl<T: Well> Atomic<T> {
    /// Creates atomic with a value.
    #[inline]
    pub fn new(well: T) -> Atomic<T> {
        // `remove` transfers ownership of the allocation to this atomic; the pointer
        // it yields is never null.
        Atomic(AtomicPtr::new(well.remove().as_ptr()))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be a valid pointer from [`Well::remove`] and
    /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
    /// pointer, and if you want to move it out, you must put a different pointer in its place
    /// first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<<T as Deref>::Target> {
        &self.0
    }

    /// Loads the inner data as a value.
    ///
    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
    /// while the atomic is still in use.
    #[inline]
    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<T> {
        let loaded = self.0.load(ordering);

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        ManuallyDrop::new(unsafe { T::insert(NonNull::new_unchecked(loaded)) })
    }

    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> &<T as Deref>::Target {
        // NOTE(review): the returned borrow is tied to `&self`, yet `swap` also operates
        // through `&self`, so another thread could drop the previous value while this
        // reference is live. Confirm the `Well` contract rules that out.
        let loaded = self.0.load(ordering);

        // SAFETY: We can read the data from a `Well`, and the data is never null.
        unsafe { NonNull::new_unchecked(loaded).as_ref() }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: T, ordering: Ordering) -> T {
        // Ownership of `well`'s allocation moves into the atomic; ownership of the
        // previous allocation moves out to the returned `T`.
        let old = self.0.swap(well.remove().as_ptr(), ordering);

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        unsafe { T::insert(NonNull::new_unchecked(old)) }
    }
}
impl<T: WellMut<Target: Unpin>> Atomic<T> {
    /// Loads the inner data as a mutable reference.
    ///
    /// This performs a non-atomic access since the atomic is mutably borrowed.
    #[inline]
    pub fn load_mut(&mut self) -> &mut <T as Deref>::Target {
        // `get_mut` gives plain access: `&mut self` proves no other thread can touch it.
        let loaded = self.0.get_mut();

        // SAFETY: We can mutate the data from a `WellMut`, and it is never null.
        unsafe { NonNull::new_unchecked(*loaded).as_mut() }
    }
}
impl<T: WellMut> Atomic<Pin<T>>
where
    Pin<T>: Well,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: Atomic::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Pin<&mut <Pin<T> as Deref>::Target> {
        let loaded = self.0.get_mut();

        // SAFETY: We can mutate the data from a `WellMut` as long as it's pinned. The value
        // was stored from a `Pin<T>`, so re-pinning the reference upholds the invariant;
        // the pointer is never null.
        unsafe { Pin::new_unchecked(NonNull::new_unchecked(*loaded).as_mut()) }
    }
}
impl<T: Well> From<T> for Atomic<T> {
    /// Moves the well into a new atomic.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn from(well: T) -> Self {
        Self::new(well)
    }
}
impl<T: Well + Clone> Atomic<T> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> T {
        // `load_value` wraps the temporary `T` in `ManuallyDrop`, so only the clone is
        // returned and the atomic keeps ownership of the original allocation.
        (*self.load_value(ordering)).clone()
    }
}
impl<T: Well> Drop for Atomic<T> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with accesses made through the atomic on other threads before
        // reclaiming the value. NOTE(review): a weaker (`Acquire`) fence is presumably
        // sufficient here — confirm before relaxing.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop. `load_value` reconstitutes the
        // owning `T`, and dropping the `ManuallyDrop` frees it.
        unsafe {
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
impl<T: Well<Target: fmt::Debug>> fmt::Debug for Atomic<T> {
    /// Formats the stored target via a relaxed load.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let target = self.load(Ordering::Relaxed);
        fmt::Debug::fmt(&target, f)
    }
}