use core::{
fmt,
marker::PhantomData,
sync::atomic::{AtomicUsize, Ordering},
};
use crate::{AtomicTagPtr, TagPtr};
// SAFETY: the only state is the inner `AtomicUsize` (see the `null`/`new`
// constructors below); `T` is never owned or dereferenced by this type, so it
// can be sent between threads regardless of `T` — mirroring the unconditional
// `Send`/`Sync` impls of `core::sync::atomic::AtomicPtr`.
unsafe impl<T, const N: usize> Send for AtomicTagPtr<T, N> {}
// SAFETY: see the `Send` impl above; all shared access goes through atomic
// operations on the inner `AtomicUsize`, so `&AtomicTagPtr` is safe to share.
unsafe impl<T, const N: usize> Sync for AtomicTagPtr<T, N> {}
impl<T, const N: usize> AtomicTagPtr<T, N> {
    doc_comment! {
        doc_tag_bits!(),
        pub const TAG_BITS: usize = N;
    }

    doc_comment! {
        doc_tag_mask!(),
        pub const TAG_MASK: usize = crate::mark_mask(Self::TAG_BITS);
    }

    doc_comment! {
        doc_ptr_mask!(),
        pub const POINTER_MASK: usize = !Self::TAG_MASK;
    }

    doc_comment! {
        doc_null!(),
        pub const fn null() -> Self {
            Self { inner: AtomicUsize::new(0), _marker: PhantomData }
        }
    }

    doc_comment! {
        doc_atomic_new!(),
        #[inline]
        pub fn new(marked_ptr: TagPtr<T, N>) -> Self {
            Self { inner: AtomicUsize::new(marked_ptr.into_usize()), _marker: PhantomData }
        }
    }

    doc_comment! {
        doc_atomic_into_inner!(),
        #[inline]
        pub fn into_inner(self) -> TagPtr<T, N> {
            TagPtr::from_usize(self.inner.into_inner())
        }
    }

    /// Returns a mutable reference to the underlying (non-atomic) [`TagPtr`].
    ///
    /// Taking `&mut self` statically guarantees exclusive access, so no atomic
    /// operation is required.
    #[inline]
    pub fn get_mut(&mut self) -> &mut TagPtr<T, N> {
        // SAFETY: casts `&mut usize` to `&mut TagPtr<T, N>`; this relies on
        // `TagPtr<T, N>` having the exact layout of a `usize` (presumably
        // `#[repr(transparent)]` over `usize` — confirm in its definition).
        unsafe { &mut *(self.inner.get_mut() as *mut usize as *mut _) }
    }

    /// Loads the current value with the given memory `order` and returns it as
    /// a [`TagPtr`].
    #[inline]
    pub fn load(&self, order: Ordering) -> TagPtr<T, N> {
        TagPtr::from_usize(self.inner.load(order))
    }

    /// Stores `ptr` with the given memory `order`.
    #[inline]
    pub fn store(&self, ptr: TagPtr<T, N>, order: Ordering) {
        self.inner.store(ptr.into_usize(), order)
    }

    /// Atomically replaces the current value with `ptr` and returns the
    /// previous value.
    // NOTE(review): `#[inline]` added for consistency — every other method in
    // this impl carries it.
    #[inline]
    pub fn swap(&self, ptr: TagPtr<T, N>, order: Ordering) -> TagPtr<T, N> {
        TagPtr::from_usize(self.inner.swap(ptr.into_usize(), order))
    }

    /// Atomically stores `new` if the current value equals `current`.
    ///
    /// On success `Ok(current)` is returned (on success the previous value is
    /// by definition equal to `current`); on failure `Err` carries the actual
    /// previous value. `(success, failure)` are the orderings used for the
    /// respective outcomes.
    #[inline]
    pub fn compare_exchange(
        &self,
        current: TagPtr<T, N>,
        new: TagPtr<T, N>,
        (success, failure): (Ordering, Ordering),
    ) -> Result<TagPtr<T, N>, TagPtr<T, N>> {
        self.inner
            .compare_exchange(current.into_usize(), new.into_usize(), success, failure)
            .map(|_| current)
            .map_err(TagPtr::from_usize)
    }

    /// Like [`compare_exchange`][AtomicTagPtr::compare_exchange], but may fail
    /// spuriously even when the comparison succeeds; intended for use in loops.
    #[inline]
    pub fn compare_exchange_weak(
        &self,
        current: TagPtr<T, N>,
        new: TagPtr<T, N>,
        (success, failure): (Ordering, Ordering),
    ) -> Result<TagPtr<T, N>, TagPtr<T, N>> {
        self.inner
            .compare_exchange_weak(current.into_usize(), new.into_usize(), success, failure)
            .map(|_| current)
            .map_err(TagPtr::from_usize)
    }

    /// Atomically adds `value` to the stored word and returns the previous
    /// value.
    ///
    /// The debug assertion only rejects the grossest misuse: even for
    /// `value < TAG_MASK`, an addition can still carry out of the tag bits and
    /// corrupt the pointer if the current tag is large enough — callers must
    /// uphold that invariant themselves.
    #[inline]
    pub fn fetch_add(&self, value: usize, order: Ordering) -> TagPtr<T, N> {
        debug_assert!(value < Self::TAG_MASK, "`value` exceeds tag bits (would overflow)");
        TagPtr::from_usize(self.inner.fetch_add(value, order))
    }

    /// Atomically subtracts `value` from the stored word and returns the
    /// previous value.
    ///
    /// As with [`fetch_add`][AtomicTagPtr::fetch_add], the debug assertion is
    /// only a partial guard: subtracting more than the current tag borrows
    /// from the pointer bits.
    #[inline]
    pub fn fetch_sub(&self, value: usize, order: Ordering) -> TagPtr<T, N> {
        debug_assert!(value < Self::TAG_MASK, "`value` exceeds tag bits (would underflow)");
        TagPtr::from_usize(self.inner.fetch_sub(value, order))
    }

    /// Atomically ORs `value` into the tag and returns the previous value.
    ///
    /// `value` is masked with `TAG_MASK` before the operation, so pointer bits
    /// can never be set by this call.
    #[inline]
    pub fn fetch_or(&self, value: usize, order: Ordering) -> TagPtr<T, N> {
        debug_assert!(value <= Self::TAG_MASK, "`value` exceeds tag bits (would corrupt pointer)");
        TagPtr::from_usize(self.inner.fetch_or(Self::TAG_MASK & value, order))
    }

    /// Atomically ANDs the tag with `value` and returns the previous value.
    ///
    /// The operand is widened to `POINTER_MASK | value`, so pointer bits are
    /// always preserved and only tag bits can be cleared.
    #[inline]
    pub fn fetch_and(&self, value: usize, order: Ordering) -> TagPtr<T, N> {
        debug_assert!(value <= Self::TAG_MASK, "`value` exceeds tag bits (would corrupt pointer)");
        TagPtr::from_usize(self.inner.fetch_and(Self::POINTER_MASK | value, order))
    }
}
impl<T, const N: usize> fmt::Debug for AtomicTagPtr<T, N> {
    /// Formats the current value as a struct with separate `ptr` and `tag`
    /// fields.
    ///
    /// The value is read once with `SeqCst` ordering; it may already be stale
    /// by the time it is printed.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let snapshot = self.load(Ordering::SeqCst);
        let (ptr, tag) = snapshot.decompose();
        let mut dbg = f.debug_struct("AtomicTagPtr");
        dbg.field("ptr", &ptr);
        dbg.field("tag", &tag);
        dbg.finish()
    }
}
// `Default` is generated by the crate-local `impl_default!` macro (presumably
// delegating to `Self::null()` — confirm in the macro definition).
impl<T, const N: usize> Default for AtomicTagPtr<T, N> {
impl_default!();
}
impl<T, const N: usize> From<*mut T> for AtomicTagPtr<T, N> {
#[inline]
fn from(ptr: *mut T) -> Self {
Self::new(ptr.into())
}
}
impl<T, const N: usize> From<TagPtr<T, N>> for AtomicTagPtr<T, N> {
    /// Wraps an existing [`TagPtr`] in a new atomic.
    #[inline]
    fn from(marked_ptr: TagPtr<T, N>) -> Self {
        Self::new(marked_ptr)
    }
}
impl<T, const N: usize> fmt::Pointer for AtomicTagPtr<T, N> {
    /// Formats the currently stored value (read with `SeqCst` ordering) using
    /// its own [`fmt::Pointer`] implementation.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let current = self.load(Ordering::SeqCst);
        fmt::Pointer::fmt(&current, f)
    }
}