use crate::atomic::Atomic;
use crate::guard::{self, Guard};
use crate::retired::RetiredNode;
use alloc::boxed::Box;
use core::fmt;
use core::marker::PhantomData as marker;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::sync::atomic::Ordering;
// Heap node pairing the reclamation header with the stored value.
//
// `#[repr(C)]` pins the field order so `_retired` sits at offset 0: the
// rest of this file casts `*mut AtomNode<T>` to `*mut RetiredNode` (and a
// `DeferDrop` wrapper does the same), which is only sound with this layout.
#[repr(C)]
struct AtomNode<T> {
_retired: RetiredNode,
val: T,
}
impl<T> AtomNode<T> {
    /// Builds a node with a fresh reclamation header ahead of the payload.
    fn new(val: T) -> Self {
        let _retired = RetiredNode::new();
        Self { _retired, val }
    }

    /// Heap-allocates a node and leaks it as a raw pointer; ownership is
    /// handed to whoever eventually reclaims it.
    fn boxed(val: T) -> *mut Self {
        let node = Box::new(Self::new(val));
        Box::into_raw(node)
    }

    /// Raw pointer to the payload, computed without materializing a
    /// reference to the (possibly shared) node.
    #[inline]
    unsafe fn val_ptr(ptr: *const Self) -> *const T {
        // SAFETY: the caller guarantees `ptr` addresses a live `AtomNode<T>`.
        unsafe { core::ptr::addr_of!((*ptr).val) }
    }
}
/// RAII read guard: `_guard` keeps the epoch pinned so `ptr` stays valid
/// for as long as this guard lives; derefs to `&T`.
pub struct AtomGuard<'a, T> {
_guard: Guard,
// Points at the `val` field of the node that was current at load time.
ptr: *const T,
// Ties the guard's lifetime to the borrow of the source atom.
marker: marker<&'a T>,
}
impl<T> Deref for AtomGuard<'_, T> {
    type Target = T;

    /// Borrows the pinned value.
    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: the guard is only constructed over a non-null pointer into
        // a node that `_guard` keeps alive, so the dereference is valid.
        unsafe { self.ptr.as_ref().unwrap_unchecked() }
    }
}
impl<T: fmt::Debug> fmt::Debug for AtomGuard<'_, T> {
    /// Forwards to the inner value's `Debug`, preserving formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
impl<T: fmt::Display> fmt::Display for AtomGuard<'_, T> {
    /// Forwards to the inner value's `Display`, preserving formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
/// A value detached from an atom. Its destructor is deferred (see the
/// `Drop` impl) because epoch-pinned readers may still be observing the
/// bytes that were read out of the node.
pub struct Removed<T: Send + Sync + 'static> {
value: ManuallyDrop<T>,
// Epoch recorded when the originating node was created; forwarded to the
// deferred-drop wrapper so reclamation is not scheduled earlier than it
// would have been for the node itself.
birth_epoch: u64,
}
impl<T: Send + Sync + 'static> Deref for Removed<T> {
    type Target = T;

    /// Borrows the detached value through its `ManuallyDrop` wrapper.
    #[inline]
    fn deref(&self) -> &T {
        &*self.value
    }
}
impl<T: Send + Sync + 'static> Removed<T> {
/// Extracts the value without the epoch-deferred destruction performed by
/// `Drop for Removed`.
///
/// # Safety
/// Skipping the deferral means `T`'s destructor (or a move) may run while
/// another thread still holds a guard into the node the value came from —
/// the caller must ensure that cannot happen. NOTE(review): exact contract
/// depends on `guard`'s reclamation semantics; confirm against that module.
pub unsafe fn into_inner_unchecked(mut self) -> T {
// Move the value out first, then forget `self` so our `Drop` (which
// would retire the value for deferred destruction) never runs.
let val = unsafe { ManuallyDrop::take(&mut self.value) };
core::mem::forget(self); val
}
}
impl<T: Send + Sync + 'static> Drop for Removed<T> {
fn drop(&mut self) {
// Same layout trick as `AtomNode`: header at offset 0 so the wrapper
// pointer can be reinterpreted as `*mut RetiredNode`.
#[repr(C)]
struct DeferDrop<U> {
_retired: RetiredNode,
value: U,
}
// SAFETY: `value` is taken only here or in `into_inner_unchecked`,
// which forgets `self` so this destructor never also runs.
let val = unsafe { ManuallyDrop::take(&mut self.value) };
// Re-box the value behind a retire header so its destructor runs only
// once no epoch-pinned reader can still observe it.
let wrapper = Box::into_raw(Box::new(DeferDrop {
_retired: RetiredNode::new(),
value: val,
}));
unsafe {
// Propagate the original node's epoch so reclamation of the value is
// not scheduled earlier than it would have been for the node.
(*(wrapper as *mut RetiredNode)).set_birth_epoch(self.birth_epoch);
}
// SAFETY: `wrapper` is a fresh allocation now owned by the retire list.
unsafe { guard::retire(wrapper) };
}
}
impl<T: Send + Sync + 'static + PartialEq> PartialEq for Removed<T> {
    /// Compares the detached values themselves.
    fn eq(&self, other: &Self) -> bool {
        (**self).eq(&**other)
    }
}
impl<T: Send + Sync + 'static + PartialEq> PartialEq<T> for Removed<T> {
    /// Compares the detached value against a plain `T`.
    fn eq(&self, other: &T) -> bool {
        (**self).eq(other)
    }
}
impl<T: Send + Sync + 'static + fmt::Debug> fmt::Debug for Removed<T> {
    /// Forwards to the detached value's `Debug`, preserving formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
impl<T: Send + Sync + 'static + fmt::Display> fmt::Display for Removed<T> {
    /// Forwards to the detached value's `Display`, preserving formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
// SAFETY: `Removed<T>` owns only a `ManuallyDrop<T>` and a `u64`, so with
// `T: Send + Sync` the auto traits would already hold; these impls restate
// that explicitly rather than override anything.
unsafe impl<T: Send + Sync + 'static> Send for Removed<T> {}
unsafe impl<T: Send + Sync + 'static> Sync for Removed<T> {}
/// A lock-free, always-populated atomic cell over a heap-allocated `T`;
/// old values are reclaimed with epoch-based deferral (readers pin via
/// `guard::pin`).
pub struct Atom<T: Send + Sync + 'static> {
inner: Atomic<AtomNode<T>>,
}
impl<T: Send + Sync + 'static> Atom<T> {
/// Creates an atom initially holding `val` (one node allocation).
#[inline]
pub fn new(val: T) -> Self {
let ptr = AtomNode::boxed(val);
Self {
inner: Atomic::new(ptr),
}
}
/// Consumes the atom and returns the stored value.
///
/// Freeing immediately is sound: `self` is taken by value and every
/// `AtomGuard` borrows the atom, so no guard can outlive this call.
#[inline]
pub fn into_inner(self) -> T {
let guard = guard::pin();
let shared = self.inner.load(Ordering::Acquire, &guard);
let node_ptr = shared.as_raw();
// Skip our `Drop`, which would free the node a second time.
core::mem::forget(self);
// SAFETY: exclusive ownership; the node came from `Box::into_raw`.
let node = unsafe { Box::from_raw(node_ptr) };
node.val
}
/// Pins the current epoch and returns a guard dereferencing to the
/// value that was current at the moment of the load.
#[inline]
pub fn load(&self) -> AtomGuard<'_, T> {
let guard = guard::pin();
let shared = self.inner.load(Ordering::Acquire, &guard);
AtomGuard {
// SAFETY: `Atom` never stores null; `guard` keeps the node alive.
ptr: unsafe { AtomNode::val_ptr(shared.as_raw()) },
_guard: guard,
marker,
}
}
/// Returns a clone of the current value; the pin is released on return.
#[inline]
pub fn load_clone(&self) -> T
where
T: Clone,
{
// `.clone()` resolves through `Deref` to `T::clone`.
self.load().clone()
}
/// Runs `f` against the current value under a short-lived pin.
#[inline]
pub fn peek<R>(&self, f: impl FnOnce(&T) -> R) -> R {
let guard = self.load();
f(&*guard)
}
/// Replaces the value; the old node is retired for deferred reclamation.
#[inline]
pub fn store(&self, val: T) {
let new_ptr = AtomNode::boxed(val);
let guard = guard::pin();
// SAFETY: `new_ptr` is a fresh, valid allocation.
let new_shared = unsafe { crate::atomic::Shared::from_raw(new_ptr) };
let old = self.inner.swap(new_shared, Ordering::AcqRel, &guard);
let old_ptr = old.as_raw();
if !old_ptr.is_null() {
// SAFETY: after the swap no new load can reach `old_ptr`; readers
// pinned before the swap are protected by the retire scheme.
unsafe { guard::retire(old_ptr) };
}
}
/// Replaces the value and hands the previous one back as [`Removed`],
/// whose destructor is deferred until no pinned reader can observe it.
#[inline]
pub fn swap(&self, val: T) -> Removed<T> {
let new_ptr = AtomNode::boxed(val);
let guard = guard::pin();
// SAFETY: `new_ptr` is a fresh, valid allocation.
let new_shared = unsafe { crate::atomic::Shared::from_raw(new_ptr) };
let old = self.inner.swap(new_shared, Ordering::AcqRel, &guard);
let old_ptr = old.as_raw();
// Copy the old value's bits out; concurrent readers may still read the
// same bytes, which is fine because nothing writes them anymore.
let old_val = unsafe { core::ptr::read(AtomNode::val_ptr(old_ptr)) };
// repr(C): the `RetiredNode` header sits at offset 0 of the node.
let birth_epoch = unsafe { (*(old_ptr as *mut RetiredNode)).birth_epoch() };
// Free only the node's memory later; `T` was moved out above, so
// running its destructor at reclamation would be a double drop.
unsafe { retire_node_dealloc_only::<T>(old_ptr) };
Removed {
value: ManuallyDrop::new(old_val),
birth_epoch,
}
}
/// Compare-and-swap keyed on pointer identity of `current`'s node.
///
/// On success, returns a guard over the newly installed value; on
/// failure (the atom no longer holds `current`'s node), returns `new`
/// back to the caller unconsumed.
#[inline]
pub fn compare_and_swap<'a>(
&'a self,
current: &AtomGuard<'_, T>,
new: T,
) -> Result<AtomGuard<'a, T>, T> {
let new_ptr = AtomNode::boxed(new);
let guard = guard::pin();
let current_shared = self.inner.load(Ordering::Acquire, &guard);
let current_val_ptr = unsafe { AtomNode::val_ptr(current_shared.as_raw()) };
// Fast-path reject: skip the CAS when the snapshot is already stale.
if current_val_ptr != current.ptr {
// SAFETY: `new_ptr` was never published, so free it immediately.
let rejected = unsafe { Box::from_raw(new_ptr) };
return Err(rejected.val);
}
let new_shared = unsafe { crate::atomic::Shared::from_raw(new_ptr) };
match self.inner.compare_exchange(
current_shared,
new_shared,
Ordering::AcqRel,
Ordering::Acquire,
&guard,
) {
Ok(old) => {
let old_ptr = old.as_raw();
if !old_ptr.is_null() {
// SAFETY: the old node is unreachable after a successful CAS.
unsafe { guard::retire(old_ptr) };
}
Ok(AtomGuard {
ptr: unsafe { AtomNode::val_ptr(new_ptr) },
_guard: guard,
marker,
})
}
Err(_) => {
// Lost the race; the new node was never published, so free it.
let rejected = unsafe { Box::from_raw(new_ptr) };
Err(rejected.val)
}
}
}
/// Read-copy-update: repeatedly applies `f` to the current value and
/// tries to install the result until the CAS succeeds. Under contention
/// `f` may run multiple times; each losing allocation is freed.
pub fn rcu<F>(&self, mut f: F)
where
F: FnMut(&T) -> T,
{
loop {
let guard = guard::pin();
let current = self.inner.load(Ordering::Acquire, &guard);
// SAFETY: `guard` keeps the loaded node alive for this iteration.
let current_ref = unsafe { &*AtomNode::val_ptr(current.as_raw()) };
let new_val = f(current_ref);
let new_ptr = AtomNode::boxed(new_val);
let new_shared = unsafe { crate::atomic::Shared::from_raw(new_ptr) };
match self.inner.compare_exchange(
current,
new_shared,
Ordering::AcqRel,
Ordering::Acquire,
&guard,
) {
Ok(old) => {
let old_ptr = old.as_raw();
if !old_ptr.is_null() {
// SAFETY: unreachable after the successful CAS.
unsafe { guard::retire(old_ptr) };
}
return;
}
Err(_) => {
// SAFETY: never published; drop the speculative node.
unsafe {
drop(Box::from_raw(new_ptr));
}
}
}
}
}
/// Returns a view that projects a component `&R` out of the stored `T`.
pub fn map<'a, R: 'a, F>(&'a self, f: F) -> AtomMap<'a, T, R, F>
where
F: Fn(&T) -> &R,
{
AtomMap {
atom: self,
project: f,
marker,
}
}
}
impl<T: Send + Sync + 'static> Drop for Atom<T> {
    fn drop(&mut self) {
        // `&mut self` guarantees no guards are outstanding, so the node can
        // be freed immediately instead of going through the retire list.
        let node = self.inner.load_raw();
        if !node.is_null() {
            // SAFETY: `node` came from `Box::into_raw` and we are its sole owner.
            unsafe { drop(Box::from_raw(node)) };
        }
        crate::flush();
    }
}
impl<T: Send + Sync + 'static + Default> Default for Atom<T> {
fn default() -> Self {
Self::new(T::default())
}
}
impl<T: Send + Sync + 'static> From<T> for Atom<T> {
fn from(val: T) -> Self {
Self::new(val)
}
}
impl<T: Send + Sync + 'static + fmt::Debug> fmt::Debug for Atom<T> {
    /// Debug-prints a snapshot of the current value.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let snapshot = self.load();
        f.debug_struct("Atom").field("value", &*snapshot).finish()
    }
}
// SAFETY: an `&Atom<T>` lets any thread read `&T` and retire values that may
// be dropped from another thread, hence the `T: Send + Sync` bound. The
// manual impls are needed presumably because `Atomic` holds a raw pointer
// (it exposes one via `load_raw`), which suppresses the auto traits —
// confirm against the `atomic` module.
unsafe impl<T: Send + Sync + 'static> Send for Atom<T> {}
unsafe impl<T: Send + Sync + 'static> Sync for Atom<T> {}
/// Like [`Atom`] but possibly empty; the `None` state is represented by a
/// null node pointer inside `inner`.
pub struct AtomOption<T: Send + Sync + 'static> {
inner: Atomic<AtomNode<T>>,
}
impl<T: Send + Sync + 'static> AtomOption<T> {
/// Creates an empty atom (null inner pointer).
#[inline]
pub fn none() -> Self {
Self {
inner: Atomic::null(),
}
}
/// Creates an atom holding `val`.
#[inline]
pub fn some(val: T) -> Self {
let ptr = AtomNode::boxed(val);
Self {
inner: Atomic::new(ptr),
}
}
/// Pins the epoch and returns a guard over the current value, or `None`
/// if the atom was empty at the time of the load.
#[inline]
pub fn load(&self) -> Option<AtomGuard<'_, T>> {
let guard = guard::pin();
let shared = self.inner.load(Ordering::Acquire, &guard);
if shared.is_null() {
None
} else {
Some(AtomGuard {
// SAFETY: checked non-null above; `guard` keeps the node alive.
ptr: unsafe { AtomNode::val_ptr(shared.as_raw()) },
_guard: guard,
marker,
})
}
}
/// True if the atom was empty at the moment of the load; the answer may
/// be stale by the time the caller acts on it.
#[inline]
pub fn is_none(&self) -> bool {
let guard = guard::pin();
self.inner.load(Ordering::Acquire, &guard).is_null()
}
/// Complement of [`AtomOption::is_none`]; same snapshot caveat applies.
#[inline]
pub fn is_some(&self) -> bool {
!self.is_none()
}
/// Stores `Some(val)`, retiring any previously held node.
#[inline]
pub fn store_some(&self, val: T) {
let new_ptr = AtomNode::boxed(val);
let guard = guard::pin();
// SAFETY: `new_ptr` is a fresh, valid allocation.
let new_shared = unsafe { crate::atomic::Shared::from_raw(new_ptr) };
let old = self.inner.swap(new_shared, Ordering::AcqRel, &guard);
let old_ptr = old.as_raw();
if !old_ptr.is_null() {
// SAFETY: unreachable by new loads after the swap.
unsafe { guard::retire(old_ptr) };
}
}
/// Clears the atom to `None`, retiring any previously held node.
#[inline]
pub fn store_none(&self) {
let guard = guard::pin();
// SAFETY: null is the designated empty state and is never dereferenced.
let null_shared = unsafe { crate::atomic::Shared::from_raw(core::ptr::null_mut()) };
let old = self.inner.swap(null_shared, Ordering::AcqRel, &guard);
let old_ptr = old.as_raw();
if !old_ptr.is_null() {
// SAFETY: unreachable by new loads after the swap.
unsafe { guard::retire(old_ptr) };
}
}
/// Atomically clears the atom and returns the previous value, if any, as
/// a [`Removed`] whose destructor is deferred.
pub fn take(&self) -> Option<Removed<T>> {
let guard = guard::pin();
let null_shared = unsafe { crate::atomic::Shared::from_raw(core::ptr::null_mut()) };
let old = self.inner.swap(null_shared, Ordering::AcqRel, &guard);
let old_ptr = old.as_raw();
if old_ptr.is_null() {
None
} else {
// Copy the value's bits out; the node memory is freed later
// WITHOUT re-running `T`'s destructor (double-drop hazard).
let val = unsafe { core::ptr::read(AtomNode::val_ptr(old_ptr)) };
// repr(C): the `RetiredNode` header sits at offset 0.
let birth_epoch = unsafe { (*(old_ptr as *mut RetiredNode)).birth_epoch() };
unsafe { retire_node_dealloc_only::<T>(old_ptr) };
Some(Removed {
value: ManuallyDrop::new(val),
birth_epoch,
})
}
}
}
impl<T: Send + Sync + 'static> Drop for AtomOption<T> {
    fn drop(&mut self) {
        // `&mut self` guarantees no guards are outstanding, so the node (if
        // any) can be freed immediately rather than retired.
        let node = self.inner.load_raw();
        if !node.is_null() {
            // SAFETY: produced by `Box::into_raw`; we are the sole owner.
            unsafe { drop(Box::from_raw(node)) };
        }
        crate::flush();
    }
}
impl<T: Send + Sync + 'static> Default for AtomOption<T> {
fn default() -> Self {
Self::none()
}
}
impl<T: Send + Sync + 'static + fmt::Debug> fmt::Debug for AtomOption<T> {
    /// Debug-prints a snapshot of the current state.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Some(snapshot) = self.load() {
            f.debug_tuple("AtomOption::Some").field(&*snapshot).finish()
        } else {
            f.write_str("AtomOption::None")
        }
    }
}
// SAFETY: an `&AtomOption<T>` lets any thread read `&T` and retire values
// that may be dropped elsewhere, hence `T: Send + Sync`. Manual impls are
// needed presumably because `Atomic` holds a raw pointer (it exposes one
// via `load_raw`), which suppresses the auto traits — confirm against the
// `atomic` module.
unsafe impl<T: Send + Sync + 'static> Send for AtomOption<T> {}
unsafe impl<T: Send + Sync + 'static> Sync for AtomOption<T> {}
/// Lazily projected view over an [`Atom`]: each `load` pins the atom and
/// applies `project` to borrow an `R` out of the stored `T`.
pub struct AtomMap<'a, T: Send + Sync + 'static, R, F> {
atom: &'a Atom<T>,
project: F,
// Carries `R` in the type without storing a value of it.
marker: marker<R>,
}
/// Guard for a projected borrow: `_inner` keeps the source node pinned
/// while `projected` points into it.
pub struct AtomMapGuard<'a, T, R, F>
where
T: Send + Sync + 'static,
F: Fn(&T) -> &R,
{
_inner: AtomGuard<'a, T>,
// Points into the value kept alive by `_inner`.
projected: *const R,
_project: marker<F>,
}
impl<T: Send + Sync + 'static, R, F: Fn(&T) -> &R> Deref for AtomMapGuard<'_, T, R, F> {
    type Target = R;

    /// Borrows the projected component.
    #[inline]
    fn deref(&self) -> &R {
        // SAFETY: `projected` was derived from a `&R` into the node that
        // `_inner` keeps pinned, so it is non-null and valid here.
        unsafe { self.projected.as_ref().unwrap_unchecked() }
    }
}
impl<'a, T: Send + Sync + 'static, R, F> AtomMap<'a, T, R, F>
where
    F: Fn(&T) -> &R,
{
    /// Pins the source atom and applies the projection, returning a guard
    /// that keeps the underlying node alive while the `R` is borrowed.
    #[inline]
    pub fn load(&self) -> AtomMapGuard<'a, T, R, F> {
        let inner = self.atom.load();
        // The `&R` coerces to a raw pointer; `inner` keeps it valid.
        let projected: *const R = (self.project)(&*inner);
        AtomMapGuard {
            _inner: inner,
            projected,
            _project: marker,
        }
    }
}
/// Retires `ptr` so its memory is deallocated once no pinned reader can
/// still hold it, WITHOUT running `T`'s destructor — used after the value
/// was already moved out with `ptr::read` (see `Atom::swap` and
/// `AtomOption::take`), where dropping `T` again would be a double drop.
///
/// # Safety
/// `ptr` must come from `AtomNode::boxed` (global allocator, layout of
/// `AtomNode<T>`) and must no longer be reachable by new loads.
unsafe fn retire_node_dealloc_only<T: 'static>(ptr: *mut AtomNode<T>) {
// Frees the node's allocation; the layout matches the `Box` created in
// `AtomNode::boxed`. `p` is the node base pointer: `#[repr(C)]` puts the
// `RetiredNode` header at offset 0.
unsafe fn dealloc_destructor<T>(p: *mut RetiredNode) {
unsafe {
alloc::alloc::dealloc(p as *mut u8, alloc::alloc::Layout::new::<AtomNode<T>>());
}
}
let node_ptr = ptr as *mut RetiredNode;
unsafe {
// Override the reclamation action with the dealloc-only destructor.
(*node_ptr).set_destructor(Some(dealloc_destructor::<T>));
}
// SAFETY: ownership of the node passes to the retire list.
unsafe { guard::retire_raw(node_ptr) };
}