#![warn(rust_2018_idioms)]
#![warn(missing_docs)]
#![feature(stdsimd)]
#![feature(cmpxchg16b_target_feature)]
pub use core::sync::atomic::{fence, Ordering};
use std::panic::RefUnwindSafe;
#[cfg(feature = "fallback")]
mod fallback;
mod ops;
use core::cell::UnsafeCell;
use core::fmt;
/// A memory cell supporting double-width (16-byte) atomic operations on `T`.
///
/// The `#[repr(C, align(16))]` layout forces the 16-byte alignment that
/// double-width atomic instructions require (cf. the
/// `cmpxchg16b_target_feature` crate feature above).
#[repr(C, align(16))]
pub struct AtomicDouble<T> {
    // Interior mutability; all shared access goes through the atomic
    // operations in the `ops` module, never through plain reads/writes.
    v: UnsafeCell<T>,
}
// SAFETY: `AtomicDouble` exposes its contents only via atomic operations
// (or via `&mut self` / by-value methods, which imply exclusive access),
// so sharing a reference across threads is sound provided `T: Send`.
unsafe impl<T: Copy + Send> Sync for AtomicDouble<T> {}
// `UnsafeCell` is `!RefUnwindSafe` by default; we opt back in because values
// are `Copy` and are replaced wholesale by the atomic operations, so a panic
// cannot leave the cell observably torn — mirror of std's atomics behavior.
impl<T: Copy + RefUnwindSafe> RefUnwindSafe for AtomicDouble<T> {}
impl<T: Copy + Default> Default for AtomicDouble<T> {
#[inline]
fn default() -> Self {
Self::new(Default::default())
}
}
impl<T: Copy + fmt::Debug> fmt::Debug for AtomicDouble<T> {
    /// Formats as `AtomicDouble(<value>)` using an atomic snapshot of the
    /// current contents.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Take one atomic snapshot so the printed value is internally
        // consistent even under concurrent stores.
        let snapshot = self.load(Ordering::SeqCst);
        let mut tuple = f.debug_tuple("AtomicDouble");
        tuple.field(&snapshot);
        tuple.finish()
    }
}
impl<T> AtomicDouble<T> {
    /// Creates a new `AtomicDouble` initialized with `v`.
    ///
    /// Usable in const contexts (e.g. statics).
    #[inline]
    pub const fn new(v: T) -> AtomicDouble<T> {
        AtomicDouble {
            v: UnsafeCell::new(v),
        }
    }
    /// Returns `true` if atomic operations on `AtomicDouble<T>` are
    /// implemented with lock-free instructions.
    ///
    /// Delegates to `ops::atomic_is_lock_free`; presumably this depends on
    /// the size/alignment of `T` and the target's double-width atomic
    /// support — confirm against the `ops` module.
    #[inline]
    pub fn is_lock_free() -> bool {
        ops::atomic_is_lock_free::<T>()
    }
}
impl<T: Copy> AtomicDouble<T> {
    /// Returns a mutable reference to the underlying value.
    ///
    /// Safe without atomics: `&mut self` proves no other thread holds a
    /// reference to this cell.
    #[inline]
    pub fn get_mut(&mut self) -> &mut T {
        // SAFETY: exclusive borrow of `self` rules out concurrent access.
        unsafe { &mut *self.v.get() }
    }
    /// Consumes the cell and returns the contained value.
    #[inline]
    pub fn into_inner(self) -> T {
        self.v.into_inner()
    }
    /// Atomically loads and returns the current value.
    ///
    /// `order` presumably obeys the usual load constraints of
    /// [`core::sync::atomic`] (`Release`/`AcqRel` invalid); whether that is
    /// checked lives in `ops::atomic_load` — confirm there.
    #[inline]
    pub fn load(&self, order: Ordering) -> T {
        unsafe { ops::atomic_load(self.v.get(), order) }
    }
    /// Atomically stores `val` into the cell.
    ///
    /// `order` presumably obeys the usual store constraints
    /// (`Acquire`/`AcqRel` invalid) — enforced, if at all, by
    /// `ops::atomic_store`.
    #[inline]
    pub fn store(&self, val: T, order: Ordering) {
        unsafe {
            ops::atomic_store(self.v.get(), val, order);
        }
    }
    /// Stores `new` if the current value equals `current`.
    ///
    /// Returns `Ok(previous)` on success and `Err(actual)` on failure, with
    /// `success`/`failure` orderings applied to the respective outcome —
    /// comparison semantics for arbitrary `T: Copy` are defined by
    /// `ops::atomic_compare_exchange` (presumably bytewise; confirm).
    #[inline]
    pub fn compare_exchange(
        &self,
        current: T,
        new: T,
        success: Ordering,
        failure: Ordering,
    ) -> Result<T, T> {
        unsafe { ops::atomic_compare_exchange(self.v.get(), current, new, success, failure) }
    }
    /// Atomically adds `val` to the current value, returning the previous
    /// value. Addition semantics for `T` are defined by `ops::atomic_add`
    /// — TODO confirm (e.g. wrapping behavior on overflow).
    #[inline]
    pub fn fetch_add(&self, val: T, order: Ordering) -> T {
        unsafe { ops::atomic_add(self.v.get(), val, order) }
    }
    /// Atomically subtracts `val` from the current value, returning the
    /// previous value. Semantics mirror [`Self::fetch_add`] via
    /// `ops::atomic_sub`.
    #[inline]
    pub fn fetch_sub(&self, val: T, order: Ordering) -> T {
        unsafe { ops::atomic_sub(self.v.get(), val, order) }
    }
}