include!("macros.rs");
#[allow(dead_code)] #[cfg(target_arch = "x86_64")]
#[cfg(not(target_feature = "cmpxchg16b"))]
#[path = "../fallback/outline_atomics.rs"]
mod fallback;
#[cfg(target_arch = "x86_64")]
#[cfg(not(target_feature = "cmpxchg16b"))]
#[path = "../detect/x86_64.rs"]
mod detect;
#[cfg(not(target_arch = "x86_64"))]
use core::intrinsics;
use core::sync::atomic::Ordering::{self, AcqRel, Acquire, Relaxed, Release, SeqCst};
// Maps a compare_exchange success ordering to the strongest ordering that is
// legal as the *failure* ordering (a failure ordering cannot contain a
// release component, so Release/AcqRel are demoted).
#[cfg(target_arch = "x86_64")]
#[inline]
fn strongest_failure_ordering(order: Ordering) -> Ordering {
    match order {
        Relaxed | Release => Relaxed,
        AcqRel | Acquire => Acquire,
        SeqCst => SeqCst,
        // Ordering is #[non_exhaustive]; any future variant is a bug here.
        _ => unreachable!(),
    }
}
// Loads a 128-bit value from `src`.
//
// SAFETY: the caller must guarantee that `src` is valid and 16-byte aligned
// (presumably -- cmpxchg16b asserts 16-byte alignment below; confirm for the
// non-x86_64 intrinsics), and that `order` is a valid load ordering
// (Relaxed/Acquire/SeqCst -- anything else hits `unreachable!`). Note that on
// x86_64 the load is implemented as a compare_exchange, so `src` must also be
// valid for writes there.
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_load(src: *mut u128, order: Ordering) -> u128 {
    #[cfg(target_arch = "x86_64")]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        // Emulate the load with a compare_exchange that cannot change the
        // value (old == new == 0): whether it "succeeds" or "fails", the
        // returned value is the current contents of `src`.
        let fail_order = strongest_failure_ordering(order);
        match atomic_compare_exchange(src, 0, 0, order, fail_order) {
            Ok(v) | Err(v) => v,
        }
    }
    #[cfg(not(target_arch = "x86_64"))]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        // Dispatch to the ordering-specific load intrinsic.
        match order {
            Acquire => intrinsics::atomic_load_acquire(src),
            Relaxed => intrinsics::atomic_load_relaxed(src),
            SeqCst => intrinsics::atomic_load_seqcst(src),
            _ => unreachable!(),
        }
    }
}
// Stores `val` into `dst`.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid store ordering
// (Relaxed/Release/SeqCst -- anything else hits `unreachable!` on the
// intrinsics path).
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_store(dst: *mut u128, val: u128, order: Ordering) {
    #[cfg(target_arch = "x86_64")]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        // x86_64 has no plain 128-bit atomic store here; emulate it with a
        // swap and discard the previous value.
        atomic_swap(dst, val, order);
    }
    #[cfg(not(target_arch = "x86_64"))]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        // Dispatch to the ordering-specific store intrinsic.
        match order {
            Release => intrinsics::atomic_store_release(dst, val),
            Relaxed => intrinsics::atomic_store_relaxed(dst, val),
            SeqCst => intrinsics::atomic_store_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// 128-bit compare_exchange: if `*dst == old`, writes `new`; returns
// Ok(previous value) on success, Err(previous value) on failure.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and 16-byte aligned (asserted below on the cmpxchg16b path), and that
// (`success`, `failure`) is a valid compare_exchange ordering pair
// (combinations not covered by the match below hit `unreachable!`).
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_compare_exchange(
    dst: *mut u128,
    old: u128,
    new: u128,
    success: Ordering,
    failure: Ordering,
) -> Result<u128, u128> {
    #[cfg(target_arch = "x86_64")]
    let (val, ok) = {
        // Thin wrapper over the cmpxchg16b intrinsic. It carries
        // #[target_feature(enable = "cmpxchg16b")], so it must only be
        // called when the feature is statically enabled or has been
        // confirmed by runtime detection. When the feature is not
        // statically enabled it is kept out-of-line (inline(never)) so its
        // address can be used as an ifunc target below.
        #[target_feature(enable = "cmpxchg16b")]
        #[cfg_attr(target_feature = "cmpxchg16b", inline)]
        #[cfg_attr(not(target_feature = "cmpxchg16b"), inline(never))]
        unsafe fn cmpxchg16b(
            dst: *mut u128,
            old: u128,
            new: u128,
            success: Ordering,
            failure: Ordering,
        ) -> (u128, bool) {
            // cmpxchg16b requires a 16-byte-aligned destination.
            debug_assert!(dst as usize % 16 == 0);
            // Not statically enabled: this function must only be reached
            // after runtime detection reported cmpxchg16b support.
            #[cfg(not(target_feature = "cmpxchg16b"))]
            {
                debug_assert!(detect::detect().cmpxchg16b());
            }
            // SAFETY: the caller must uphold the safety contract, and the
            // cmpxchg16b feature is available here per the checks above.
            let prev = unsafe { core::arch::x86_64::cmpxchg16b(dst, old, new, success, failure) };
            // cmpxchg16b returns the previous value; it succeeded iff that
            // value matched `old`.
            (prev, prev == old)
        }
        // Statically enabled: call the wrapper directly.
        #[cfg(target_feature = "cmpxchg16b")]
        // SAFETY: the caller must uphold the safety contract.
        unsafe {
            cmpxchg16b(dst, old, new, success, failure)
        }
        // Not statically enabled: resolve the implementation once via the
        // ifunc! machinery (presumably defined in the included macros.rs --
        // confirm): cmpxchg16b when detected at runtime, otherwise the
        // outline fallback implementation.
        #[cfg(not(target_feature = "cmpxchg16b"))]
        // SAFETY: the caller must uphold the safety contract.
        unsafe {
            ifunc!(unsafe fn(
                dst: *mut u128, old: u128, new: u128, success: Ordering, failure: Ordering
            ) -> (u128, bool) {
                if detect::detect().cmpxchg16b() {
                    cmpxchg16b
                } else {
                    fallback::atomic_compare_exchange
                }
            })
        }
    };
    #[cfg(not(target_arch = "x86_64"))]
    // SAFETY: the caller must uphold the safety contract.
    let (val, ok) = unsafe {
        // Dispatch to the intrinsic matching the (success, failure) pair.
        match (success, failure) {
            (Relaxed, Relaxed) => intrinsics::atomic_cxchg_relaxed_relaxed(dst, old, new),
            (Relaxed, Acquire) => intrinsics::atomic_cxchg_relaxed_acquire(dst, old, new),
            (Relaxed, SeqCst) => intrinsics::atomic_cxchg_relaxed_seqcst(dst, old, new),
            (Acquire, Relaxed) => intrinsics::atomic_cxchg_acquire_relaxed(dst, old, new),
            (Acquire, Acquire) => intrinsics::atomic_cxchg_acquire_acquire(dst, old, new),
            (Acquire, SeqCst) => intrinsics::atomic_cxchg_acquire_seqcst(dst, old, new),
            (Release, Relaxed) => intrinsics::atomic_cxchg_release_relaxed(dst, old, new),
            (Release, Acquire) => intrinsics::atomic_cxchg_release_acquire(dst, old, new),
            (Release, SeqCst) => intrinsics::atomic_cxchg_release_seqcst(dst, old, new),
            (AcqRel, Relaxed) => intrinsics::atomic_cxchg_acqrel_relaxed(dst, old, new),
            (AcqRel, Acquire) => intrinsics::atomic_cxchg_acqrel_acquire(dst, old, new),
            (AcqRel, SeqCst) => intrinsics::atomic_cxchg_acqrel_seqcst(dst, old, new),
            (SeqCst, Relaxed) => intrinsics::atomic_cxchg_seqcst_relaxed(dst, old, new),
            (SeqCst, Acquire) => intrinsics::atomic_cxchg_seqcst_acquire(dst, old, new),
            (SeqCst, SeqCst) => intrinsics::atomic_cxchg_seqcst_seqcst(dst, old, new),
            _ => unreachable!(),
        }
    };
    if ok { Ok(val) } else { Err(val) }
}
#[cfg(target_arch = "x86_64")]
use self::atomic_compare_exchange as atomic_compare_exchange_weak;
// Weak 128-bit compare_exchange: like atomic_compare_exchange, but may fail
// spuriously (Err even when `*dst == old`), which allows cheaper codegen on
// some targets. On x86_64 the strong version is reused instead (see the
// `use ... as atomic_compare_exchange_weak` alias above).
//
// SAFETY: same contract as atomic_compare_exchange -- `dst` valid for reads
// and writes and suitably aligned; valid (success, failure) ordering pair.
#[cfg(not(target_arch = "x86_64"))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_compare_exchange_weak(
    dst: *mut u128,
    old: u128,
    new: u128,
    success: Ordering,
    failure: Ordering,
) -> Result<u128, u128> {
    // SAFETY: the caller must uphold the safety contract.
    let (val, ok) = unsafe {
        // Dispatch to the weak-CAS intrinsic for the (success, failure) pair.
        match (success, failure) {
            (Relaxed, Relaxed) => intrinsics::atomic_cxchgweak_relaxed_relaxed(dst, old, new),
            (Relaxed, Acquire) => intrinsics::atomic_cxchgweak_relaxed_acquire(dst, old, new),
            (Relaxed, SeqCst) => intrinsics::atomic_cxchgweak_relaxed_seqcst(dst, old, new),
            (Acquire, Relaxed) => intrinsics::atomic_cxchgweak_acquire_relaxed(dst, old, new),
            (Acquire, Acquire) => intrinsics::atomic_cxchgweak_acquire_acquire(dst, old, new),
            (Acquire, SeqCst) => intrinsics::atomic_cxchgweak_acquire_seqcst(dst, old, new),
            (Release, Relaxed) => intrinsics::atomic_cxchgweak_release_relaxed(dst, old, new),
            (Release, Acquire) => intrinsics::atomic_cxchgweak_release_acquire(dst, old, new),
            (Release, SeqCst) => intrinsics::atomic_cxchgweak_release_seqcst(dst, old, new),
            (AcqRel, Relaxed) => intrinsics::atomic_cxchgweak_acqrel_relaxed(dst, old, new),
            (AcqRel, Acquire) => intrinsics::atomic_cxchgweak_acqrel_acquire(dst, old, new),
            (AcqRel, SeqCst) => intrinsics::atomic_cxchgweak_acqrel_seqcst(dst, old, new),
            (SeqCst, Relaxed) => intrinsics::atomic_cxchgweak_seqcst_relaxed(dst, old, new),
            (SeqCst, Acquire) => intrinsics::atomic_cxchgweak_seqcst_acquire(dst, old, new),
            (SeqCst, SeqCst) => intrinsics::atomic_cxchgweak_seqcst_seqcst(dst, old, new),
            _ => unreachable!(),
        }
    };
    if ok { Ok(val) } else { Err(val) }
}
// Generic read-modify-write loop: repeatedly applies `f` to the current
// value and attempts a weak CAS until one lands. Returns the value observed
// immediately before the successful update (i.e. the "previous" value).
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[inline(always)]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_update<F>(dst: *mut u128, order: Ordering, mut f: F) -> u128
where
    F: FnMut(u128) -> u128,
{
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        let mut current = atomic_load(dst, Ordering::Relaxed);
        loop {
            let desired = f(current);
            // A weak CAS may fail spuriously; on failure, retry with the
            // freshly observed value. On success, `current` is exactly the
            // previous contents of `dst`.
            if let Err(observed) =
                atomic_compare_exchange_weak(dst, current, desired, order, Ordering::Relaxed)
            {
                current = observed;
            } else {
                return current;
            }
        }
    }
}
// Generate the RMW helpers as CAS loops via atomic_update (macro presumably
// defined in the included macros.rs -- confirm) on targets where the native
// RMW intrinsics are not used: x86_64 (only cmpxchg16b is available) and
// s390x with pre-LLVM-18 toolchains.
#[cfg(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18)))]
atomic_rmw_by_atomic_update!();
// powerpc64: only the comparison (min/max) family is generated this way;
// the other RMW ops below use the intrinsics directly.
#[cfg(target_arch = "powerpc64")]
atomic_rmw_by_atomic_update!(cmp);
// Atomically replaces `*dst` with `val`, returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_swap(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xchg_acquire(dst, val),
            Release => intrinsics::atomic_xchg_release(dst, val),
            AcqRel => intrinsics::atomic_xchg_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xchg_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xchg_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically adds `val` to `*dst` (wrapping), returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_add(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xadd_acquire(dst, val),
            Release => intrinsics::atomic_xadd_release(dst, val),
            AcqRel => intrinsics::atomic_xadd_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xadd_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xadd_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically subtracts `val` from `*dst` (wrapping), returning the previous
// value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_sub(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xsub_acquire(dst, val),
            Release => intrinsics::atomic_xsub_release(dst, val),
            AcqRel => intrinsics::atomic_xsub_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xsub_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xsub_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically bitwise-ANDs `val` into `*dst`, returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_and(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_and_acquire(dst, val),
            Release => intrinsics::atomic_and_release(dst, val),
            AcqRel => intrinsics::atomic_and_acqrel(dst, val),
            Relaxed => intrinsics::atomic_and_relaxed(dst, val),
            SeqCst => intrinsics::atomic_and_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically NANDs `val` with `*dst` (i.e. `!(*dst & val)`), returning the
// previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_nand(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_nand_acquire(dst, val),
            Release => intrinsics::atomic_nand_release(dst, val),
            AcqRel => intrinsics::atomic_nand_acqrel(dst, val),
            Relaxed => intrinsics::atomic_nand_relaxed(dst, val),
            SeqCst => intrinsics::atomic_nand_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically bitwise-ORs `val` into `*dst`, returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_or(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_or_acquire(dst, val),
            Release => intrinsics::atomic_or_release(dst, val),
            AcqRel => intrinsics::atomic_or_acqrel(dst, val),
            Relaxed => intrinsics::atomic_or_relaxed(dst, val),
            SeqCst => intrinsics::atomic_or_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically bitwise-XORs `val` into `*dst`, returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_xor(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xor_acquire(dst, val),
            Release => intrinsics::atomic_xor_release(dst, val),
            AcqRel => intrinsics::atomic_xor_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xor_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xor_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically stores the *signed* maximum of `*dst` and `val` into `dst`,
// returning the previous value.
//
// Returns `u128` like every other RMW helper in this file (in particular its
// unsigned counterpart `atomic_umax`, with which it is passed in parallel to
// the `atomic128!` macro below): the signed comparison is performed by
// casting to `i128` for the intrinsic, and the previous value is cast back
// to `u128`. The `cast_sign_loss` allow covers exactly that i128 -> u128
// cast; the original body was missing the casts and declared an `i128`
// return type, which is the inconsistency fixed here.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_max(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    #[allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_max_acquire(dst.cast::<i128>(), val as i128) as u128,
            Release => intrinsics::atomic_max_release(dst.cast::<i128>(), val as i128) as u128,
            AcqRel => intrinsics::atomic_max_acqrel(dst.cast::<i128>(), val as i128) as u128,
            Relaxed => intrinsics::atomic_max_relaxed(dst.cast::<i128>(), val as i128) as u128,
            SeqCst => intrinsics::atomic_max_seqcst(dst.cast::<i128>(), val as i128) as u128,
            _ => unreachable!(),
        }
    }
}
// Atomically stores the *signed* minimum of `*dst` and `val` into `dst`,
// returning the previous value.
//
// Returns `u128` like every other RMW helper in this file (in particular its
// unsigned counterpart `atomic_umin`, with which it is passed in parallel to
// the `atomic128!` macro below): the signed comparison is performed by
// casting to `i128` for the intrinsic, and the previous value is cast back
// to `u128`. The `cast_sign_loss` allow covers exactly that i128 -> u128
// cast; the original body was missing the casts and declared an `i128`
// return type, which is the inconsistency fixed here.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_min(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    #[allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_min_acquire(dst.cast::<i128>(), val as i128) as u128,
            Release => intrinsics::atomic_min_release(dst.cast::<i128>(), val as i128) as u128,
            AcqRel => intrinsics::atomic_min_acqrel(dst.cast::<i128>(), val as i128) as u128,
            Relaxed => intrinsics::atomic_min_relaxed(dst.cast::<i128>(), val as i128) as u128,
            SeqCst => intrinsics::atomic_min_seqcst(dst.cast::<i128>(), val as i128) as u128,
            _ => unreachable!(),
        }
    }
}
// Atomically stores the *unsigned* maximum of `*dst` and `val` into `dst`,
// returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_umax(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_umax_acquire(dst, val),
            Release => intrinsics::atomic_umax_release(dst, val),
            AcqRel => intrinsics::atomic_umax_acqrel(dst, val),
            Relaxed => intrinsics::atomic_umax_relaxed(dst, val),
            SeqCst => intrinsics::atomic_umax_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically stores the *unsigned* minimum of `*dst` and `val` into `dst`,
// returning the previous value.
//
// SAFETY: the caller must guarantee that `dst` is valid for reads and writes
// and suitably aligned, and that `order` is a valid RMW ordering.
#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_umin(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_umin_acquire(dst, val),
            Release => intrinsics::atomic_umin_release(dst, val),
            AcqRel => intrinsics::atomic_umin_acqrel(dst, val),
            Relaxed => intrinsics::atomic_umin_relaxed(dst, val),
            SeqCst => intrinsics::atomic_umin_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}
// Atomically inverts every bit of `*dst` (implemented as XOR with all-ones),
// returning the previous value.
//
// SAFETY: same contract as atomic_xor -- `dst` valid for reads and writes
// and suitably aligned; `order` a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_not(dst: *mut u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe { atomic_xor(dst, u128::MAX, order) }
}
// Atomically replaces `*dst` with its wrapping two's-complement negation,
// returning the previous value. Implemented as a CAS loop via atomic_update.
//
// SAFETY: same contract as atomic_update -- `dst` valid for reads and writes
// and suitably aligned; `order` a valid RMW ordering.
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)]
unsafe fn atomic_neg(dst: *mut u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe { atomic_update(dst, order, |v| v.wrapping_neg()) }
}
// Non-x86_64: this implementation reports itself as always lock-free
// (presumably this file is only selected for targets whose 128-bit
// intrinsics are lock-free -- confirm against the cfg in the parent module).
#[cfg(not(target_arch = "x86_64"))]
#[inline]
const fn is_lock_free() -> bool {
    IS_ALWAYS_LOCK_FREE
}
#[cfg(not(target_arch = "x86_64"))]
const IS_ALWAYS_LOCK_FREE: bool = true;
// x86_64: lock-freedom hinges on cmpxchg16b. If the feature is statically
// enabled the answer is a compile-time `true`; otherwise it is decided by
// runtime CPU detection.
#[cfg(target_arch = "x86_64")]
#[inline]
fn is_lock_free() -> bool {
    #[cfg(target_feature = "cmpxchg16b")]
    {
        true
    }
    #[cfg(not(target_feature = "cmpxchg16b"))]
    {
        detect::detect().cmpxchg16b()
    }
}
// "Always lock-free" can only be promised when cmpxchg16b is statically
// enabled; with runtime detection the answer varies per CPU.
#[cfg(target_arch = "x86_64")]
const IS_ALWAYS_LOCK_FREE: bool = cfg!(target_feature = "cmpxchg16b");
// Instantiate the public 128-bit atomic types from the helpers above
// (macro presumably defined in the included macros.rs -- confirm). The
// signed type uses the signed min/max helpers, the unsigned type the
// unsigned ones.
atomic128!(AtomicI128, i128, atomic_max, atomic_min);
atomic128!(AtomicU128, u128, atomic_umax, atomic_umin);
#[cfg(test)]
mod tests {
    use super::*;
    // Shared test-suite macros (from the included macros.rs) exercising the
    // generated atomic types; the stress test runs on the unsigned variant.
    test_atomic_int!(i128);
    test_atomic_int!(u128);
    stress_test!(u128);
}