#[cfg(not(portable_atomic_no_asm))]
use core::arch::asm;
#[cfg(not(feature = "critical-section"))]
use core::cell::UnsafeCell;
use core::sync::atomic::Ordering;
#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub fn fence(order: Ordering) {
    // A fence with Relaxed ordering is meaningless; reject it up front,
    // mirroring core::sync::atomic::fence's panic behavior.
    if matches!(order, Ordering::Relaxed) {
        panic!("there is no such thing as a relaxed fence");
    }
    // Delegate to the compiler-only barrier; presumably no hardware
    // synchronization instruction is needed on this target — confirm
    // against the target's memory-model documentation.
    compiler_fence(order);
}
#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub fn compiler_fence(order: Ordering) {
    // Relaxed is rejected, matching core::sync::atomic::compiler_fence;
    // every other ordering falls through to the barrier below.
    if matches!(order, Ordering::Relaxed) {
        panic!("there is no such thing as a relaxed compiler fence");
    }
    // SAFETY: an empty asm block emits no instructions; it only acts as an
    // optimization barrier for the compiler (no `nomem`/`readonly` options,
    // so it is treated as touching memory).
    unsafe {
        #[cfg(not(portable_atomic_no_asm))]
        asm!("", options(nostack, preserves_flags));
        #[cfg(portable_atomic_no_asm)]
        llvm_asm!("" ::: "memory" : "volatile");
    }
}
#[cfg(not(feature = "critical-section"))]
items!({
// Generates fixed-width atomic types whose every operation is one sized
// MSP430-style instruction (`.b` = byte, `.w` = word). NOTE(review): the
// soundness story is presumably that a single instruction cannot be torn
// by an interrupt on this single-core target — confirm against the
// target's instruction-set manual.
macro_rules! atomic {
// `load_store` arm: emits only `load`/`store`. The optional
// `[$($generics)*]` bracket threads generic parameters through the
// generated items (used for `AtomicPtr<T>` below).
(load_store, $([$($generics:tt)*])? $atomic_type:ident, $value_type:ty, $size:tt) => {
// `repr(transparent)` keeps the layout identical to the wrapped value.
#[repr(transparent)]
pub(crate) struct $atomic_type $(<$($generics)*>)? {
v: UnsafeCell<$value_type>,
}
// SAFETY: shared access only happens through the asm operations below,
// each of which is a single instruction (assumed interrupt-atomic on
// this target — TODO confirm).
unsafe impl $(<$($generics)*>)? Send for $atomic_type $(<$($generics)*>)? {}
unsafe impl $(<$($generics)*>)? Sync for $atomic_type $(<$($generics)*>)? {}
impl $(<$($generics)*>)? $atomic_type $(<$($generics)*>)? {
// Atomic load: one sized `mov` from memory to a register.
#[inline]
#[cfg_attr(
all(debug_assertions, not(portable_atomic_no_track_caller)),
track_caller
)]
pub(crate) fn load(&self, order: Ordering) -> $value_type {
// Rejects orderings that are invalid for a load (helper panics).
crate::utils::assert_load_ordering(order);
let src = self.v.get();
// SAFETY: `src` comes from `UnsafeCell::get`, so it is a valid,
// properly aligned pointer to `$value_type`.
unsafe {
let out;
#[cfg(not(portable_atomic_no_asm))]
asm!(
// `@{src}` is register-indirect addressing. `mov` leaves the
// status register untouched, hence `preserves_flags`.
concat!("mov.", $size, " @{src}, {out}"), src = in(reg) src,
out = lateout(reg) out,
options(nostack, preserves_flags),
);
// Fallback for toolchains without stable `asm!`: same
// instruction via the legacy `llvm_asm!` syntax.
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("mov.", $size, " $1, $0")
: "=r"(out) : "*m"(src) : "memory" : "volatile"
);
out
}
}
// Atomic store: one sized `mov` from a register to memory.
#[inline]
#[cfg_attr(
all(debug_assertions, not(portable_atomic_no_track_caller)),
track_caller
)]
pub(crate) fn store(&self, val: $value_type, order: Ordering) {
// Rejects orderings that are invalid for a store (helper panics).
crate::utils::assert_store_ordering(order);
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
// `0({dst})` is indexed addressing with a zero offset.
concat!("mov.", $size, " {val}, 0({dst})"), dst = in(reg) dst,
val = in(reg) val,
options(nostack, preserves_flags),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("mov.", $size, " $1, $0")
:: "*m"(dst), "ir"(val) : "memory" : "volatile"
);
}
}
}
};
// Full arm: `load`/`store` plus in-place read-modify-write ops. The RMW
// ops ignore `_order` and return nothing (no fetched previous value);
// presumably the `fetch_*` APIs are layered on top elsewhere — confirm.
($([$($generics:tt)*])? $atomic_type:ident, $value_type:ty, $size:tt) => {
atomic!(load_store, $([$($generics)*])? $atomic_type, $value_type, $size);
impl $(<$($generics)*>)? $atomic_type $(<$($generics)*>)? {
// In-place add: a single memory-destination `add` instruction.
// No `preserves_flags`: unlike `mov`, this instruction updates the
// status register (same reason for `sub`/`and`/`xor`/`inv` below).
#[inline]
pub(crate) fn add(&self, val: $value_type, _order: Ordering) {
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
concat!("add.", $size, " {val}, 0({dst})"), dst = in(reg) dst,
val = in(reg) val,
options(nostack),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("add.", $size, " $1, $0")
:: "*m"(dst), "ir"(val) : "memory" : "volatile"
);
}
}
// In-place subtract: single memory-destination `sub` (sets flags).
#[inline]
pub(crate) fn sub(&self, val: $value_type, _order: Ordering) {
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
concat!("sub.", $size, " {val}, 0({dst})"), dst = in(reg) dst,
val = in(reg) val,
options(nostack),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("sub.", $size, " $1, $0")
:: "*m"(dst), "ir"(val) : "memory" : "volatile"
);
}
}
// In-place bitwise AND: single `and` instruction (sets flags).
#[inline]
pub(crate) fn and(&self, val: $value_type, _order: Ordering) {
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
concat!("and.", $size, " {val}, 0({dst})"), dst = in(reg) dst,
val = in(reg) val,
options(nostack),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("and.", $size, " $1, $0")
:: "*m"(dst), "ir"(val) : "memory" : "volatile"
);
}
}
// In-place bitwise OR via `bis` (bit set). `bis` does not modify the
// status register, so `preserves_flags` is back on here.
#[inline]
pub(crate) fn or(&self, val: $value_type, _order: Ordering) {
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
concat!("bis.", $size, " {val}, 0({dst})"), dst = in(reg) dst,
val = in(reg) val,
options(nostack, preserves_flags),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("bis.", $size, " $1, $0")
:: "*m"(dst), "ir"(val) : "memory" : "volatile"
);
}
}
// In-place bitwise XOR: single `xor` instruction (sets flags).
#[inline]
pub(crate) fn xor(&self, val: $value_type, _order: Ordering) {
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
concat!("xor.", $size, " {val}, 0({dst})"), dst = in(reg) dst,
val = in(reg) val,
options(nostack),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("xor.", $size, " $1, $0")
:: "*m"(dst), "ir"(val) : "memory" : "volatile"
);
}
}
// In-place bitwise NOT via the single-operand `inv` (sets flags).
#[inline]
pub(crate) fn not(&self, _order: Ordering) {
let dst = self.v.get();
// SAFETY: `dst` comes from `UnsafeCell::get` (valid + aligned).
unsafe {
#[cfg(not(portable_atomic_no_asm))]
asm!(
concat!("inv.", $size, " 0({dst})"), dst = in(reg) dst,
options(nostack),
);
#[cfg(portable_atomic_no_asm)]
llvm_asm!(
concat!("inv.", $size, " $0")
:: "*m"(dst) : "memory" : "volatile"
);
}
}
}
};
}
// Instantiations: 8-bit types use the `.b` (byte) forms, 16-bit types the
// `.w` (word) forms. `isize`/`usize` use "w", so pointer-sized integers are
// presumably 16-bit on this target — consistent with MSP430.
atomic!(AtomicI8, i8, "b");
atomic!(AtomicU8, u8, "b");
atomic!(AtomicI16, i16, "w");
atomic!(AtomicU16, u16, "w");
atomic!(AtomicIsize, isize, "w");
atomic!(AtomicUsize, usize, "w");
// AtomicPtr only gets load/store (the `load_store` arm): arithmetic/bitwise
// RMW ops are not generated for raw pointers.
atomic!(load_store, [T] AtomicPtr, *mut T, "w");
});