// Sanity guard: this module stores function pointers in `AtomicPtr` (see
// `select_once!`) and cached flags in `AtomicU32`; fail the build on any
// (currently nonexistent) target that has the former without the latter.
#[cfg(all(target_has_atomic = "ptr", not(target_has_atomic = "32")))]
compile_error!("currently all targets that support `AtomicPtr` also support `AtomicU32`");
use core::sync::atomic::{AtomicU32, Ordering};
/// Generates a sequence of `pub const` bit masks of type `$ty`, assigning
/// bit 0 to the first name, bit 1 to the second, and so on.
///
/// Under `#[cfg(test)]` it additionally emits `ALL` (the masks in
/// declaration order) and `NAMES` (their stringified identifiers) so tests
/// can check the full set exhaustively.
#[allow(unused_macros)]
macro_rules! unique_masks {
    ($ty:ty, $($name:ident,)+) => {
        #[cfg(test)]
        pub const ALL: &[$ty] = &[$($name),+];
        #[cfg(test)]
        pub const NAMES: &[&str] = &[$(stringify!($name)),+];
        unique_masks!(@one; $ty; 0; $($name,)+);
    };
    // Recursion base case: no names remain.
    (@one; $_ty:ty; $_idx:expr;) => {};
    // Recursive step: emit one mask at the current shift, then recurse with
    // the shift bumped by one for the rest of the names.
    (@one; $ty:ty; $shift:expr; $name:ident, $($tail:tt)*) => {
        pub const $name: $ty = 1 << $shift;
        // Compile-time guard: no mask may land on the top bit (the flags
        // cache in this file reserves bit 31 as its `INITIALIZED` marker —
        // presumably the reason for this restriction).
        const _: () = assert!($name != (1 << (<$ty>::BITS - 1)));
        unique_masks!(@one; $ty; $shift + 1; $($tail)*);
    };
}
/// One-shot function selection: dispatches through a cached function
/// pointer, resolving it via `$init` on the first call.
///
/// Expands to a block expression. A `static AtomicPtr` starts out pointing
/// at a local `initializer` shim, so the first invocation runs `$init()`,
/// stores the returned pointer for later calls, and forwards the call via
/// `$call`. Subsequent invocations load the cached pointer directly.
///
/// All atomic accesses are `Relaxed`, so racing first calls may each run
/// `$init` — NOTE(review): this assumes `$init` is safe to run more than
/// once; confirm at call sites.
#[allow(unused_macros)] macro_rules! select_once {
    (
        sig: fn($($arg:ident: $ArgTy:ty),*) -> $RetTy:ty,
        init: $init:expr,
        call: $call:expr,
    ) => {{
        use core::mem;
        use core::sync::atomic::{AtomicPtr, Ordering};

        // Signature of the selected function; `$call` receives a `Func`.
        type Func = unsafe fn($($arg: $ArgTy),*) -> $RetTy;

        // Starts as the shim below; overwritten with the real pointer on
        // first use.
        static FUNC: AtomicPtr<()> = AtomicPtr::new((initializer as Func) as *mut ());

        // First-call shim: resolve the real function, cache it, call it.
        fn initializer($($arg: $ArgTy),*) -> $RetTy {
            let fn_ptr: Func = $init();
            FUNC.store(fn_ptr as *mut (), Ordering::Relaxed);
            $call(fn_ptr)
        }

        let raw: *mut () = FUNC.load(Ordering::Relaxed);
        // SAFETY: `FUNC` only ever holds pointers coerced from `Func`
        // (either `initializer` or the value stored in the shim), so the
        // transmute back to `Func` is sound.
        let fn_ptr: Func = unsafe { mem::transmute::<*mut (), Func>(raw) };
        $call(fn_ptr)
    }}
}
#[allow(unused_imports)]
pub(crate) use {select_once, unique_masks};
use crate::support::cold_path;
/// A bit-set of flags stored in a `u32`.
///
/// Compares by raw bits. Bit 31 is used by `get_or_init_flags_cache` as an
/// "initialized" marker, so masks passed through that cache should stay in
/// the low 31 bits (as `unique_masks!` enforces for its constants).
//
// `Eq` added alongside the derived `PartialEq`: equality on a `u32` newtype
// is total (clippy: derive_partial_eq_without_eq).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Flags(u32);

#[allow(dead_code)]
impl Flags {
    /// Flags with no bits set.
    pub const fn empty() -> Self {
        Self(0)
    }

    /// Wraps a raw bit pattern without validation.
    pub const fn from_bits(val: u32) -> Self {
        Self(val)
    }

    /// Returns the raw bit pattern.
    // Now `const` for parity with the `const` constructors.
    pub const fn bits(&self) -> u32 {
        self.0
    }

    /// Sets every bit present in `mask`.
    pub fn insert(&mut self, mask: u32) {
        self.0 |= mask;
    }

    /// Returns `true` iff *all* bits of `mask` are set.
    pub const fn contains(&self, mask: u32) -> bool {
        self.0 & mask == mask
    }

    /// Returns `true` if bit number `bit` (0-based) is set.
    ///
    /// Debug-asserts that `bit < 32`.
    pub const fn test_nth(&self, bit: u32) -> bool {
        debug_assert!(bit < u32::BITS, "bit index out-of-bounds");
        self.0 & (1 << bit) != 0
    }
}
/// Loads `Flags` from `cache`, running `init` to populate it on first use.
///
/// The cache word's top bit (bit 31) marks it as populated, so `init` must
/// leave that bit clear (debug-asserted). All atomic accesses are
/// `Relaxed`, so concurrent first calls may each run `init`.
#[allow(dead_code)]
pub fn get_or_init_flags_cache(cache: &AtomicU32, init: impl FnOnce() -> Flags) -> Flags {
    // Top bit tags the cache word as populated.
    const INITIALIZED: u32 = 1 << 31;

    // Fast path: already populated.
    let cached = Flags::from_bits(cache.load(Ordering::Relaxed));
    if cached.contains(INITIALIZED) {
        return cached;
    }

    // Slow path: compute, tag, and publish.
    cold_path();
    let mut fresh = init();
    debug_assert!(
        !fresh.contains(INITIALIZED),
        "initialized bit shouldn't be set"
    );
    fresh.insert(INITIALIZED);
    cache.store(fresh.bits(), Ordering::Relaxed);
    fresh
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `unique_masks!` assigns sequential low-order bits and records both
    /// the mask values (`ALL`) and their names (`NAMES`) in order.
    #[test]
    fn unique_masks() {
        unique_masks! {
            u32,
            V0,
            V1,
            V2,
        }
        assert_eq!(V0, 1u32 << 0);
        assert_eq!(V1, 1u32 << 1);
        assert_eq!(V2, 1u32 << 2);
        assert_eq!(ALL, [V0, V1, V2]);
        assert_eq!(NAMES, ["V0", "V1", "V2"]);
    }

    /// The second lookup must hit the cache: `f2` is never stored, and both
    /// results equal the first value with the initialized bit (bit 31) set.
    #[test]
    fn flag_cache_is_used() {
        static CACHE: AtomicU32 = AtomicU32::new(0);
        let mut f1 = Flags::from_bits(0x1);
        let f2 = Flags::from_bits(0x2);
        let r1 = get_or_init_flags_cache(&CACHE, || f1);
        let r2 = get_or_init_flags_cache(&CACHE, || f2);
        f1.insert(1 << 31);
        assert_eq!(r1, f1);
        assert_eq!(r2, f1);
    }

    /// `select_once!` must run its `init` closure exactly once across
    /// repeated calls.
    #[test]
    fn select_cache_is_used() {
        static CALLED: AtomicU32 = AtomicU32::new(0);

        // Indirect through `inner` so both invocations go through the same
        // macro expansion (and thus share its `FUNC` static).
        fn inner() {
            fn nop() {}
            select_once! {
                sig: fn() -> (),
                init: || {
                    CALLED.fetch_add(1, Ordering::Relaxed);
                    nop
                },
                call: |fn_ptr: Func| unsafe { fn_ptr() },
            }
        }

        inner();
        assert_eq!(CALLED.load(Ordering::Relaxed), 1);
        // Second call: `init` must not run again.
        inner();
        assert_eq!(CALLED.load(Ordering::Relaxed), 1);
    }
}