use std::marker::PhantomData;
use std::mem::{self as StdMem, size_of};
use std::ptr as StdPtr;
use std::sync::atomic::{AtomicPtr, AtomicU8, AtomicU16, AtomicU32, AtomicU64, Ordering};
use crate::leaf15::WIDTH_15;
use crate::ordering::{READ_ORD, RELAXED, WRITE_ORD};
use super::RetireHandle;
use super::ValueArray;
use super::ValuePtr;
/// Dispatches an atomic load over `size_of::<V>()` and bit-casts the loaded
/// integer into a `V` with `transmute_copy`.
///
/// Must be expanded inside a function generic over `V` (the macro refers to
/// the caller's `V`). Only sizes 1, 2, 4 and 8 are supported; any other size
/// panics via `unreachable!()` at runtime.
///
/// NOTE(review): `$ptr` must be aligned for the matching `AtomicUN` type,
/// which can exceed `align_of::<V>()` (e.g. `V = [u8; 8]`) — callers must
/// guarantee this; it is not checked here.
macro_rules! atomic_load_dispatch {
    ($ptr:expr, $ordering:expr) => {
        match size_of::<V>() {
            // Load as the same-sized atomic integer, then reinterpret the
            // bits as a `V`.
            1 => StdMem::transmute_copy(&(*$ptr.cast::<AtomicU8>()).load($ordering)),
            2 => StdMem::transmute_copy(&(*$ptr.cast::<AtomicU16>()).load($ordering)),
            4 => StdMem::transmute_copy(&(*$ptr.cast::<AtomicU32>()).load($ordering)),
            8 => StdMem::transmute_copy(&(*$ptr.cast::<AtomicU64>()).load($ordering)),
            // Unsupported value width — see size guard in callers.
            _ => unreachable!(),
        }
    };
}
/// Dispatches an atomic store over `size_of::<V>()`, bit-casting `$value`
/// (a `&V`) into the same-sized unsigned integer before storing.
///
/// Must be expanded inside a function generic over `V`. Only sizes 1, 2, 4
/// and 8 are supported; any other size panics via `unreachable!()` at
/// runtime.
///
/// NOTE(review): as with the load macro, `$ptr` must be aligned for the
/// matching `AtomicUN` type — this is a caller obligation, not checked here.
macro_rules! atomic_store_dispatch {
    ($ptr:expr, $value:expr, $ordering:expr) => {
        match size_of::<V>() {
            // Reinterpret the `V` bytes as the matching integer and store
            // it atomically.
            1 => (*$ptr.cast::<AtomicU8>()).store(StdMem::transmute_copy($value), $ordering),
            2 => (*$ptr.cast::<AtomicU16>()).store(StdMem::transmute_copy($value), $ordering),
            4 => (*$ptr.cast::<AtomicU32>()).store(StdMem::transmute_copy($value), $ordering),
            8 => (*$ptr.cast::<AtomicU64>()).store(StdMem::transmute_copy($value), $ordering),
            // Unsupported value width — see size guard in callers.
            _ => unreachable!(),
        }
    };
}
/// Atomically reads a `V` from the raw byte pointer `ptr` with the given
/// `ordering`, by loading the same-sized atomic integer and bit-casting.
///
/// # Safety
/// - `ptr` must be non-null, valid for reads of `size_of::<V>()` bytes, and
///   aligned for the matching `AtomicUN` type (which may require stricter
///   alignment than `V` itself).
/// - `size_of::<V>()` must be 1, 2, 4 or 8 — other sizes panic via
///   `unreachable!()` at runtime.
/// - The bytes at `ptr` must form a valid bit pattern for `V`.
#[inline(always)]
pub unsafe fn atomic_read_value<V>(ptr: *const u8, ordering: Ordering) -> V {
    unsafe { atomic_load_dispatch!(ptr, ordering) }
}
/// Atomically writes `*value` to the raw byte pointer `ptr` with the given
/// `ordering`, by bit-casting `V` into the same-sized atomic integer.
///
/// # Safety
/// - `ptr` must be non-null, valid for writes of `size_of::<V>()` bytes,
///   and aligned for the matching `AtomicUN` type.
/// - `size_of::<V>()` must be 1, 2, 4 or 8 — other sizes panic via
///   `unreachable!()` at runtime.
/// - `value` is only read (`transmute_copy` of a reference), so `V` is not
///   dropped or moved here; the caller retains ownership of `*value`.
#[inline(always)]
pub(super) unsafe fn atomic_write_value<V>(ptr: *mut u8, value: &V, ordering: Ordering) {
    unsafe { atomic_store_dispatch!(ptr, value, ordering) }
}
/// A fixed-width array of `WIDTH_15` slots, each holding a raw pointer to a
/// heap-allocated `V` (see `cleanup`, which frees via `Box::from_raw`) or
/// null when the slot is empty.
///
/// `#[repr(C)]` fixes the field layout; `PhantomData<V>` records logical
/// ownership of the pointed-to values for variance and drop-check purposes
/// without storing a `V` inline.
#[repr(C)]
pub struct BoxValueArray<V> {
    // Null pointer == empty slot; non-null == pointer to a boxed `V` (or a
    // child-layer pointer stored via the `store_layer` trait method).
    ptrs: [AtomicPtr<u8>; WIDTH_15],
    _marker: PhantomData<V>,
}
impl<V> BoxValueArray<V> {
    /// Loads the raw slot pointer using the crate-wide `READ_ORD` ordering.
    /// Null means the slot is empty.
    #[inline(always)]
    pub(crate) fn load_raw(&self, slot: usize) -> *mut u8 {
        debug_assert!(slot < WIDTH_15, "load_raw: slot {slot} out of bounds");
        self.ptrs[slot].load(READ_ORD)
    }

    /// Relaxed-ordering variant of `load_raw`, for callers that hold
    /// exclusive access or synchronize through other means.
    #[inline(always)]
    pub(crate) fn load_raw_relaxed(&self, slot: usize) -> *mut u8 {
        debug_assert!(
            slot < WIDTH_15,
            "load_raw_relaxed: slot {slot} out of bounds"
        );
        self.ptrs[slot].load(RELAXED)
    }

    /// Overwrites the `V` stored *behind* the slot's pointer in place
    /// (atomic value store — the slot pointer itself is not changed) and
    /// returns the previous value.
    ///
    /// # Safety
    /// - The slot must be occupied, and its pointer must reference a live
    ///   `V` that no other thread is concurrently freeing.
    /// - `size_of::<V>()` must be 1, 2, 4 or 8 (only debug-asserted here),
    ///   and the allocation must be aligned for the matching `AtomicUN`
    ///   type.
    /// - The bytes behind the pointer must be a valid bit pattern for `V`,
    ///   and the returned `V` duplicates them — `V` should be trivially
    ///   copyable for this to be sound. TODO(review): confirm callers
    ///   restrict `V` accordingly.
    ///
    /// NOTE(review): the read-then-write below is two RELAXED operations,
    /// not a single atomic RMW — two concurrent updaters can interleave and
    /// lose an update. Presumably callers guarantee a single writer per
    /// slot; verify at the call sites.
    #[inline(always)]
    pub(crate) unsafe fn write_through_update(&self, slot: usize, new_value: &V) -> V {
        debug_assert!(slot < WIDTH_15, "write_through_update: slot {slot} OOB");
        debug_assert!(size_of::<V>() <= 8, "write-through requires V <= 8 bytes");
        let box_ptr: *mut u8 = self.ptrs[slot].load(RELAXED);
        debug_assert!(
            !box_ptr.is_null(),
            "write_through_update on empty slot {slot}"
        );
        // Read the old value before overwriting it, both with RELAXED
        // ordering — see the NOTE above about external synchronization.
        let old_value: V = unsafe { atomic_read_value::<V>(box_ptr, RELAXED) };
        unsafe { atomic_write_value::<V>(box_ptr, new_value, RELAXED) };
        old_value
    }
}
// SAFETY: the array owns heap pointers to `V` values and hands out access
// to them across threads, so sending or sharing the array is sound exactly
// when `V` itself is both `Send` and `Sync`. The `AtomicPtr` slots are
// already thread-safe; the bounds cover the pointed-to payloads.
unsafe impl<V: Send + Sync> Send for BoxValueArray<V> {}
unsafe impl<V: Send + Sync> Sync for BoxValueArray<V> {}
impl<V: Send + Sync + 'static> ValueArray<ValuePtr<V>> for BoxValueArray<V> {
    /// Creates an array with every slot empty (all pointers null).
    #[inline(always)]
    fn new() -> Self {
        // Safe replacement for the previous `unsafe { StdMem::zeroed() }`:
        // an array of null `AtomicPtr`s is exactly the all-zero bit
        // pattern, and `PhantomData` is zero-sized, so this is
        // bit-identical without any `unsafe`.
        Self {
            ptrs: std::array::from_fn(|_| AtomicPtr::new(StdPtr::null_mut())),
            _marker: PhantomData,
        }
    }

    /// Returns true when `slot` holds no pointer (crate `READ_ORD` load).
    #[inline(always)]
    fn is_empty(&self, slot: usize) -> bool {
        debug_assert!(slot < WIDTH_15, "is_empty: slot {slot} out of bounds");
        self.ptrs[slot].load(READ_ORD).is_null()
    }

    /// Relaxed-ordering variant of `is_empty`, for externally synchronized
    /// callers.
    #[inline(always)]
    fn is_empty_relaxed(&self, slot: usize) -> bool {
        debug_assert!(
            slot < WIDTH_15,
            "is_empty_relaxed: slot {slot} out of bounds"
        );
        self.ptrs[slot].load(RELAXED).is_null()
    }

    /// Returns true when `slot` is occupied. NOTE(review): in this
    /// representation that is simply "non-null" — nothing here tags a
    /// value pointer apart from a layer pointer; verify against callers.
    #[inline(always)]
    fn is_layer(&self, slot: usize) -> bool {
        debug_assert!(slot < WIDTH_15, "is_layer: slot {slot} out of bounds");
        !self.ptrs[slot].load(READ_ORD).is_null()
    }

    /// Loads the value pointer at `slot`, or `None` when the slot is empty.
    #[inline(always)]
    fn load(&self, slot: usize) -> Option<ValuePtr<V>> {
        debug_assert!(slot < WIDTH_15, "load: slot {slot} out of bounds");
        let ptr: *mut u8 = self.ptrs[slot].load(READ_ORD);
        if ptr.is_null() {
            return None;
        }
        // SAFETY: non-null slot pointers in this array were produced from
        // `ValuePtr::as_ptr` (see `store`), so the round-trip is valid.
        unsafe { Some(ValuePtr::from_raw(ptr.cast::<V>())) }
    }

    /// Publishes `output`'s raw pointer into `slot` with `WRITE_ORD`.
    /// Overwrites any previous pointer without retiring it — callers manage
    /// reclamation.
    #[inline(always)]
    fn store(&self, slot: usize, output: &ValuePtr<V>) {
        debug_assert!(slot < WIDTH_15, "store: slot {slot} out of bounds");
        let ptr: *mut u8 = output.as_ptr().cast::<u8>();
        self.ptrs[slot].store(ptr, WRITE_ORD);
    }

    /// Relaxed-ordering variant of `store`.
    #[inline(always)]
    fn store_relaxed(&self, slot: usize, output: &ValuePtr<V>) {
        debug_assert!(slot < WIDTH_15, "store_relaxed: slot {slot} out of bounds");
        let ptr: *mut u8 = output.as_ptr().cast::<u8>();
        self.ptrs[slot].store(ptr, RELAXED);
    }

    /// Replaces the pointer in an occupied `slot` with `output`'s pointer
    /// (publish with `WRITE_ORD`) and returns the old pointer wrapped for
    /// deferred reclamation.
    #[inline(always)]
    fn update_in_place(&self, slot: usize, output: &ValuePtr<V>) -> RetireHandle {
        debug_assert!(
            slot < WIDTH_15,
            "update_in_place: slot {slot} out of bounds"
        );
        let old_ptr: *mut u8 = self.ptrs[slot].load(RELAXED);
        debug_assert!(
            !old_ptr.is_null(),
            "update_in_place called on empty slot {slot}"
        );
        let new_ptr: *mut u8 = output.as_ptr().cast::<u8>();
        self.ptrs[slot].store(new_ptr, WRITE_ORD);
        RetireHandle::Ptr(old_ptr)
    }

    /// Relaxed-ordering variant of `update_in_place`.
    #[inline(always)]
    fn update_in_place_relaxed(&self, slot: usize, output: &ValuePtr<V>) -> RetireHandle {
        debug_assert!(
            slot < WIDTH_15,
            "update_in_place_relaxed: slot {slot} out of bounds"
        );
        let old_ptr: *mut u8 = self.ptrs[slot].load(RELAXED);
        debug_assert!(
            !old_ptr.is_null(),
            "update_in_place_relaxed called on empty slot {slot}"
        );
        let new_ptr: *mut u8 = output.as_ptr().cast::<u8>();
        self.ptrs[slot].store(new_ptr, RELAXED);
        RetireHandle::Ptr(old_ptr)
    }

    /// Atomically swaps `slot` to null (RELAXED) and returns the previous
    /// pointer as an owned `ValuePtr`, or `None` if the slot was empty.
    #[inline(always)]
    fn take(&self, slot: usize) -> Option<ValuePtr<V>> {
        debug_assert!(slot < WIDTH_15, "take: slot {slot} out of bounds");
        let old_ptr: *mut u8 = self.ptrs[slot].swap(StdPtr::null_mut(), RELAXED);
        if old_ptr.is_null() {
            return None;
        }
        // SAFETY: the swap transferred sole ownership of the non-null
        // pointer to us; it was originally produced from a `ValuePtr`.
        unsafe { Some(ValuePtr::from_raw(old_ptr.cast::<V>())) }
    }

    /// Trait-level raw load. Inherent methods take precedence over trait
    /// methods in Rust's method resolution, so this delegates to the
    /// inherent `BoxValueArray::load_raw` — it is not a self-recursion.
    #[inline(always)]
    fn load_raw(&self, slot: usize) -> *mut u8 {
        self.load_raw(slot)
    }

    /// Trait-level relaxed raw load; delegates to the inherent
    /// `load_raw_relaxed` (inherent methods win resolution).
    #[inline(always)]
    fn load_raw_relaxed(&self, slot: usize) -> *mut u8 {
        self.load_raw_relaxed(slot)
    }

    /// Loads the slot pointer, interpreted by the caller as a child-layer
    /// pointer (see `store_layer`); ordering `READ_ORD`.
    #[inline(always)]
    fn load_layer(&self, slot: usize) -> *mut u8 {
        debug_assert!(slot < WIDTH_15, "load_layer: slot {slot} out of bounds");
        self.ptrs[slot].load(READ_ORD)
    }

    /// Stores a raw child-layer pointer into `slot` with `WRITE_ORD`.
    #[inline(always)]
    fn store_layer(&self, slot: usize, ptr: *mut u8) {
        debug_assert!(slot < WIDTH_15, "store_layer: slot {slot} out of bounds");
        self.ptrs[slot].store(ptr, WRITE_ORD);
    }

    /// Relaxed-ordering variant of `load`.
    #[inline(always)]
    fn load_relaxed(&self, slot: usize) -> Option<ValuePtr<V>> {
        debug_assert!(slot < WIDTH_15, "load_relaxed: slot {slot} out of bounds");
        let ptr: *mut u8 = self.ptrs[slot].load(RELAXED);
        if ptr.is_null() {
            return None;
        }
        // SAFETY: see `load` — non-null slot pointers round-trip through
        // `ValuePtr`.
        unsafe { Some(ValuePtr::from_raw(ptr.cast::<V>())) }
    }

    /// Empties `slot` (stores null, `WRITE_ORD`). Does not free the old
    /// pointer — callers must have taken or retired it.
    #[inline(always)]
    fn clear(&self, slot: usize) {
        debug_assert!(slot < WIDTH_15, "clear: slot {slot} out of bounds");
        self.ptrs[slot].store(StdPtr::null_mut(), WRITE_ORD);
    }

    /// Relaxed-ordering variant of `clear`.
    #[inline(always)]
    fn clear_relaxed(&self, slot: usize) {
        debug_assert!(slot < WIDTH_15, "clear_relaxed: slot {slot} out of bounds");
        self.ptrs[slot].store(StdPtr::null_mut(), RELAXED);
    }

    /// Copies the pointer in `self[src_slot]` into `dst[dst_slot]`
    /// (RELAXED load, `WRITE_ORD` publish). The source slot is left
    /// unchanged, so both arrays briefly alias the same pointer.
    #[inline(always)]
    fn move_slot(&self, dst: &Self, src_slot: usize, dst_slot: usize) {
        debug_assert!(
            src_slot < WIDTH_15,
            "move_slot: src_slot {src_slot} out of bounds"
        );
        debug_assert!(
            dst_slot < WIDTH_15,
            "move_slot: dst_slot {dst_slot} out of bounds"
        );
        let ptr: *mut u8 = self.ptrs[src_slot].load(RELAXED);
        dst.ptrs[dst_slot].store(ptr, WRITE_ORD);
    }

    /// Relaxed-ordering variant of `move_slot`.
    #[inline(always)]
    fn move_slot_relaxed(&self, dst: &Self, src_slot: usize, dst_slot: usize) {
        debug_assert!(
            src_slot < WIDTH_15,
            "move_slot_relaxed: src_slot {src_slot} out of bounds"
        );
        debug_assert!(
            dst_slot < WIDTH_15,
            "move_slot_relaxed: dst_slot {dst_slot} out of bounds"
        );
        let ptr: *mut u8 = self.ptrs[src_slot].load(RELAXED);
        dst.ptrs[dst_slot].store(ptr, RELAXED);
    }

    /// Frees the boxed `V` behind `slot`.
    ///
    /// # Safety
    /// - The slot must hold a pointer obtained from `Box::<V>::into_raw`
    ///   that no other thread can still access.
    /// - The slot is NOT nulled afterwards — it is left dangling, so this
    ///   must only run during teardown when the slot will never be read
    ///   again.
    unsafe fn cleanup(&self, slot: usize) {
        debug_assert!(slot < WIDTH_15, "cleanup: slot {slot} out of bounds");
        let ptr: *mut u8 = self.ptrs[slot].load(RELAXED);
        debug_assert!(!ptr.is_null(), "cleanup called on empty slot {slot}");
        // SAFETY: per the contract above, `ptr` came from `Box::into_raw`
        // and we hold exclusive access, so reconstructing and dropping the
        // Box is sound.
        unsafe {
            drop(Box::from_raw(ptr.cast::<V>()));
        }
    }
}