use std::fmt::{Binary, Debug};
use log::trace;
use std::sync::atomic::{
AtomicU16, AtomicU32, AtomicU64, AtomicU8, Ordering,
};
use crate::af::Zero;
use crate::synth_int::AtomicU128;
use crate::{impl_primitive_atomic_stride, AddressFamily};
// Per-stride bitmap widths. A stride of N address bits is backed by a
// 2^(N+1)-bit integer (3 -> u16, 4 -> u32, 5 -> u64), as wired up by the
// `impl_primitive_atomic_stride!` invocations at the bottom of this file.
pub type Stride3 = u16;
pub type Stride4 = u32;
pub type Stride5 = u64;
// Atomic counterparts of the stride bitmaps, one per width. `AtomicStride6`
// is backed by the synthesised 128-bit atomic from `synth_int`, which holds
// two `AtomicU64` halves (`.0` high word, `.1` low word — see the
// `inner`/`load` implementations below).
pub struct AtomicStride2(pub AtomicU8);
pub struct AtomicStride3(pub AtomicU16);
pub struct AtomicStride4(pub AtomicU32);
pub struct AtomicStride5(pub AtomicU64);
pub struct AtomicStride6(pub AtomicU128);
/// Outcome of a compare-and-swap on a stride bitmap: `Ok` carries the
/// previous value on success, `Err` the actually observed value on failure
/// (mirrors `std::sync::atomic::*::compare_exchange`).
pub struct CasResult<InnerType>(pub Result<InnerType, InnerType>);
impl<InnerType> CasResult<InnerType> {
fn new(value: InnerType) -> Self {
CasResult(Ok(value))
}
}
/// Common interface over the atomic stride bitmaps defined above.
pub trait AtomicBitmap {
    /// The plain (non-atomic) integer type the bitmap wraps.
    type InnerType: Binary
        + Copy
        + Debug
        + Zero
        + PartialOrd
        + std::ops::BitAnd<Output = Self::InnerType>
        + std::ops::BitOr<Output = Self::InnerType>;
    /// Creates a bitmap with all bits cleared.
    fn new() -> Self;
    /// Consumes the atomic and returns the raw inner value.
    fn inner(self) -> Self::InnerType;
    /// Whether bit `index` is set.
    /// NOTE(review): implementations shift by `index`, so callers must keep
    /// `index` below the bit width of `InnerType` (shift overflow panics in
    /// debug builds) — confirm against call sites.
    fn is_set(&self, index: usize) -> bool;
    /// Atomically stores `new` if the bitmap currently holds `current`.
    /// On success the result wraps `Ok` with the previous value; on failure
    /// it wraps `Err` with the value actually observed.
    fn compare_exchange(
        &self,
        current: Self::InnerType,
        new: Self::InnerType,
    ) -> CasResult<Self::InnerType>;
    /// Atomically reads the current value (SeqCst in all impls below).
    fn load(&self) -> Self::InnerType;
    /// Current value as `u64` (truncating for 128-bit bitmaps; see impls).
    fn to_u64(&self) -> u64;
    /// Current value as `u32` (truncating for wider bitmaps; see impls).
    fn to_u32(&self) -> u32;
}
impl AtomicBitmap for AtomicStride2 {
    type InnerType = u8;

    /// An 8-bit bitmap with every bit cleared.
    fn new() -> Self {
        Self(AtomicU8::new(0))
    }

    /// Consumes the atomic and hands back the raw `u8`.
    fn inner(self) -> Self::InnerType {
        self.0.into_inner()
    }

    /// Tests a single bit; equivalent to `load() & (1 << index) != 0`.
    fn is_set(&self, index: usize) -> bool {
        (self.load() >> index) & 1 == 1
    }

    /// SeqCst compare-and-swap wrapped in a `CasResult`.
    fn compare_exchange(
        &self,
        current: Self::InnerType,
        new: Self::InnerType,
    ) -> CasResult<Self::InnerType> {
        let outcome = self.0.compare_exchange(
            current,
            new,
            Ordering::SeqCst,
            Ordering::SeqCst,
        );
        CasResult(outcome)
    }

    /// SeqCst load of the raw bitmap value.
    fn load(&self) -> Self::InnerType {
        self.0.load(Ordering::SeqCst)
    }

    /// Current value zero-extended to `u32`.
    fn to_u32(&self) -> u32 {
        u32::from(self.load())
    }

    /// Current value zero-extended to `u64`.
    fn to_u64(&self) -> u64 {
        u64::from(self.load())
    }
}
impl Zero for AtomicStride2 {
    /// An all-zero 8-bit bitmap.
    fn zero() -> Self {
        Self(AtomicU8::new(0))
    }

    /// True when no bit is set (SeqCst load).
    fn is_zero(&self) -> bool {
        matches!(self.0.load(Ordering::SeqCst), 0)
    }
}
impl AtomicBitmap for AtomicStride3 {
    type InnerType = u16;

    /// A 16-bit bitmap with every bit cleared.
    fn new() -> Self {
        Self(AtomicU16::new(0))
    }

    /// Consumes the atomic and hands back the raw `u16`.
    fn inner(self) -> Self::InnerType {
        self.0.into_inner()
    }

    /// Tests a single bit; equivalent to `load() & (1 << index) != 0`.
    fn is_set(&self, index: usize) -> bool {
        (self.load() >> index) & 1 == 1
    }

    /// SeqCst compare-and-swap wrapped in a `CasResult`.
    fn compare_exchange(
        &self,
        current: Self::InnerType,
        new: Self::InnerType,
    ) -> CasResult<Self::InnerType> {
        let outcome = self.0.compare_exchange(
            current,
            new,
            Ordering::SeqCst,
            Ordering::SeqCst,
        );
        CasResult(outcome)
    }

    /// SeqCst load of the raw bitmap value.
    fn load(&self) -> Self::InnerType {
        self.0.load(Ordering::SeqCst)
    }

    /// Current value zero-extended to `u32`.
    fn to_u32(&self) -> u32 {
        u32::from(self.load())
    }

    /// Current value zero-extended to `u64`.
    fn to_u64(&self) -> u64 {
        u64::from(self.load())
    }
}
impl Zero for AtomicStride3 {
    /// An all-zero 16-bit bitmap.
    fn zero() -> Self {
        Self(AtomicU16::new(0))
    }

    /// True when no bit is set (SeqCst load).
    fn is_zero(&self) -> bool {
        matches!(self.0.load(Ordering::SeqCst), 0)
    }
}
impl AtomicBitmap for AtomicStride4 {
    type InnerType = u32;

    /// A 32-bit bitmap with every bit cleared.
    fn new() -> Self {
        Self(AtomicU32::new(0))
    }

    /// Consumes the atomic and hands back the raw `u32`.
    fn inner(self) -> Self::InnerType {
        self.0.into_inner()
    }

    /// Tests a single bit; equivalent to `load() & (1 << index) != 0`.
    fn is_set(&self, index: usize) -> bool {
        (self.load() >> index) & 1 == 1
    }

    /// SeqCst compare-and-swap wrapped in a `CasResult`.
    fn compare_exchange(
        &self,
        current: Self::InnerType,
        new: Self::InnerType,
    ) -> CasResult<Self::InnerType> {
        let outcome = self.0.compare_exchange(
            current,
            new,
            Ordering::SeqCst,
            Ordering::SeqCst,
        );
        CasResult(outcome)
    }

    /// SeqCst load of the raw bitmap value.
    fn load(&self) -> Self::InnerType {
        self.0.load(Ordering::SeqCst)
    }

    /// Current value as `u32` (already the native width).
    fn to_u32(&self) -> u32 {
        self.load()
    }

    /// Current value zero-extended to `u64`.
    fn to_u64(&self) -> u64 {
        u64::from(self.load())
    }
}
impl Zero for AtomicStride4 {
    /// An all-zero 32-bit bitmap.
    fn zero() -> Self {
        Self(AtomicU32::new(0))
    }

    /// True when no bit is set (SeqCst load).
    fn is_zero(&self) -> bool {
        matches!(self.0.load(Ordering::SeqCst), 0)
    }
}
impl AtomicBitmap for AtomicStride5 {
    type InnerType = u64;

    /// A 64-bit bitmap with every bit cleared.
    fn new() -> Self {
        Self(AtomicU64::new(0))
    }

    /// Consumes the atomic and hands back the raw `u64`.
    fn inner(self) -> Self::InnerType {
        self.0.into_inner()
    }

    /// Tests a single bit; equivalent to `load() & (1 << index) != 0`.
    fn is_set(&self, index: usize) -> bool {
        (self.load() >> index) & 1 == 1
    }

    /// SeqCst compare-and-swap wrapped in a `CasResult`.
    fn compare_exchange(
        &self,
        current: Self::InnerType,
        new: Self::InnerType,
    ) -> CasResult<Self::InnerType> {
        let outcome = self.0.compare_exchange(
            current,
            new,
            Ordering::SeqCst,
            Ordering::SeqCst,
        );
        CasResult(outcome)
    }

    /// SeqCst load of the raw bitmap value.
    fn load(&self) -> Self::InnerType {
        self.0.load(Ordering::SeqCst)
    }

    /// Current value truncated to `u32` (keeps the low 32 bits, matching
    /// the pre-existing `as` cast behavior).
    fn to_u32(&self) -> u32 {
        self.load() as u32
    }

    /// Current value as `u64` (already the native width).
    fn to_u64(&self) -> u64 {
        self.load()
    }
}
impl Zero for AtomicStride5 {
    /// An all-zero 64-bit bitmap.
    fn zero() -> Self {
        Self(AtomicU64::new(0))
    }

    /// True when no bit is set (SeqCst load).
    fn is_zero(&self) -> bool {
        matches!(self.0.load(Ordering::SeqCst), 0)
    }
}
impl AtomicBitmap for AtomicStride6 {
    type InnerType = u128;

    /// A 128-bit bitmap with both 64-bit halves cleared.
    fn new() -> Self {
        AtomicStride6(AtomicU128::new(0))
    }

    /// Consumes the atomic and recombines the two halves into one `u128`
    /// (`.0` is the high word, `.1` the low word — same byte order the
    /// original `to_be_bytes`/`from_be_bytes` round-trip produced).
    fn inner(self) -> Self::InnerType {
        let hi = self.0 .0.into_inner();
        let lo = self.0 .1.into_inner();
        ((hi as u128) << 64) | lo as u128
    }

    fn is_set(&self, bit: usize) -> bool {
        self.load() & (1 << bit) != 0
    }

    /// Two-word compare-and-swap.
    ///
    /// BUG FIX: the previous version compared the HIGH word against the low
    /// 64 bits of `current` (`((current << 64) >> 64) as u64`), and passed
    /// `((new >> 64) << 64) as u64` — which truncates to 0 for every input —
    /// as the new value for BOTH halves, so a "successful" exchange always
    /// zeroed the bitmap. Each half now gets its own correct slice of
    /// `current` and `new`.
    ///
    /// NOTE(review): the two halves are still CAS'ed independently, so the
    /// 128-bit exchange as a whole is not atomic; one half can succeed while
    /// the other fails. The `From` impl below reports `Err` unless both
    /// halves succeed, so callers must treat that as a full retry — confirm
    /// call sites handle partial success safely.
    fn compare_exchange(
        &self,
        current: Self::InnerType,
        new: Self::InnerType,
    ) -> CasResult<Self::InnerType> {
        (
            self.0 .0.compare_exchange(
                (current >> 64) as u64, // high 64 bits of `current`
                (new >> 64) as u64,     // high 64 bits of `new`
                Ordering::SeqCst,
                Ordering::SeqCst,
            ),
            self.0 .1.compare_exchange(
                current as u64, // low 64 bits of `current` (truncating cast)
                new as u64,     // low 64 bits of `new`
                Ordering::SeqCst,
                Ordering::SeqCst,
            ),
        )
            .into()
    }

    /// Loads both halves (SeqCst each) and recombines them. Not a single
    /// atomic 128-bit load: a concurrent writer can be observed between the
    /// two loads.
    fn load(&self) -> Self::InnerType {
        let hi = self.0 .0.load(Ordering::SeqCst);
        let lo = self.0 .1.load(Ordering::SeqCst);
        ((hi as u128) << 64) | lo as u128
    }

    /// Not meaningful for a 128-bit bitmap; intentionally unimplemented.
    fn to_u32(&self) -> u32 {
        unimplemented!()
    }

    /// Not meaningful for a 128-bit bitmap; intentionally unimplemented.
    fn to_u64(&self) -> u64 {
        unimplemented!()
    }
}
impl Zero for AtomicStride6 {
    /// A 128-bit bitmap with both halves zeroed.
    fn zero() -> Self {
        Self(AtomicU128::new(0))
    }

    /// True when both 64-bit halves read as zero. The low word is only
    /// loaded when the high word is zero (short-circuit `&&`, as before).
    fn is_zero(&self) -> bool {
        self.0 .0.load(Ordering::SeqCst) == 0
            && self.0 .1.load(Ordering::SeqCst) == 0
    }
}
impl From<(Result<u64, u64>, Result<u64, u64>)> for CasResult<u128> {
    /// Combines the per-half CAS outcomes of the synthesised 128-bit atomic
    /// into one result. The first tuple element is the high word, the second
    /// the low word. The combined result is `Ok` only when BOTH halves
    /// succeeded; any failure yields `Err` carrying the observed value.
    ///
    /// Replaces 78 lines of per-byte `to_be_bytes()[i]` spelling with a
    /// single shift-or combine — `u128::from_be_bytes([hi bytes.., lo
    /// bytes..])` is exactly `(hi << 64) | lo`, and all four match arms of
    /// the original built the same value, differing only in the wrapping.
    fn from(r: (Result<u64, u64>, Result<u64, u64>)) -> Self {
        // Whichever variant each half is, it carries the relevant word.
        let combine =
            |hi: u64, lo: u64| ((hi as u128) << 64) | lo as u128;
        match r {
            (Ok(hi), Ok(lo)) => CasResult::new(combine(hi, lo)),
            (Err(hi), Ok(lo))
            | (Ok(hi), Err(lo))
            | (Err(hi), Err(lo)) => CasResult(Err(combine(hi, lo))),
        }
    }
}
/// Interface implemented by each stride size used in the tree bitmap.
///
/// A stride owns two bitmaps: a prefix bitmap (`AtomicPfxSize`) and a
/// child-pointer bitmap (`AtomicPtrSize`). In the implementations generated
/// at the bottom of this file the pointer bitmap is half the width of the
/// prefix bitmap (e.g. stride 3: u16 prefixes, u8 pointers).
pub trait Stride:
    Sized
    + Debug
    + Eq
    + Binary
    + PartialOrd
    + PartialEq
    + Zero
    + std::ops::BitAnd<Output = Self>
    + std::ops::BitOr<Output = Self>
where
    Self::AtomicPtrSize: AtomicBitmap,
    Self::AtomicPfxSize: AtomicBitmap,
    // NOTE(review): `Zero` appears twice in this bound list; harmless, but
    // one occurrence could be dropped.
    Self::PtrSize: Zero
        + Binary
        + Copy
        + Debug
        + std::ops::BitAnd<Output = Self::PtrSize>
        + PartialOrd
        + Zero,
{
    /// Atomic bitmap type recording which prefixes exist in this stride.
    type AtomicPfxSize;
    /// Atomic bitmap type recording which child pointers exist.
    type AtomicPtrSize;
    /// Plain integer type backing the pointer bitmap.
    type PtrSize;
    /// Presumably the bit width of the prefix bitmap (16/32/64 in the macro
    /// invocations below) — confirm against the macro definition.
    const BITS: u8;
    /// Number of address bits this stride consumes (3, 4 or 5 below).
    const STRIDE_LEN: u8;
    /// Bit position in the prefix bitmap for `nibble` at prefix length
    /// `len`. Exact layout is defined in `impl_primitive_atomic_stride!`,
    /// which is not visible in this file.
    fn get_bit_pos(
        nibble: u32,
        len: u8,
    ) -> <<Self as Stride>::AtomicPfxSize as AtomicBitmap>::InnerType;
    /// Same bit position expressed as a `u8` index.
    fn get_bit_pos_as_u8(
        nibble: u32,
        len: u8,
    ) -> u8;
    /// Index of the prefix slot for `nibble` at length `len`.
    fn get_pfx_index(nibble: u32, len: u8) -> usize;
    /// Index of the child-pointer slot for `nibble`, computed from the
    /// current pointer bitmap.
    fn get_ptr_index(
        bitmap: <<Self as Stride>::AtomicPtrSize as AtomicBitmap>::InnerType,
        nibble: u32,
    ) -> usize;
    /// Builds a node id from address bits and a length.
    #[allow(clippy::wrong_self_convention)]
    fn into_node_id<AF: AddressFamily>(
        addr_bits: AF,
        len: u8,
    ) -> super::node::StrideNodeId<AF>;
    /// Widens a pointer-bitmap value into the prefix-bitmap integer type.
    #[allow(clippy::wrong_self_convention)]
    fn into_stride_size(
        bitmap: <<Self as Stride>::AtomicPtrSize as AtomicBitmap>::InnerType,
    ) -> <<Self as Stride>::AtomicPfxSize as AtomicBitmap>::InnerType;
    /// Narrows a prefix-bitmap value into the pointer-bitmap integer type.
    #[allow(clippy::wrong_self_convention)]
    fn into_ptrbitarr_size(
        bitmap: <<Self as Stride>::AtomicPfxSize as AtomicBitmap>::InnerType,
    ) -> <<Self as Stride>::AtomicPtrSize as AtomicBitmap>::InnerType;
    /// Number of leading zero bits in the bitmap value.
    fn leading_zeros(self) -> u32;
}
// Generate the `Stride` implementations for the primitive-backed strides.
// NOTE(review): argument meaning inferred from the trait above — presumably
// [STRIDE_LEN; BITS; pfx-bitmap int; atomic pfx bitmap; ptr-bitmap int;
// atomic ptr bitmap]; confirm against the macro definition. There is no
// invocation for stride 6 / `AtomicStride6` here, so that impl presumably
// lives elsewhere.
impl_primitive_atomic_stride![3; 16; u16; AtomicStride3; u8; AtomicStride2];
impl_primitive_atomic_stride![4; 32; u32; AtomicStride4; u16; AtomicStride3];
impl_primitive_atomic_stride![5; 64; u64; AtomicStride5; u32; AtomicStride4];