use core::{
cell::UnsafeCell,
mem::MaybeUninit,
ptr::NonNull,
sync::atomic::{self, AtomicU8, Ordering},
};
use alloc::boxed::Box;
/// Selects one of the two value slots of the shared double buffer.
///
/// The discriminants are chosen so that `Value2` coincides with the
/// read-pointer bit (`0b1000`) of the packed state byte.
#[derive(Clone, Copy, Debug)]
#[repr(u8)]
pub(crate) enum Ptr {
    Value1 = 0,
    Value2 = 0b1000,
}

impl Ptr {
    /// Flips this pointer to the opposite slot.
    pub(crate) fn switch(&mut self) {
        *self = if matches!(*self, Ptr::Value1) {
            Ptr::Value2
        } else {
            Ptr::Value1
        };
    }
}
/// A decoded copy of the packed atomic state byte.
///
/// Layout (low to high): bit 0 = "shared" flag (clear while unique),
/// bits 1-2 = active-read flags for slot 1 / slot 2, bit 3 = read pointer.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
struct State(u8);

impl State {
    const INITIAL: u8 = 0b0000;
    const VALUE1_READ: u8 = 0b0010;
    const VALUE2_READ: u8 = 0b0100;
    const READ_MASK: u8 = 0b0110;
    const NOREAD_MASK: u8 = 0b1001;
    const UNIQUE_MASK: u8 = 0b0001;
    const INV_UNIQUE_MASK: u8 = 0b1110;
    const READ_PTR_MASK: u8 = 0b1000;
    const INV_READ_PTR: u8 = 0b0111;

    /// Wraps a raw state byte, checking its invariants in debug builds:
    /// at most one read flag set, and nothing outside the low nibble.
    fn new(value: u8) -> Self {
        debug_assert!(
            (value & Self::READ_MASK).count_ones() <= 1,
            "max 1 read. value: {value:b}"
        );
        debug_assert!(
            value & 0b1111_0000 == 0,
            "only lower 4 bits used. value: {value:b}"
        );
        Self(value)
    }

    /// `true` while only a single owner exists (the shared bit is clear).
    fn is_unique(self) -> bool {
        self.0 & Self::UNIQUE_MASK != Self::UNIQUE_MASK
    }

    /// The slot that new readers are currently directed to.
    fn read_ptr(self) -> Ptr {
        match self.0 & Self::READ_PTR_MASK {
            0 => Ptr::Value1,
            _ => Ptr::Value2,
        }
    }

    /// Returns a copy of this state with the read flag for `ptr` set.
    /// Debug-asserts that no read is already in progress.
    fn with_read(self, ptr: Ptr) -> Self {
        debug_assert_eq!(self.0 & Self::READ_MASK, 0, "No read currently");
        let flag = if matches!(ptr, Ptr::Value1) {
            Self::VALUE1_READ
        } else {
            Self::VALUE2_READ
        };
        Self(self.0 | flag)
    }

    /// A slot may be written unless a reader is active on that same slot.
    fn can_write(self, ptr: Ptr) -> bool {
        let blocked = match ptr {
            Ptr::Value1 => Self::VALUE1_READ,
            Ptr::Value2 => Self::VALUE2_READ,
        };
        self.0 & Self::READ_MASK != blocked
    }
}
#[derive(Debug)]
pub(crate) struct Shared<T> {
    // Double buffer: the read-pointer bit in `state` directs readers at one
    // of these two cells; the other side is then available for writing.
    pub(crate) value_1: UnsafeCell<T>,
    pub(crate) value_2: UnsafeCell<T>,
    // Packed `State` byte: shared/unique bit, active-read flags, read pointer.
    state: AtomicU8,
}
impl<T> Shared<T> {
    /// Marks a read as active on whichever slot the read pointer currently
    /// selects and returns that slot's cell.
    ///
    /// Must be paired with [`Self::release_read_lock`].
    pub(crate) fn lock_read(&self) -> &UnsafeCell<T> {
        let result = self
            .state
            .fetch_update(Ordering::Relaxed, Ordering::Acquire, |value| {
                let state = State::new(value);
                let ptr = state.read_ptr();
                Some(state.with_read(ptr).0)
            });
        // SAFETY: the closure above always returns `Some`, so `fetch_update`
        // can never return `Err`.
        let result = unsafe { result.unwrap_unchecked() };
        let ptr = State::new(result).read_ptr();
        self.get_value(ptr)
    }

    /// Ends the read started by [`Self::lock_read`] by clearing both read
    /// flags; the shared bit and read-pointer bit are preserved.
    pub(crate) fn release_read_lock(&self) {
        self.state.fetch_and(State::NOREAD_MASK, Ordering::Release);
    }

    /// Checks whether slot `ptr` may be written, i.e. no reader is currently
    /// active on it.
    ///
    /// NOTE(review): this is a plain load, not an RMW, so it is only a
    /// snapshot — presumably the design guarantees a single writer and that
    /// no new read can start on `ptr` while it is the write side; confirm at
    /// the call sites.
    pub(crate) fn lock_write(&self, ptr: Ptr) -> Result<(), ()> {
        let state = State::new(self.state.load(Ordering::Relaxed));
        if state.can_write(ptr) {
            atomic::fence(Ordering::Acquire);
            Ok(())
        } else {
            Err(())
        }
    }

    /// Redirects future readers to slot `ptr` by updating the read-pointer
    /// bit.
    pub(crate) fn set_read_ptr(&self, ptr: Ptr) {
        let value = match ptr {
            Ptr::Value1 => self.state.fetch_and(State::INV_READ_PTR, Ordering::Release),
            Ptr::Value2 => self.state.fetch_or(State::READ_PTR_MASK, Ordering::Release),
        };
        // Constructed purely for its debug-build invariant checks.
        State::new(value);
    }

    /// Allocates a new `Shared` holding `value` in slot 1 and a second value
    /// derived from it in slot 2, and returns the raw allocation together
    /// with `Ptr::Value2` (the slot the initial read pointer does NOT target).
    pub(crate) fn new(value: T, second_value: fn(&T) -> T) -> (NonNull<Self>, Ptr) {
        // Drops the already-written first value if `second_value` panics, so
        // a `T` is never leaked out of the partially-initialized allocation.
        struct DropGuard<T>(*mut T);
        impl<T> Drop for DropGuard<T> {
            fn drop(&mut self) {
                unsafe { self.0.drop_in_place() }
            }
        }
        let mut this: Box<MaybeUninit<Self>> = Box::new_uninit();
        let this_ptr = this.as_mut_ptr();
        let this = unsafe {
            let state_ptr = &raw mut (*this_ptr).state;
            let value_1_ptr = UnsafeCell::raw_get(&raw mut (*this_ptr).value_1);
            let value_2_ptr = UnsafeCell::raw_get(&raw mut (*this_ptr).value_2);
            state_ptr.write(AtomicU8::new(State::INITIAL));
            value_1_ptr.write(value);
            let guard = DropGuard(value_1_ptr);
            value_2_ptr.write(second_value(&*value_1_ptr));
            // All fields initialized without panicking; disarm the guard.
            core::mem::forget(guard);
            this.assume_init()
        };
        (
            unsafe { NonNull::new_unchecked(Box::into_raw(this)) },
            Ptr::Value2,
        )
    }

    /// Returns the cell selected by `ptr`.
    pub(crate) fn get_value(&self, ptr: Ptr) -> &UnsafeCell<T> {
        match ptr {
            Ptr::Value1 => &self.value_1,
            Ptr::Value2 => &self.value_2,
        }
    }

    /// Returns a shared reference into the slot selected by `ptr`.
    ///
    /// # Safety
    /// The caller must guarantee no mutable access to that slot can occur
    /// for the lifetime of the returned reference.
    pub(crate) unsafe fn get_value_ref(&self, ptr: Ptr) -> &T {
        unsafe { &*self.get_value(ptr).get() }
    }

    /// Marks the allocation as shared (two owners) by setting the shared bit.
    ///
    /// # Safety
    /// NOTE(review): the original states no contract for this `unsafe fn`;
    /// presumably it must only be called while the allocation is still
    /// unique — confirm at the call sites.
    pub(crate) unsafe fn set_shared(&self) {
        self.state.fetch_or(State::UNIQUE_MASK, Ordering::Relaxed);
    }

    /// `true` while only a single owner exists.
    pub(crate) fn is_unique(&self) -> bool {
        State::new(self.state.load(Ordering::Acquire)).is_unique()
    }

    /// Drops one owner's handle; frees the allocation when this was the last
    /// owner. The release RMW plus acquire fence orders all prior accesses
    /// before the deallocation (same pattern as `Arc::drop`).
    ///
    /// # Safety
    /// `this` must point to a live allocation created by [`Self::new`], the
    /// caller must own one handle to it, and must not use it afterwards.
    pub(crate) unsafe fn drop(this: NonNull<Self>) {
        let old_state = unsafe { this.as_ref() }
            .state
            .fetch_and(State::INV_UNIQUE_MASK, Ordering::Release);
        if State::new(old_state).is_unique() {
            atomic::fence(Ordering::Acquire);
            // SAFETY: the shared bit was already clear, so this call came
            // from the last owner and the box can be reclaimed. (Explicit
            // `unsafe` block added for consistency with the rest of the file
            // and the `unsafe_op_in_unsafe_fn` lint.)
            drop(unsafe { Box::from_raw(this.as_ptr()) });
        }
    }
}
// SAFETY: cross-thread access to the inner values is mediated by the atomic
// `state` byte.
// NOTE(review): the `value_1`/`value_2` cells are `pub(crate)`, so a writer
// can move/drop a `T` through `&Shared<T>` from another thread — that would
// require `T: Send + Sync`, not just `T: Sync`. Confirm against the callers'
// invariants before relying on this bound.
unsafe impl<T: Sync> Sync for Shared<T> {}