#![warn(missing_docs)]
#![warn(clippy::pedantic)]
#![allow(clippy::inline_always)]
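//! A multi-version lock: readers take wait-free snapshots of the current
//! version of a value, while updates prepare and publish a new version in
//! one of up to `N` slots without ever blocking readers.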
use std::{borrow, cell, fmt, hash, hint, marker, mem, ops, ptr, sync::atomic, thread};
// Upper bound on how many times `acquire` rescans existing versions (with
// exponential backoff) before initializing a new one.
const ACQUIRE_ATTEMPTS: usize = 14;
// The last few attempts yield to the scheduler instead of spin-waiting.
const YIELDS_BEFORE_INIT: usize = 2;
// Sentinel stored in `length` while an update holds the writer lock.
const LOCKED: usize = 0;
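/// A lock that keeps up to `N` versions of a value.
///
/// [`read`](VLock::read) never blocks: it pins whichever version is current
/// at that moment and returns a [`ReadRef`] to it. Updates run one at a
/// time; each one builds the next version in a free slot (reusing an old
/// version once all of its readers are gone, or initializing a new slot via
/// the provided `init`) and then publishes it atomically.
///
/// # Examples
///
/// A minimal usage sketch (the `vlock` crate path below is assumed):
///
/// ```
/// use vlock::VLock;
///
/// let lock: VLock<String, 4> = VLock::new(String::from("initial"));
///
/// // Readers pin the version that was current when `read` was called.
/// let snapshot = lock.read();
/// assert_eq!(*snapshot, "initial");
///
/// // Updates see the current version and a slot to write the next one into.
/// lock.update(
///     |current, next| {
///         next.clear();
///         next.push_str(current);
///         next.push_str(", updated");
///     },
///     String::new,
/// );
///
/// assert_eq!(*lock.read(), "initial, updated");
/// // The earlier snapshot still points at the old version.
/// assert_eq!(*snapshot, "initial");
/// ```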
pub struct VLock<T, const N: usize> {
// Low `OFFSET` bits: index of the current version; high `COUNTER` bits:
// count of `read()`s taken since that version was published, in units of `STEP`.
state: atomic::AtomicUsize,
// Number of initialized versions, or `LOCKED` while an update is running.
length: atomic::AtomicUsize,
// Storage for up to `N` versions; only the first `length` are initialized.
data: cell::UnsafeCell<[mem::MaybeUninit<Data<T>>; N]>,
}
impl<T, const N: usize> VLock<T, N> {
// Smallest power of two that can index all `N` versions; also the unit in
// which reader counts are packed above the offset bits of a state word.
const STEP: usize = {
assert!(N > 1, "VLock requires at least 2 versions to work");
N.next_power_of_two()
};
// Mask selecting the version-offset bits of a state word.
const OFFSET: usize = Self::STEP - 1;
// Mask selecting the reader-counter bits of a state word.
const COUNTER: usize = !Self::OFFSET;
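/// Creates a `VLock` holding `value` as its first and only version.
///
/// The remaining `N - 1` slots stay uninitialized until updates need them.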
#[inline(always)]
pub fn new(value: T) -> Self {
let mut data: [mem::MaybeUninit<Data<T>>; N] =
unsafe { mem::MaybeUninit::uninit().assume_init() };
data[0].write(Data {
state: atomic::AtomicUsize::new(0),
value,
});
Self {
state: atomic::AtomicUsize::new(0),
length: atomic::AtomicUsize::new(1),
data: cell::UnsafeCell::new(data),
}
}
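// Unchecked shared access to the slot at `offset`; callers must ensure `offset < N`.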
#[inline(always)]
unsafe fn at(&self, offset: usize) -> &mem::MaybeUninit<Data<T>> {
&(*self.data.get())[offset]
}
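// Unchecked exclusive access to the slot at `offset`; callers must ensure
// `offset < N` and that no other reference to that slot is alive.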
#[allow(clippy::mut_from_ref)]
#[inline(always)]
unsafe fn at_mut(&self, offset: usize) -> &mut mem::MaybeUninit<Data<T>> {
&mut (*self.data.get())[offset]
}
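/// Takes a read handle to the current version.
///
/// This is wait-free: it only increments the reader counter packed into
/// `state` and never waits for updates. The returned [`ReadRef`] keeps
/// that version from being reused until it is dropped.
///
/// # Panics
///
/// Panics if the reader counter would overflow.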
#[must_use]
#[inline(always)]
pub fn read(&self) -> ReadRef<'_, T, N> {
let state = self.state.fetch_add(Self::STEP, atomic::Ordering::Acquire);
assert_ne!(
state.wrapping_add(Self::STEP),
state & Self::OFFSET,
"counter overflow"
);
ReadRef {
ptr: unsafe { self.at(state & Self::OFFSET) }.as_ptr(),
phantom: marker::PhantomData,
}
}
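// Takes the update lock by swapping `length` to `LOCKED`, yielding until
// the previous holder restores it; returns the actual length.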
#[inline(always)]
fn lock(&self) -> usize {
loop {
let value = self.length.swap(LOCKED, atomic::Ordering::Acquire);
if value != LOCKED {
return value;
}
thread::yield_now();
}
}
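// Picks the slot the next version will be written into: repeatedly scans
// the initialized versions (other than the current one at `curr_offset`)
// for one with no remaining readers, backing off with growing spins and
// then yields between scans. If nothing frees up and capacity remains, a
// new slot at index `length` is initialized with `init`; at full capacity
// it keeps yielding and rescanning. Returns the chosen offset.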
#[inline(always)]
fn acquire<I>(&self, curr_offset: usize, length: usize, init: I) -> usize
where
I: FnOnce() -> T,
{
let mut remaining = ACQUIRE_ATTEMPTS;
loop {
'attempt: loop {
if length == 1 {
break 'attempt;
}
if let Some(state) = unsafe { &*self.data.get() }
.iter()
.enumerate()
.take(length)
.filter(|(offset, _)| *offset != curr_offset)
.map(|(_, init)| unsafe { init.assume_init_ref() })
.map(|version| version.state.load(atomic::Ordering::Acquire))
.find(|&state| state & Self::COUNTER == 0)
{
return state & Self::OFFSET;
}
if remaining == 1 {
break 'attempt;
}
if remaining > YIELDS_BEFORE_INIT + 1 {
for _ in 0..1 << ACQUIRE_ATTEMPTS.saturating_sub(remaining) {
hint::spin_loop();
}
} else {
thread::yield_now();
}
remaining -= 1;
}
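// No reusable version was found within the attempt budget: initialize a
// fresh slot if there is capacity, otherwise yield and start scanning again.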
if length < N {
unsafe { self.at_mut(length) }.write(Data {
state: atomic::AtomicUsize::new(length),
value: init(),
});
return length;
}
thread::yield_now();
}
}
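// Releases the update lock by restoring `length`; panics if the lock was
// not actually held.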
#[inline(always)]
fn unlock(&self, value: usize) {
assert_ne!(value, LOCKED, "attempted to unlock with the LOCKED value");
assert_eq!(
self.length.swap(value, atomic::Ordering::Release),
LOCKED,
"unlock without lock"
);
}
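/// Publishes a new version built by `f` from the current one.
///
/// `f` receives the current value and a mutable slot for the next version.
/// That slot is either an old version whose readers are all gone or a fresh
/// value produced by `init`, so it may contain stale data that `f` must
/// overwrite. Concurrent updates are serialized; readers are never blocked.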
#[inline(always)]
pub fn update<F, I>(&self, f: F, init: I)
where
F: FnOnce(&T, &mut T),
I: FnOnce() -> T,
{
self.compare_update(|_| true, f, init);
}
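/// Publishes a new version only if `pred` accepts the current one.
///
/// `pred` runs while the update lock is held, so the value it sees cannot
/// change before the update is applied. Returns `true` if the update was
/// performed and `false` if `pred` rejected it. See [`update`](VLock::update)
/// for how `f` and `init` are used.
///
/// # Examples
///
/// A small sketch (the `vlock` crate path below is assumed):
///
/// ```
/// use vlock::VLock;
///
/// let lock: VLock<u64, 2> = VLock::new(1);
///
/// // Only bump the value while it is still odd.
/// let updated = lock.compare_update(
///     |current| *current % 2 == 1,
///     |current, next| *next = *current + 1,
///     u64::default,
/// );
/// assert!(updated);
/// assert_eq!(*lock.read(), 2);
/// ```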
pub fn compare_update<P, F, I>(&self, pred: P, f: F, init: I) -> bool
where
P: FnOnce(&T) -> bool,
F: FnOnce(&T, &mut T),
I: FnOnce() -> T,
{
let mut length = self.lock();
let offset = self.state.load(atomic::Ordering::Relaxed) & Self::OFFSET;
let version = unsafe { self.at(offset).assume_init_ref() };
if !pred(&version.value) {
self.unlock(length);
return false;
}
let new_offset = self.acquire(offset, length, init);
if new_offset == length {
length = length.saturating_add(1);
}
f(
&version.value,
&mut unsafe { self.at_mut(new_offset).assume_init_mut() }.value,
);
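// Publish the new offset. The counter bits of the previous state are the
// readers that pinned the old version; transfer that count onto the old
// version's own state so it is not reused until they have all dropped.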
let prev_state = self.state.swap(new_offset, atomic::Ordering::Release);
assert_eq!(
prev_state & Self::OFFSET,
offset,
"offset changed while writing"
);
version
.state
.fetch_add(prev_state & Self::COUNTER, atomic::Ordering::Relaxed);
self.unlock(length);
true
}
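/// Returns a mutable reference to the current version.
///
/// Requires exclusive access to the lock, so no synchronization is needed.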
#[inline(always)]
pub fn get_mut(&mut self) -> &mut T {
let offset = *self.state.get_mut() & Self::OFFSET;
&mut unsafe { self.at_mut(offset).assume_init_mut() }.value
}
}
impl<T: Default, const N: usize> VLock<T, N> {
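/// Like [`update`](VLock::update), using `T::default` to initialize new
/// version slots.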
#[inline(always)]
pub fn update_default<F>(&self, f: F)
where
F: FnOnce(&T, &mut T),
{
self.update(f, T::default);
}
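/// Like [`compare_update`](VLock::compare_update), using `T::default` to
/// initialize new version slots.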
#[inline(always)]
pub fn compare_update_default<P, F>(&self, pred: P, f: F) -> bool
where
P: FnOnce(&T) -> bool,
F: FnOnce(&T, &mut T),
{
self.compare_update(pred, f, T::default)
}
}
impl<T, const N: usize> From<T> for VLock<T, N> {
#[inline(always)]
fn from(value: T) -> Self {
VLock::new(value)
}
}
impl<T: Default, const N: usize> Default for VLock<T, N> {
#[inline(always)]
fn default() -> Self {
VLock::new(T::default())
}
}
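// SAFETY: a shared `VLock` lets other threads observe `&T` through `read`
// and create, mutate and drop `T` values through updates, so `T` must be
// both `Send` and `Sync`.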
unsafe impl<T: Send + Sync, const N: usize> Send for VLock<T, N> {}
unsafe impl<T: Send + Sync, const N: usize> Sync for VLock<T, N> {}
impl<T: Clone, const N: usize> Clone for VLock<T, N> {
#[inline(always)]
fn clone(&self) -> Self {
Self::new(self.read().clone())
}
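// Clones the source's current value into this lock in place: the value is
// written into the current slot, that slot is moved to index 0, every other
// initialized version is dropped (asserting it has no remaining readers),
// and `state`/`length` are reset to a single version.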
fn clone_from(&mut self, source: &Self) {
let offset = *self.state.get_mut() & Self::OFFSET;
let current = unsafe { self.at_mut(offset).assume_init_mut() };
*current.state.get_mut() = 0;
current.value.clone_from(&source.read());
if offset != 0 {
let first = unsafe { self.at_mut(0).assume_init_mut() };
mem::swap(first, current);
}
*self.state.get_mut() = 0;
for init in unsafe { &mut *self.data.get() }
.iter_mut()
.take(*self.length.get_mut())
.skip(1)
{
let state = &mut unsafe { init.assume_init_mut() }.state;
assert_eq!(*state.get_mut() & Self::COUNTER, 0);
unsafe { init.assume_init_drop() };
}
*self.length.get_mut() = 1;
}
}
impl<T, const N: usize> fmt::Debug for VLock<T, N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let length = self.length.load(atomic::Ordering::Relaxed);
if length == LOCKED {
f.debug_struct("VLock (locked)")
.field("state", &self.state)
.finish_non_exhaustive()
} else {
f.debug_struct("VLock")
.field("state", &self.state)
.field("length", &length)
.finish_non_exhaustive()
}
}
}
impl<T, const N: usize> Drop for VLock<T, N> {
#[inline(always)]
fn drop(&mut self) {
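// Only the first `length` slots were ever initialized, so only those are dropped.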
for init in unsafe { &mut *self.data.get() }
.iter_mut()
.take(*self.length.get_mut())
{
unsafe { init.assume_init_drop() };
}
}
}
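/// A single version slot: the value plus a state word whose low bits hold
/// the slot's offset and whose high bits balance out to zero once no reader
/// holds this version any more.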
#[derive(Debug)]
struct Data<T> {
state: atomic::AtomicUsize,
value: T,
}
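/// A read guard returned by [`VLock::read`].
///
/// Dereferences to the version that was current when it was created and
/// keeps that version from being reused until the guard is dropped.
/// Equality and hashing compare the underlying version pointer, not `T`.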
pub struct ReadRef<'a, T: 'a, const N: usize> {
ptr: *const Data<T>,
phantom: marker::PhantomData<&'a Data<T>>,
}
impl<T, const N: usize> Eq for ReadRef<'_, T, N> {}
impl<T, const N: usize> PartialEq for ReadRef<'_, T, N> {
#[inline(always)]
fn eq(&self, other: &Self) -> bool {
self.ptr == other.ptr
}
}
impl<T, const N: usize> AsRef<T> for ReadRef<'_, T, N> {
#[inline(always)]
fn as_ref(&self) -> &T {
self
}
}
impl<T, const N: usize> borrow::Borrow<T> for ReadRef<'_, T, N> {
#[inline(always)]
fn borrow(&self) -> &T {
self
}
}
impl<T: fmt::Debug, const N: usize> fmt::Debug for ReadRef<'_, T, N> {
#[inline(always)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<T: fmt::Display, const N: usize> fmt::Display for ReadRef<'_, T, N> {
#[inline(always)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl<T, const N: usize> fmt::Pointer for ReadRef<'_, T, N> {
#[inline(always)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&ptr::addr_of!(**self), f)
}
}
impl<T, const N: usize> hash::Hash for ReadRef<'_, T, N> {
#[inline(always)]
fn hash<H: hash::Hasher>(&self, state: &mut H) {
self.ptr.hash(state);
}
}
impl<T, const N: usize> ops::Deref for ReadRef<'_, T, N> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
&unsafe { &*self.ptr }.value
}
}
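// SAFETY: a `ReadRef` only exposes `&T` (plus atomic bookkeeping on drop),
// so it may be sent or shared across threads whenever `T: Sync`.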
unsafe impl<T: Sync, const N: usize> Send for ReadRef<'_, T, N> {}
unsafe impl<T: Sync, const N: usize> Sync for ReadRef<'_, T, N> {}
impl<T, const N: usize> Drop for ReadRef<'_, T, N> {
#[inline(always)]
fn drop(&mut self) {
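// Release this reader's hold: `N.next_power_of_two()` is the same quantity
// as `VLock::STEP`, so this balances the count that `read()` added and that
// was later transferred onto this version.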
unsafe { &*self.ptr }.state.fetch_sub(
N.next_power_of_two(),
atomic::Ordering::Release,
);
}
}