#![deny(unsafe_op_in_unsafe_fn)]
use parking_lot::Mutex;
#[cfg(feature = "async")]
use std::{
future::Future,
task::{Poll, Waker},
};
use std::{
cell::UnsafeCell,
cmp::Ordering,
marker::PhantomData,
mem::ManuallyDrop,
ops::{Deref, DerefMut, Range, RangeBounds},
ptr::NonNull,
thread::Thread,
};
#[cfg(any(test, doctest))]
mod tests;
mod util;
/// A parked thread or a pending async task that is waiting for a
/// locked range to be released.
enum Waiter {
    /// A blocked OS thread, woken via [`Thread::unpark`].
    Thread(Thread),
    /// A pending async task, woken via [`Waker::wake_by_ref`].
    #[cfg(feature = "async")]
    Task(Waker),
}
impl Waiter {
    /// Wakes this waiter so it can retry acquiring its range lock.
    fn unpark(&self) {
        match self {
            Waiter::Thread(thread) => thread.unpark(),
            #[cfg(feature = "async")]
            Waiter::Task(waker) => waker.wake_by_ref(),
        }
    }
}
/// Lock bookkeeping for a [`RangeMutex`]: which index ranges are
/// currently locked, and who is waiting on them.
#[derive(Default)]
struct RangesUsed {
    /// Currently-locked ranges. Kept sorted by position and pairwise
    /// non-overlapping so `overlapping_range_idx` can binary-search.
    ranges: Vec<Range<usize>>,
    /// Waiters to wake when a range overlapping theirs is unlocked.
    waiting: Vec<(Waiter, Range<usize>)>,
}
/// Marker result: the requested range was successfully locked.
struct Locked;
/// Marker result: the requested range conflicts with a held lock.
struct NotLocked;
impl RangesUsed {
    /// `const`-constructible equivalent of `Default::default()`.
    const fn new() -> Self {
        Self { ranges: Vec::new(), waiting: Vec::new() }
    }
    /// Binary-searches the sorted, non-overlapping `ranges` list for
    /// a locked range that overlaps `range`.
    ///
    /// Returns `Ok(idx)` of an overlapping locked range, or
    /// `Err(idx)` with the insertion point that would keep `ranges`
    /// sorted if nothing overlaps.
    fn overlapping_range_idx(
        &self,
        range: &Range<usize>,
    ) -> Result<usize, usize> {
        debug_assert!(
            !range.is_empty(),
            "empty ranges should be handled already"
        );
        self.ranges.binary_search_by(|locked_range| {
            // Entirely before `range` orders Less, entirely after
            // orders Greater; anything else overlaps and compares
            // Equal.
            if locked_range.end <= range.start {
                Ordering::Less
            } else if locked_range.start >= range.end {
                Ordering::Greater
            } else {
                Ordering::Equal
            }
        })
    }
    /// Attempts to mark `range` as locked.
    ///
    /// On conflict, optionally registers a waiter (built lazily by
    /// `make_waiter`) to be woken when an overlapping range is
    /// unlocked, and returns `Err(NotLocked)`.
    fn lock_range(
        &mut self,
        range: &Range<usize>,
        make_waiter: Option<impl FnOnce() -> Waiter>,
    ) -> Result<Locked, NotLocked> {
        let idx = self.overlapping_range_idx(range);
        match idx {
            Ok(_overlapping_range_idx) => {
                if let Some(waiter) = make_waiter {
                    self.waiting.push((waiter(), range.clone()));
                }
                Err(NotLocked)
            }
            Err(insert_idx) => {
                // Inserting at the binary-search insertion point
                // preserves the sorted order of `ranges`.
                self.ranges.insert(insert_idx, range.clone());
                Ok(Locked)
            }
        }
    }
    /// Removes `range` from the locked list and wakes (and
    /// deregisters) every waiter whose requested range overlaps it.
    /// `range` must currently be locked exactly as given.
    fn unlock_range(&mut self, range: &Range<usize>) {
        // `range` is expected to be present, so the search result is
        // always `Ok`; the irrefutable pattern just extracts `idx`.
        let (Ok(idx) | Err(idx)) = self.overlapping_range_idx(range);
        debug_assert_eq!(&self.ranges[idx], range, "range is locked");
        self.ranges.remove(idx);
        self.waiting.retain(|(unparker, waiting_range)| {
            // Woken waiters retry the lock themselves and re-register
            // if they lose the race to another locker.
            let should_unpark_and_remove = util::overlaps(range, waiting_range);
            if should_unpark_and_remove {
                unparker.unpark();
            }
            !should_unpark_and_remove
        })
    }
    /// Splits the locked `range` at offset `mid` (relative to
    /// `range.start`), replacing its entry in place with the two
    /// halves and returning them.
    ///
    /// Callers must keep both halves non-empty (see
    /// `RangeMutexGuard::split_at`, which returns early for `mid == 0`
    /// and `mid == len`) so no empty range enters `ranges`.
    fn split_locked_range(
        &mut self,
        range: &Range<usize>,
        mid: usize,
    ) -> (Range<usize>, Range<usize>) {
        debug_assert!(mid <= range.len());
        let (head, tail) =
            (range.start..range.start + mid, range.start + mid..range.end);
        let (Ok(idx) | Err(idx)) = self.overlapping_range_idx(range);
        debug_assert_eq!(&self.ranges[idx], range, "range is locked");
        // Replace the single entry with its two adjacent halves;
        // sortedness and non-overlap are preserved.
        self.ranges.splice(idx..=idx, [head.clone(), tail.clone()]);
        (head, tail)
    }
}
/// Storage that can back a [`RangeMutex`]: convertible to and from
/// the same storage with elements wrapped in [`UnsafeCell`].
///
/// # Safety
///
/// `into_unsafecell` and `from_unsafecell` must be inverse,
/// element-preserving conversions, and the `AsRef`/`AsMut` impls of
/// both `Self` and `Self::AsUnsafeCell` must view that same element
/// sequence. `RangeMutex` derives `&mut T` access from these views
/// through a shared reference, so a mismatched implementation would
/// be unsound.
pub unsafe trait RangeMutexBackingStorage<T>:
    AsMut<[T]> + AsRef<[T]>
{
    /// The same storage shape, with each element in an `UnsafeCell`.
    type AsUnsafeCell: AsMut<[UnsafeCell<T>]> + AsRef<[UnsafeCell<T>]>;
    /// Converts the storage into its `UnsafeCell`-wrapped form.
    fn into_unsafecell(self) -> Self::AsUnsafeCell;
    /// Converts the `UnsafeCell`-wrapped form back into the storage.
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self;
}
// SAFETY: `UnsafeCell<T>` is `#[repr(transparent)]` over `T`, so the
// `util` wrap/unwrap helpers reinterpret the same N elements in
// place; both views cover the identical memory.
unsafe impl<'a, T, const N: usize> RangeMutexBackingStorage<T>
    for &'a mut [T; N]
{
    type AsUnsafeCell = &'a mut [UnsafeCell<T>; N];
    fn into_unsafecell(self) -> Self::AsUnsafeCell {
        // The slice is exactly N long, so converting back into an
        // array reference cannot fail.
        util::wrap_unsafecell_slice(self).try_into().unwrap()
    }
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self {
        util::unwrap_unsafecell_slice(value).try_into().unwrap()
    }
}
// SAFETY: `[T; N]` and `[UnsafeCell<T>; N]` have identical layout
// (`UnsafeCell` is `#[repr(transparent)]`), and the `util` helpers
// convert between them without reordering elements.
unsafe impl<T, const N: usize> RangeMutexBackingStorage<T> for [T; N] {
    type AsUnsafeCell = [UnsafeCell<T>; N];
    fn into_unsafecell(self) -> Self::AsUnsafeCell {
        util::wrap_unsafecell_array(self)
    }
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self {
        util::unwrap_unsafecell_array(value)
    }
}
// SAFETY: the `util` slice helpers reinterpret the same elements in
// place between `[T]` and `[UnsafeCell<T>]` (same layout via
// `#[repr(transparent)]`), so both views alias identically.
unsafe impl<'a, T> RangeMutexBackingStorage<T> for &'a mut [T] {
    type AsUnsafeCell = &'a mut [UnsafeCell<T>];
    fn into_unsafecell(self) -> Self::AsUnsafeCell {
        util::wrap_unsafecell_slice(self)
    }
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self {
        util::unwrap_unsafecell_slice(value)
    }
}
// SAFETY: the conversion round-trips through `Vec`, wrapping or
// unwrapping each element via the `util` helpers; the element
// sequence is preserved in both directions.
unsafe impl<T> RangeMutexBackingStorage<T> for Box<[T]> {
    type AsUnsafeCell = Box<[UnsafeCell<T>]>;
    fn into_unsafecell(self) -> Self::AsUnsafeCell {
        // Move into a Vec, wrap the elements, and re-box the slice.
        let elements: Vec<T> = self.into();
        util::wrap_unsafecell_vec(elements).into_boxed_slice()
    }
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self {
        // Inverse of `into_unsafecell`: unwrap and re-box.
        let cells: Vec<UnsafeCell<T>> = value.into();
        util::unwrap_unsafecell_vec(cells).into_boxed_slice()
    }
}
// SAFETY: the `util` vec helpers convert between `Vec<T>` and
// `Vec<UnsafeCell<T>>` (same element layout via
// `#[repr(transparent)]`), preserving the element sequence.
unsafe impl<T> RangeMutexBackingStorage<T> for Vec<T> {
    type AsUnsafeCell = Vec<UnsafeCell<T>>;
    fn into_unsafecell(self) -> Self::AsUnsafeCell {
        util::wrap_unsafecell_vec(self)
    }
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self {
        util::unwrap_unsafecell_vec(value)
    }
}
// SAFETY: both conversions transfer the same locked range and `used`
// registration to the new guard; only the element type is
// reinterpreted between `T` and `UnsafeCell<T>`, which share the
// same layout (`UnsafeCell` is `#[repr(transparent)]`).
unsafe impl<'a, T> RangeMutexBackingStorage<T> for RangeMutexGuard<'a, T> {
    type AsUnsafeCell = RangeMutexGuard<'a, UnsafeCell<T>>;
    fn into_unsafecell(self) -> Self::AsUnsafeCell {
        // Suppress the old guard's Drop so the range we are handing
        // to the new guard is not unlocked here.
        let this = ManuallyDrop::new(self);
        RangeMutexGuard {
            // Cast the slice pointer from `[T]` to `[UnsafeCell<T>]`;
            // the pointer is non-null by construction.
            data: NonNull::new(this.data.as_ptr() as _).unwrap(),
            range: this.range.clone(),
            used: this.used,
            _variance: PhantomData,
        }
    }
    fn from_unsafecell(value: Self::AsUnsafeCell) -> Self {
        // Mirror image of `into_unsafecell`.
        let this = ManuallyDrop::new(value);
        RangeMutexGuard {
            data: NonNull::new(this.data.as_ptr() as _).unwrap(),
            range: this.range.clone(),
            used: this.used,
            _variance: PhantomData,
        }
    }
}
/// A mutex over slice-like storage that allows locking disjoint
/// index ranges independently and concurrently.
pub struct RangeMutex<T, B: RangeMutexBackingStorage<T>> {
    /// Which ranges are locked and who is waiting on them.
    used: Mutex<RangesUsed>,
    /// The backing storage with elements in `UnsafeCell`s, so guards
    /// can hand out `&mut T` through a shared `&RangeMutex`.
    data: B::AsUnsafeCell,
}
// SAFETY: a shared `RangeMutex` only exposes `T`s through guards
// holding exclusive range locks, so sharing it across threads
// amounts to sending `T`s between threads — `T: Send` suffices, as
// with `Mutex<T>`.
unsafe impl<T: Send, B: RangeMutexBackingStorage<T> + Sync> Sync
    for RangeMutex<T, B>
{
}
// SAFETY: moving the mutex moves the owned `T`s to another thread,
// which requires `T: Send`.
unsafe impl<T: Send, B: RangeMutexBackingStorage<T> + Send> Send
    for RangeMutex<T, B>
{
}
impl<T, B: RangeMutexBackingStorage<T>> RangeMutex<T, B> {
    /// Creates a new `RangeMutex` over `values` with nothing locked.
    pub fn new(values: B) -> Self {
        let data = B::into_unsafecell(values);
        Self { data, used: Mutex::new(RangesUsed::new()) }
    }
    /// Consumes the mutex and returns the backing storage.
    pub fn into_inner(self) -> B {
        B::from_unsafecell(self.data)
    }
    /// Returns a mutable slice of all elements.
    ///
    /// `&mut self` proves no guards are outstanding, so no locking
    /// is needed.
    pub fn get_mut(&mut self) -> &mut [T] {
        util::unwrap_unsafecell_slice(self.data.as_mut())
    }
    /// Clears lock bookkeeping left behind by leaked guards (e.g.
    /// via `mem::forget`) and returns all elements mutably.
    pub fn undo_leak(&mut self) -> &mut [T] {
        let used = self.used.get_mut();
        used.ranges.clear();
        used.waiting.clear();
        self.get_mut()
    }
    /// Attempts to lock `range` without blocking.
    ///
    /// Returns `None` if any part of the range is already locked.
    /// An empty range always succeeds with a detached empty guard.
    #[inline]
    pub fn try_lock(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Option<RangeMutexGuard<'_, T>> {
        let range = util::range(self.len(), range);
        if range.is_empty() {
            return Some(RangeMutexGuard::empty());
        }
        // SAFETY: `range` was resolved against `self.len()` by
        // `util::range` and is non-empty.
        unsafe { self.outlined_try_lock(range) }
    }
    /// Slow path of [`Self::try_lock`], kept out of the `#[inline]`
    /// fast path.
    ///
    /// # Safety
    ///
    /// `range` must be non-empty with `range.end <= self.len()`.
    unsafe fn outlined_try_lock(&self, range: Range<usize>) -> Option<RangeMutexGuard<'_, T>> {
        debug_assert!(!range.is_empty() && range.start <= range.end && range.end <= self.len());
        let mut used = self.used.lock();
        match used.lock_range(&range, None::<fn() -> Waiter>) {
            Err(NotLocked) => None,
            Ok(Locked) => {
                let data = &self.data.as_ref()[range.clone()];
                // View `&[UnsafeCell<T>]` as one cell of `[T]` so a
                // single `.get()` yields `*mut [T]` for the whole
                // locked range.
                let data = util::transpose_unsafecell_slice(data);
                Some(RangeMutexGuard {
                    data: NonNull::new(data.get()).unwrap(),
                    range,
                    used: Some(&self.used),
                    _variance: PhantomData,
                })
            }
        }
    }
    /// Locks `range`, blocking the current thread until every
    /// conflicting lock has been released.
    ///
    /// An empty range returns a detached empty guard immediately.
    #[inline]
    pub fn lock(
        &self,
        range: impl RangeBounds<usize>,
    ) -> RangeMutexGuard<'_, T> {
        let range = util::range(self.len(), range);
        if range.is_empty() {
            return RangeMutexGuard::empty();
        }
        // SAFETY: `range` was resolved against `self.len()` by
        // `util::range` and is non-empty.
        unsafe { self.outlined_lock(range) }
    }
    /// Blocking slow path of [`Self::lock`].
    ///
    /// # Safety
    ///
    /// `range` must be non-empty with `range.end <= self.len()`.
    unsafe fn outlined_lock(&self, range: Range<usize>) -> RangeMutexGuard<'_, T> {
        debug_assert!(!range.is_empty() && range.start <= range.end && range.end <= self.len());
        let mut used = self.used.lock();
        loop {
            match used.lock_range(
                &range,
                Some(|| Waiter::Thread(std::thread::current())),
            ) {
                Err(NotLocked) => {
                    // We are registered as a waiter, so release the
                    // bookkeeping mutex before parking. An `unpark`
                    // racing in between is buffered by the park
                    // token, so no wakeup is lost.
                    drop(used);
                    std::thread::park();
                    used = self.used.lock();
                }
                Ok(Locked) => {
                    let data = &self.data.as_ref()[range.clone()];
                    let data = util::transpose_unsafecell_slice(data);
                    return RangeMutexGuard {
                        data: NonNull::new(data.get()).unwrap(),
                        range,
                        used: Some(&self.used),
                        _variance: PhantomData,
                    };
                }
            }
        }
    }
    /// Returns a future that resolves to a guard once `range` has
    /// been locked.
    #[cfg(feature = "async")]
    #[inline]
    pub fn lock_async(
        &self,
        range: impl RangeBounds<usize>,
    ) -> impl Future<Output = RangeMutexGuard<'_, T>> {
        let range = util::range(self.len(), range);
        // SAFETY: `range` was resolved against `self.len()`; empty
        // ranges are handled inside the future.
        unsafe { self.outlined_lock_async(range) }
    }
    /// Async slow path of [`Self::lock_async`].
    ///
    /// # Safety
    ///
    /// `range.end <= self.len()` must hold (the range may be empty).
    #[cfg(feature = "async")]
    async unsafe fn outlined_lock_async(
        &self,
        range: Range<usize>,
    ) -> RangeMutexGuard<'_, T> {
        debug_assert!(range.start <= range.end && range.end <= self.len());
        std::future::poll_fn(move |ctx| {
            if range.is_empty() {
                return Poll::Ready(RangeMutexGuard::empty());
            }
            let mut used = self.used.lock();
            // On conflict our waker is registered; each failed poll
            // registers afresh, and entries are removed when an
            // overlapping range is unlocked.
            match used
                .lock_range(&range, Some(|| Waiter::Task(ctx.waker().clone())))
            {
                Err(NotLocked) => Poll::Pending,
                Ok(Locked) => {
                    let data = &self.data.as_ref()[range.clone()];
                    let data = util::transpose_unsafecell_slice(data);
                    Poll::Ready(RangeMutexGuard {
                        data: NonNull::new(data.get()).unwrap(),
                        range: range.clone(),
                        used: Some(&self.used),
                        _variance: PhantomData,
                    })
                }
            }
        })
        .await
    }
    /// Number of elements in the backing storage.
    pub fn len(&self) -> usize {
        self.data.as_ref().len()
    }
    /// Whether the backing storage contains no elements.
    pub fn is_empty(&self) -> bool {
        self.data.as_ref().len() == 0
    }
}
/// RAII guard giving exclusive access to a locked sub-slice of a
/// [`RangeMutex`]; the range is unlocked when the guard is dropped.
pub struct RangeMutexGuard<'l, T> {
    /// Pointer to the locked elements; dangling (but valid for a
    /// zero-length slice) for empty guards.
    data: NonNull<[T]>,
    /// Makes the guard behave like `&'l mut [T]` for variance and
    /// borrow purposes.
    _variance: PhantomData<&'l mut [T]>,
    /// The locked index range within the parent mutex's storage.
    range: Range<usize>,
    /// Bookkeeping used to unlock on drop; `None` for empty guards.
    used: Option<&'l Mutex<RangesUsed>>,
}
// SAFETY: the guard holds exclusive access to its `[T]` range, so
// sending the guard sends those elements (`T: Send`); a shared guard
// only hands out `&T`, so `T: Sync` suffices for `Sync`.
unsafe impl<T: Send> Send for RangeMutexGuard<'_, T> {}
unsafe impl<T: Sync> Sync for RangeMutexGuard<'_, T> {}
impl<T> Default for RangeMutexGuard<'_, T> {
fn default() -> Self {
Self::empty()
}
}
impl<'l, T> RangeMutexGuard<'l, T> {
    /// Creates a guard over zero elements, not tied to any mutex.
    pub fn empty() -> Self {
        Self {
            // Dangling but well-aligned; a zero-length slice never
            // dereferences element memory.
            data: NonNull::<[T; 0]>::dangling(),
            range: 0..0,
            used: None,
            _variance: PhantomData,
        }
    }
    /// Splits the guard into two independent guards covering
    /// `..mid` and `mid..` of its elements; each half unlocks its
    /// own sub-range on drop.
    ///
    /// # Panics
    ///
    /// Panics if `mid > this.len()`.
    pub fn split_at(this: Self, mid: usize) -> (Self, Self) {
        assert!(mid <= this.len());
        if mid == 0 {
            return (Self::empty(), this);
        } else if mid == this.len() {
            return (this, Self::empty());
        }
        // Both halves are non-empty from here on. Suppress `this`'s
        // Drop so the original, unsplit range is never unlocked.
        let this = ManuallyDrop::new(this);
        // A non-empty guard always carries its bookkeeping handle.
        let mut used = this.used.as_ref().expect("this.len() > 0").lock();
        // SAFETY: `mid` is in-bounds per the assert above.
        let (head_data, tail_data) =
            unsafe { util::split_slice_at(this.data, mid) };
        let (head, tail) = used.split_locked_range(&this.range, mid);
        (
            Self {
                data: head_data,
                range: head,
                used: this.used,
                _variance: PhantomData,
            },
            Self {
                data: tail_data,
                range: tail,
                used: this.used,
                _variance: PhantomData,
            },
        )
    }
    /// Narrows the guard to `range` of its elements, unlocking the
    /// trimmed-off head and tail.
    pub fn slice(this: Self, range: impl RangeBounds<usize>) -> Self {
        let range = util::range(this.len(), range);
        // Dropping `_tail` then `_head` unlocks the discarded parts.
        let (this, _tail) = Self::split_at(this, range.end);
        let (_head, this) = Self::split_at(this, range.start);
        this
    }
}
impl<T> Deref for RangeMutexGuard<'_, T> {
    type Target = [T];
    fn deref(&self) -> &Self::Target {
        // SAFETY: the guard holds the range lock, so `data` points
        // to elements no other guard can access mutably.
        unsafe { self.data.as_ref() }
    }
}
impl<T> DerefMut for RangeMutexGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: the guard holds the range lock exclusively, and
        // `&mut self` guarantees no other borrow of this guard.
        unsafe { self.data.as_mut() }
    }
}
impl<'l, T> Drop for RangeMutexGuard<'l, T> {
    fn drop(&mut self) {
        if let Some(used) = self.used {
            // Unlock our range and wake any waiters overlapping it.
            let mut used = used.lock();
            used.unlock_range(&self.range);
        } else {
            // Detached empty guards never locked anything.
            debug_assert_eq!(self.range.len(), 0)
        }
    }
}
impl<T> AsRef<[T]> for RangeMutexGuard<'_, T> {
    /// Borrows the locked elements as a shared slice (via `Deref`).
    fn as_ref(&self) -> &[T] {
        &self[..]
    }
}
impl<T> AsMut<[T]> for RangeMutexGuard<'_, T> {
    /// Borrows the locked elements as a mutable slice (via
    /// `DerefMut`).
    fn as_mut(&mut self) -> &mut [T] {
        &mut self[..]
    }
}