use core::{
cell::{Cell, UnsafeCell},
cmp::Ordering,
fmt,
marker::PhantomPinned,
ops::Range,
pin::Pin,
ptr::NonNull,
};
use super::{IntervalRwLockCore, LockCallback, LockState, UnlockCallback};
use crate::utils::{
panicking::abort_on_unwind,
pin::{EarlyDrop, EarlyDropGuard},
rbtree,
};
#[cfg(test)]
mod tests;
/// An interval reader-writer lock core backed by intrusive red-black trees.
///
/// The three tree roots below point at nodes that live *inside* the
/// callers' `*LockState` objects; those nodes in turn hold pointers back
/// into this structure, so the core must stay pinned (`_pin`).
pub struct RbTreeIntervalRwLockCore<Index, Priority, InProgress> {
    /// Endpoints of active read borrows: a `+1` node at each borrow's
    /// start index and a `-1` node at its end index (see `lock_read`).
    reads: Option<NonNull<ReadNode<Index>>>,
    /// Active write borrows, stored as whole ranges keyed by start index.
    writes: Option<NonNull<WriteNode<Index>>>,
    /// Borrow requests that are blocked on a conflicting borrow.
    pendings: Option<NonNull<PendingNode<Index, Priority, InProgress>>>,
    /// Intrusive nodes store the address of this structure; it must not move.
    _pin: PhantomPinned,
}
// SAFETY: the tree roots are raw pointers into `*LockState` objects, but
// every access to them in this module goes through `Pin<&mut Self>`, so
// moving the core between threads is sound when the pointed-to element
// types are `Send`.
unsafe impl<Index: Send, Priority: Send, InProgress: Send> Send
    for RbTreeIntervalRwLockCore<Index, Priority, InProgress>
{
}
// SAFETY: all mutation requires `Pin<&mut Self>`, so shared references
// expose no interior mutability of the trees.
// NOTE(review): `Sync` is bounded on `Send` (not `Sync`) for the type
// parameters; this looks intentional given the `&mut`-only access model,
// but confirm against the `IntervalRwLockCore` contract.
unsafe impl<Index: Send, Priority: Send, InProgress: Send> Sync
    for RbTreeIntervalRwLockCore<Index, Priority, InProgress>
{
}
/// A read-borrow endpoint node. The element is `(index, delta)` where
/// `delta` is `+1` (borrow starts here) or `-1` (borrow ends here); the
/// `isize` summary accumulates deltas so a tree prefix sum yields the
/// number of read borrows covering a point (see `first_read_borrow_in_range`).
type ReadNode<Index> = rbtree::Node<(Index, i8), isize>;
/// An active write borrow, stored as its full half-open range; no summary.
type WriteNode<Index> = rbtree::Node<Range<Index>, ()>;
/// A blocked borrow request; no summary.
type PendingNode<Index, Priority, InProgress> =
    rbtree::Node<Pending<Index, Priority, InProgress>, ()>;
/// A borrow request that has not been fully granted yet.
#[derive(Debug)]
struct Pending<Index, Priority, InProgress> {
    /// The still-unsatisfied tail of the requested range; `range.start`
    /// is the first index where a conflicting borrow was found, and it
    /// advances as conflicting borrows are released (see `resume_borrow`).
    range: Range<Index>,
    /// Secondary ordering key between pendings that start at the same index.
    priority: Priority,
    /// Back-pointer to the `*LockStateInner` that owns this node.
    parent: LockStatePtr<Index, Priority, InProgress>,
    /// Caller-supplied payload, handed back when the request completes or
    /// is cancelled.
    in_progress: InProgress,
}
/// A pointer to the lock-state object owning a pending request, tagged
/// with whether the request is for a read or a write borrow.
#[derive(Debug)]
enum LockStatePtr<Index, Priority, InProgress> {
    Read(NonNull<ReadLockStateInner<Index, Priority, InProgress>>),
    Write(NonNull<WriteLockStateInner<Index, Priority, InProgress>>),
}
/// Hand-written `Clone`/`Copy` impls: deriving them would add unwanted
/// `Clone`/`Copy` bounds on the type parameters, which this pointer-only
/// enum does not need.
impl<Index, Priority, InProgress> Clone for LockStatePtr<Index, Priority, InProgress> {
    #[inline]
    fn clone(&self) -> Self {
        // The enum is `Copy`, so cloning is a plain bitwise copy.
        let duplicate = *self;
        duplicate
    }
}
impl<Index, Priority, InProgress> Copy for LockStatePtr<Index, Priority, InProgress> {}
/// Generates the four public lock-state types.
///
/// Each generated type is a pinned wrapper around an `EarlyDropGuard`
/// holding the corresponding `*Inner` state, lazily created on first use,
/// plus `Debug`, `Default`, and `Send`/`Sync` impls.
macro_rules! impl_lock_state {
    ($(
        $( #[$meta:meta] )*
        pub struct $ty:ident { inner: $inner:ident }
    )*) => {$(
        $( #[$meta] )*
        #[pin_project::pin_project]
        pub struct $ty<Index, Priority, InProgress> {
            #[pin]
            inner: EarlyDropGuard<$inner<Index, Priority, InProgress>>
        }
        impl<Index, Priority, InProgress> $ty<Index, Priority, InProgress> {
            /// Creates an empty (unlinked) lock state.
            #[inline]
            pub const fn new() -> Self {
                Self { inner: EarlyDropGuard::new() }
            }
            /// Returns the inner state, initializing it on first access.
            #[inline]
            fn get(self: Pin<&mut Self>) -> Pin<&$inner<Index, Priority, InProgress>> {
                self.project().inner.get_or_insert_default()
            }
        }
        impl<Index, Priority, InProgress> fmt::Debug for $ty<Index, Priority, InProgress> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.write_str(stringify!($ty))?;
                // Show the owning lock core's address while linked.
                if let Some(parent) = self.inner.get().and_then(|inner|inner.parent.get()) {
                    write!(f, "(< borrow data for {:p} >)", parent)
                } else {
                    f.write_str("(< empty >)")
                }
            }
        }
        impl<Index, Priority, InProgress> Default for $ty<Index, Priority, InProgress> {
            #[inline]
            fn default() -> Self {
                Self::new()
            }
        }
        // NOTE(review): these impls are unconditional — no `Send`/`Sync`
        // bounds on the type parameters even though the inner state embeds
        // `Index`/`Priority`/`InProgress` values. Confirm soundness for
        // non-`Send` parameters.
        unsafe impl<Index, Priority, InProgress> Send for $ty<Index, Priority, InProgress> {}
        unsafe impl<Index, Priority, InProgress> Sync for $ty<Index, Priority, InProgress> {}
    )*};
}
impl_lock_state! {
    /// Lock state used with `lock_read` / `unlock_read`.
    pub struct ReadLockState { inner: ReadLockStateInner }
    /// Lock state used with `lock_write` / `unlock_write`.
    pub struct WriteLockState { inner: WriteLockStateInner }
    /// Lock state used with `try_lock_read` / `unlock_try_read`.
    pub struct TryReadLockState { inner: TryReadLockStateInner }
    /// Lock state used with `try_lock_write` / `unlock_try_write`.
    pub struct TryWriteLockState { inner: TryWriteLockStateInner }
}
/// Backing storage for `ReadLockState`.
struct ReadLockStateInner<Index, Priority, InProgress> {
    /// The owning lock core while linked; `None` when idle.
    parent: Cell<Option<NonNull<RbTreeIntervalRwLockCore<Index, Priority, InProgress>>>>,
    /// Pending-request node, present while part of the request is blocked.
    pending: UnsafeCell<Option<PendingNode<Index, Priority, InProgress>>>,
    /// The two read-endpoint nodes (`+1` start, `-1` end), present while
    /// any prefix of the range is read-locked.
    read: UnsafeCell<Option<[ReadNode<Index>; 2]>>,
}
/// Backing storage for `WriteLockState`.
struct WriteLockStateInner<Index, Priority, InProgress> {
    /// The owning lock core while linked; `None` when idle.
    parent: Cell<Option<NonNull<RbTreeIntervalRwLockCore<Index, Priority, InProgress>>>>,
    /// Pending-request node, present while part of the request is blocked.
    pending: UnsafeCell<Option<PendingNode<Index, Priority, InProgress>>>,
    /// The write-borrow node, present while any prefix is write-locked.
    write: UnsafeCell<Option<WriteNode<Index>>>,
}
/// Backing storage for `TryReadLockState` (no pending slot: `try_lock_read`
/// never blocks).
struct TryReadLockStateInner<Index, Priority, InProgress> {
    /// The owning lock core while linked; `None` when idle.
    parent: Cell<Option<NonNull<RbTreeIntervalRwLockCore<Index, Priority, InProgress>>>>,
    /// The two read-endpoint nodes while the lock is held.
    read: UnsafeCell<Option<[ReadNode<Index>; 2]>>,
}
/// Backing storage for `TryWriteLockState` (no pending slot: `try_lock_write`
/// never blocks).
struct TryWriteLockStateInner<Index, Priority, InProgress> {
    /// The owning lock core while linked; `None` when idle.
    parent: Cell<Option<NonNull<RbTreeIntervalRwLockCore<Index, Priority, InProgress>>>>,
    /// The write-borrow node while the lock is held.
    write: UnsafeCell<Option<WriteNode<Index>>>,
}
impl<Index, Priority, InProgress> Default for ReadLockStateInner<Index, Priority, InProgress> {
#[inline]
fn default() -> Self {
Self {
parent: Cell::new(None),
read: UnsafeCell::new(None),
pending: UnsafeCell::new(None),
}
}
}
impl<Index, Priority, InProgress> Default for WriteLockStateInner<Index, Priority, InProgress> {
#[inline]
fn default() -> Self {
Self {
parent: Cell::new(None),
write: UnsafeCell::new(None),
pending: UnsafeCell::new(None),
}
}
}
impl<Index, Priority, InProgress> Default for TryReadLockStateInner<Index, Priority, InProgress> {
#[inline]
fn default() -> Self {
Self {
parent: Cell::new(None),
read: UnsafeCell::new(None),
}
}
}
impl<Index, Priority, InProgress> Default for TryWriteLockStateInner<Index, Priority, InProgress> {
#[inline]
fn default() -> Self {
Self {
parent: Cell::new(None),
write: UnsafeCell::new(None),
}
}
}
/// Tree callback for the read-endpoint tree.
///
/// Each read borrow contributes a `+1` node at its start index and a `-1`
/// node at its end index; the `isize` summary sums these deltas, so a
/// prefix sum over the tree counts the read borrows covering a point.
struct ReadNodeCallback;
impl<Index: Ord> rbtree::Callback<(Index, i8), isize> for ReadNodeCallback {
    #[inline]
    fn zero_summary(&mut self) -> isize {
        0
    }
    #[inline]
    fn element_to_summary(&mut self, element: &(Index, i8)) -> isize {
        // Widen the per-endpoint delta (+1 / -1) to the summary type.
        isize::from(element.1)
    }
    #[inline]
    fn add_assign_summary(&mut self, acc: &mut isize, delta: &isize) {
        // Wrapping: summaries are small deltas; overflow cannot occur in
        // practice, but keep the original no-trap semantics.
        *acc = acc.wrapping_add(*delta);
    }
    #[inline]
    fn sub_assign_summary(&mut self, acc: &mut isize, delta: &isize) {
        *acc = acc.wrapping_sub(*delta);
    }
    #[inline]
    fn cmp_element(&mut self, lhs: &(Index, i8), rhs: &(Index, i8)) -> Ordering {
        // Endpoints are ordered by index only; the delta is payload.
        Ord::cmp(&lhs.0, &rhs.0)
    }
}
/// Tree callback for the write-borrow tree, keyed by range start.
///
/// Write borrows never need aggregation, so the summary type is `()` and
/// every summary operation is a no-op.
struct WriteNodeCallback;
impl<Index: Ord> rbtree::Callback<Range<Index>, ()> for WriteNodeCallback {
    #[inline]
    fn cmp_element(&mut self, lhs: &Range<Index>, rhs: &Range<Index>) -> Ordering {
        Ord::cmp(&lhs.start, &rhs.start)
    }
    #[inline]
    fn zero_summary(&mut self) {}
    #[inline]
    fn element_to_summary(&mut self, _elem: &Range<Index>) {}
    #[inline]
    fn add_assign_summary(&mut self, _acc: &mut (), _delta: &()) {}
    #[inline]
    fn sub_assign_summary(&mut self, _acc: &mut (), _delta: &()) {}
}
/// Tree callback for the pending-request tree.
struct PendingNodeCallback;
impl<Index: Ord, Priority: Ord, InProgress>
    rbtree::Callback<Pending<Index, Priority, InProgress>, ()> for PendingNodeCallback
{
    #[inline]
    fn zero_summary(&mut self) {}
    #[inline]
    fn element_to_summary(&mut self, _element: &Pending<Index, Priority, InProgress>) {}
    #[inline]
    fn add_assign_summary(&mut self, _acc: &mut (), _delta: &()) {}
    #[inline]
    fn sub_assign_summary(&mut self, _acc: &mut (), _delta: &()) {}
    #[inline]
    fn cmp_element(
        &mut self,
        new: &Pending<Index, Priority, InProgress>,
        existing: &Pending<Index, Priority, InProgress>,
    ) -> Ordering {
        // Primary order: (start index, priority). The trailing
        // `.then(Ordering::Less)` resolves ties as "new sorts before
        // existing", so equal-keyed pendings never compare equal and each
        // insertion gets its own distinct slot in the tree.
        let new_key = (&new.range.start, &new.priority);
        let existing_key = (&existing.range.start, &existing.priority);
        new_key.cmp(&existing_key).then(Ordering::Less)
    }
}
impl<Index, Priority, InProgress> RbTreeIntervalRwLockCore<Index, Priority, InProgress> {
    /// Creates an empty lock core: no read, write, or pending borrows.
    ///
    /// `const` so it can back the `IntervalRwLockCore::INIT` constant.
    #[inline]
    pub const fn new() -> Self {
        Self {
            writes: None,
            reads: None,
            pendings: None,
            _pin: PhantomPinned,
        }
    }
}
// SAFETY: lock-state objects are linked into this core's trees only while
// their `parent` pointer is set to this core, and every unlock path fully
// unlinks the nodes before clearing `parent`.
unsafe impl<Index, Priority, InProgress> IntervalRwLockCore
    for RbTreeIntervalRwLockCore<Index, Priority, InProgress>
where
    Index: Clone + Ord,
    Priority: Ord,
{
    type Index = Index;
    type Priority = Priority;
    type ReadLockState = ReadLockState<Index, Priority, InProgress>;
    type WriteLockState = WriteLockState<Index, Priority, InProgress>;
    type TryReadLockState = TryReadLockState<Index, Priority, InProgress>;
    type TryWriteLockState = TryWriteLockState<Index, Priority, InProgress>;
    type InProgress = InProgress;
    const INIT: Self = Self::new();

    /// Acquires a read lock on `range`, or registers a pending request
    /// for the part that conflicts with an existing write borrow.
    ///
    /// The prefix of `range` before the first conflicting write (if any)
    /// is read-locked immediately; the remainder becomes a `Pending`
    /// whose start index advances as conflicting borrows are released.
    fn lock_read<Callback: LockCallback<Self::InProgress>>(
        self: Pin<&mut Self>,
        range: Range<Self::Index>,
        priority: Self::Priority,
        state: Pin<&mut Self::ReadLockState>,
        callback: Callback,
    ) -> Callback::Output {
        // SAFETY: we never move out of `this`; intrusive nodes keep
        // pointing at it.
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        debug_assert!(range.start < range.end, "invalid range");
        // Reads only conflict with write borrows.
        let (output, pending) =
            if let Some(first_conflict_index) = this.first_write_borrow_in_range(&range) {
                let (output, in_progress) = callback.in_progress();
                (
                    output,
                    Some(Pending {
                        range: first_conflict_index..range.end.clone(),
                        priority,
                        parent: LockStatePtr::Read((&*state).into()),
                        in_progress,
                    }),
                )
            } else {
                // Fully granted; the priority is no longer needed.
                drop(priority);
                (callback.complete(), None)
            };
        // The immediately-granted prefix; empty (None) when the conflict
        // starts right at `range.start`.
        let borrow_range = if let Some(pending) = &pending {
            (pending.range.start != range.start).then(|| range.start..pending.range.start.clone())
        } else {
            Some(range)
        };
        if state.parent.get().is_some() {
            panic_lock_state_in_use();
        }
        state.parent.set(Some(NonNull::from(&*this)));
        // Tree mutation must not unwind part-way; that would leave the
        // intrusive structures inconsistent.
        abort_on_unwind(|| {
            if let Some(borrow_range) = borrow_range {
                let read_nodes: [NonNull<_>; 2] = {
                    // SAFETY: `state` is not linked anywhere else, so we
                    // have exclusive access to its cells.
                    let read_nodes = unsafe { &mut *state.read.get() };
                    debug_assert!(read_nodes.is_none());
                    // `+1` endpoint at the start, `-1` at the end.
                    let [n0, n1] = read_nodes.insert([
                        rbtree::Node::new((borrow_range.start, 1), 1),
                        rbtree::Node::new((borrow_range.end, -1), -1),
                    ]);
                    [n0.into(), n1.into()]
                };
                unsafe {
                    rbtree::Node::insert(ReadNodeCallback, &mut this.reads, read_nodes[0]);
                    rbtree::Node::insert(ReadNodeCallback, &mut this.reads, read_nodes[1]);
                };
            }
            if let Some(pending) = pending {
                let pending_node: NonNull<_> = {
                    let pending_node = unsafe { &mut *state.pending.get() };
                    debug_assert!(pending_node.is_none());
                    pending_node.insert(rbtree::Node::new(pending, ())).into()
                };
                unsafe {
                    rbtree::Node::insert(PendingNodeCallback, &mut this.pendings, pending_node)
                };
            }
            true
        });
        output
    }

    /// Acquires a write lock on `range`, or registers a pending request
    /// for the part that conflicts with an existing read or write borrow.
    fn lock_write<Callback: LockCallback<Self::InProgress>>(
        self: Pin<&mut Self>,
        range: Range<Self::Index>,
        priority: Self::Priority,
        state: Pin<&mut Self::WriteLockState>,
        callback: Callback,
    ) -> Callback::Output {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        debug_assert!(range.start < range.end, "invalid range");
        // Writes conflict with both reads and writes; take the earliest
        // conflict of the two.
        let first_conflict_index = match (
            this.first_write_borrow_in_range(&range),
            this.first_read_borrow_in_range(&range),
        ) {
            (None, None) => None,
            (one @ Some(_), None) | (None, one @ Some(_)) => one,
            (Some(i0), Some(i1)) => Some(i0.min(i1)),
        };
        let (output, pending) = if let Some(first_conflict_index) = first_conflict_index {
            let (output, in_progress) = callback.in_progress();
            (
                output,
                Some(Pending {
                    range: first_conflict_index..range.end.clone(),
                    priority,
                    parent: LockStatePtr::Write((&*state).into()),
                    in_progress,
                }),
            )
        } else {
            drop(priority);
            (callback.complete(), None)
        };
        // The immediately-granted prefix, as in `lock_read`.
        let borrow_range = if let Some(pending) = &pending {
            (pending.range.start != range.start).then(|| range.start..pending.range.start.clone())
        } else {
            Some(range)
        };
        if state.parent.get().is_some() {
            panic_lock_state_in_use();
        }
        state.parent.set(Some(NonNull::from(&*this)));
        abort_on_unwind(|| {
            if let Some(borrow_range) = borrow_range {
                let write_node: NonNull<_> = {
                    let write_node = unsafe { &mut *state.write.get() };
                    debug_assert!(write_node.is_none());
                    write_node
                        .insert(rbtree::Node::new(borrow_range, ()))
                        .into()
                };
                unsafe { rbtree::Node::insert(WriteNodeCallback, &mut this.writes, write_node) };
            }
            if let Some(pending) = pending {
                let pending_node: NonNull<_> = {
                    let pending_node = unsafe { &mut *state.pending.get() };
                    debug_assert!(pending_node.is_none());
                    pending_node.insert(rbtree::Node::new(pending, ())).into()
                };
                unsafe {
                    rbtree::Node::insert(PendingNodeCallback, &mut this.pendings, pending_node)
                };
            }
            output
        })
    }

    /// Attempts to read-lock `range` without blocking; returns `false`
    /// (and links nothing) if any write borrow overlaps the range.
    fn try_lock_read(
        self: Pin<&mut Self>,
        range: Range<Self::Index>,
        state: Pin<&mut Self::TryReadLockState>,
    ) -> bool {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        debug_assert!(range.start < range.end, "invalid range");
        if this.first_write_borrow_in_range(&range).is_some() {
            return false;
        }
        if state.parent.get().is_some() {
            panic_lock_state_in_use();
        }
        state.parent.set(Some(NonNull::from(&*this)));
        abort_on_unwind(|| {
            let read_nodes: [NonNull<_>; 2] = {
                let read_nodes = unsafe { &mut *state.read.get() };
                debug_assert!(read_nodes.is_none());
                // `+1` endpoint at the start, `-1` at the end.
                let [n0, n1] = read_nodes.insert([
                    ReadNode::new((range.start, 1), 1),
                    ReadNode::new((range.end, -1), -1),
                ]);
                [n0.into(), n1.into()]
            };
            unsafe {
                rbtree::Node::insert(ReadNodeCallback, &mut this.reads, read_nodes[0]);
                rbtree::Node::insert(ReadNodeCallback, &mut this.reads, read_nodes[1]);
            };
            true
        })
    }

    /// Attempts to write-lock `range` without blocking; returns `false`
    /// (and links nothing) if any read or write borrow overlaps the range.
    fn try_lock_write(
        self: Pin<&mut Self>,
        range: Range<Self::Index>,
        state: Pin<&mut Self::TryWriteLockState>,
    ) -> bool {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        debug_assert!(range.start < range.end, "invalid range");
        if this.first_write_borrow_in_range(&range).is_some()
            || this.first_read_borrow_in_range(&range).is_some()
        {
            return false;
        }
        if state.parent.get().is_some() {
            panic_lock_state_in_use();
        }
        state.parent.set(Some(NonNull::from(&*this)));
        abort_on_unwind(|| {
            let write_node: NonNull<_> = {
                let write_node = unsafe { &mut *state.write.get() };
                debug_assert!(write_node.is_none());
                NonNull::from(write_node.insert(WriteNode::new(range, ())))
            };
            unsafe { rbtree::Node::insert(WriteNodeCallback, &mut this.writes, write_node) };
            true
        })
    }

    /// Releases a read lock, cancelling any still-pending part.
    ///
    /// Returns the `InProgress` payload if the request was still pending,
    /// so the caller can observe the cancellation.
    fn unlock_read<Callback: UnlockCallback<Self::InProgress>>(
        self: Pin<&mut Self>,
        state: Pin<&mut Self::ReadLockState>,
        callback: Callback,
    ) -> Option<Self::InProgress> {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        if state.parent.get() != Some(NonNull::from(&*this)) {
            panic_lock_state_incorrect_parent();
        }
        let removed_pending_node = abort_on_unwind(|| {
            // Unlink the pending request first, then release the granted
            // prefix (which may resume other pending requests).
            let removed_pending_node =
                unsafe { this.cancel_borrow(NonNull::new(state.pending.get()).unwrap()) };
            unsafe { this.unlock_read_inner(NonNull::new(state.read.get()).unwrap(), callback) };
            removed_pending_node
        });
        state.parent.set(None);
        removed_pending_node.map(|node| node.element.in_progress)
    }

    /// Releases a write lock, cancelling any still-pending part.
    /// Mirrors `unlock_read`.
    fn unlock_write<Callback: UnlockCallback<Self::InProgress>>(
        self: Pin<&mut Self>,
        state: Pin<&mut Self::WriteLockState>,
        callback: Callback,
    ) -> Option<Self::InProgress> {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        if state.parent.get() != Some(NonNull::from(&*this)) {
            panic_lock_state_incorrect_parent();
        }
        let removed_pending_node = abort_on_unwind(|| {
            let removed_pending_node =
                unsafe { this.cancel_borrow(NonNull::new(state.pending.get()).unwrap()) };
            unsafe { this.unlock_write_inner(NonNull::new(state.write.get()).unwrap(), callback) };
            removed_pending_node
        });
        state.parent.set(None);
        removed_pending_node.map(|node| node.element.in_progress)
    }

    /// Releases a lock taken by `try_lock_read` (never has a pending part).
    fn unlock_try_read<Callback: UnlockCallback<Self::InProgress>>(
        self: Pin<&mut Self>,
        state: Pin<&mut Self::TryReadLockState>,
        callback: Callback,
    ) {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        if state.parent.get() != Some(NonNull::from(&*this)) {
            panic_lock_state_incorrect_parent();
        }
        abort_on_unwind(|| {
            unsafe { this.unlock_read_inner(NonNull::new(state.read.get()).unwrap(), callback) };
        });
        state.parent.set(None);
    }

    /// Releases a lock taken by `try_lock_write` (never has a pending part).
    fn unlock_try_write<Callback: UnlockCallback<Self::InProgress>>(
        self: Pin<&mut Self>,
        state: Pin<&mut Self::TryWriteLockState>,
        callback: Callback,
    ) {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        if state.parent.get() != Some(NonNull::from(&*this)) {
            panic_lock_state_incorrect_parent();
        }
        abort_on_unwind(|| {
            unsafe { this.unlock_write_inner(NonNull::new(state.write.get()).unwrap(), callback) };
        });
        state.parent.set(None);
    }

    /// Reports whether a read request is still pending, exposing its
    /// `InProgress` payload mutably if so.
    fn inspect_read_mut<'a>(
        self: Pin<&'a mut Self>,
        state: Pin<&'a mut Self::ReadLockState>,
    ) -> LockState<&'a mut Self::InProgress> {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        if state.parent.get() != Some(NonNull::from(&*this)) {
            panic_lock_state_incorrect_parent();
        }
        match unsafe { &mut *state.pending.get() } {
            Some(pending_node) => LockState::InProgress(&mut pending_node.element.in_progress),
            None => LockState::Complete,
        }
    }

    /// Reports whether a write request is still pending, exposing its
    /// `InProgress` payload mutably if so.
    fn inspect_write_mut<'a>(
        self: Pin<&'a mut Self>,
        state: Pin<&'a mut Self::WriteLockState>,
    ) -> LockState<&'a mut Self::InProgress> {
        let this = unsafe { self.get_unchecked_mut() };
        let state = state.get();
        if state.parent.get() != Some(NonNull::from(&*this)) {
            panic_lock_state_incorrect_parent();
        }
        match unsafe { &mut *state.pending.get() } {
            Some(pending_node) => LockState::InProgress(&mut pending_node.element.in_progress),
            None => LockState::Complete,
        }
    }
}
impl<Index, Priority, InProgress> RbTreeIntervalRwLockCore<Index, Priority, InProgress>
where
    Index: Clone + Ord,
    Priority: Ord,
{
    /// Finds the first index in `range` covered by an active write
    /// borrow, or `None` if no write borrow overlaps `range`.
    fn first_write_borrow_in_range(&self, range: &Range<Index>) -> Option<Index> {
        // Find the first write borrow whose end exceeds `range.start`.
        // `.then(Ordering::Greater)` makes `range.start == e.end` compare
        // "greater", skipping borrows that end exactly at `range.start`
        // (ranges are half-open).
        let write_node = unsafe {
            rbtree::Node::lower_bound(&self.writes, |e| {
                range.start.cmp(&e.end).then(Ordering::Greater)
            })
            .map(|node| node.as_ref())
        };
        write_node
            .filter(|node| node.element.start < range.end)
            // The conflict begins at the later of the two starts.
            .map(|node| (&node.element.start).max(&range.start).clone())
    }

    /// Finds the first index in `range` covered by an active read borrow,
    /// or `None` if no read borrow overlaps `range`.
    fn first_read_borrow_in_range(&self, range: &Range<Index>) -> Option<Index> {
        // First endpoint strictly after `range.start` (ties compare
        // "greater" via `.then(Ordering::Greater)`).
        let read_node = unsafe {
            rbtree::Node::lower_bound(&self.reads, |e| {
                range.start.cmp(&e.0).then(Ordering::Greater)
            })
            .map(|node| node.as_ref())
        };
        if let Some(read_node) = read_node {
            debug_assert!(read_node.element.0 > range.start);
            // Prefix sum of the +1/-1 deltas before this endpoint = number
            // of read borrows covering `range.start` itself.
            let num_reads_at_start = unsafe {
                rbtree::Node::prefix_sum(ReadNodeCallback, &self.reads, Some(read_node.into()))
            };
            if num_reads_at_start > 0 {
                return Some(range.start.clone());
            }
            // No borrow covers `range.start`; the next endpoint, if it
            // falls inside `range`, must open a new borrow there.
            let read_starts_in_middle = read_node.element.0 < range.end;
            debug_assert_eq!(
                read_node.element.1, 1,
                "found a read borrow upper bound point without a matching lower \
                 bound point"
            );
            if read_starts_in_middle {
                return Some(read_node.element.0.clone());
            }
        } else {
            // No endpoint beyond `range.start` at all — no overlap.
        }
        None
    }

    /// Unlinks and returns the pending node stored in `pending_node`'s
    /// slot, if any.
    ///
    /// # Safety
    ///
    /// `pending_node` must point to a valid slot whose node (if present)
    /// is linked into `self.pendings`.
    unsafe fn cancel_borrow(
        &mut self,
        mut pending_node: NonNull<Option<PendingNode<Index, Priority, InProgress>>>,
    ) -> Option<PendingNode<Index, Priority, InProgress>> {
        let pending_node_ptr = unsafe { option_as_ptr(pending_node)? };
        unsafe { rbtree::Node::remove(PendingNodeCallback, &mut self.pendings, pending_node_ptr) };
        unsafe { pending_node.as_mut() }.take()
    }

    /// Releases the read borrow stored in `read_nodes_ptr`'s slot and
    /// resumes any pending requests that were blocked inside it.
    ///
    /// Pendings with `range.start` in `[start, end]` are visited in
    /// descending start order; before resuming each one, the released
    /// borrow is shrunk so the resumed request does not see our own
    /// endpoints as conflicts.
    ///
    /// # Safety
    ///
    /// `read_nodes_ptr` must point to a valid slot whose nodes (if
    /// present) are linked into `self.reads`.
    unsafe fn unlock_read_inner<Callback: UnlockCallback<InProgress>>(
        &mut self,
        mut read_nodes_ptr: NonNull<Option<[ReadNode<Index>; 2]>>,
        mut callback: Callback,
    ) {
        // Nothing was ever granted (pending started at `range.start`).
        let Some(mut read_nodes) = (unsafe { option_array2_as_ptr(read_nodes_ptr) })
        else { return; };
        let [start, mut end] = unsafe { read_nodes.map(|n| n.as_ref().element.0.clone()) };
        // Last pending whose start is <= `end` (and >= `start`).
        let mut maybe_pending_node = unsafe {
            if let Some(node) =
                rbtree::Node::lower_bound(&self.pendings, |e| end.cmp(&e.range.start))
            {
                rbtree::Node::predecessor(node)
            } else {
                self.pendings.map(|root| rbtree::Node::max(root))
            }
            .filter(|node| node.as_ref().element.range.start >= start)
        };
        let mut complete = false;
        while let Some(pending_node) = maybe_pending_node {
            // Grab the predecessor now: `resume_borrow` may unlink or
            // re-key `pending_node`.
            let next_maybe_pending_node = unsafe {
                rbtree::Node::predecessor(pending_node)
                    .filter(|node| node.as_ref().element.range.start >= start)
            };
            let index = unsafe { pending_node.as_ref().element.range.start.clone() };
            debug_assert!((&start..=&end).contains(&&index));
            if !complete {
                if index == end {
                    // Pending starts exactly at our end point; no shrink
                    // needed before resuming it.
                } else if index == start {
                    // Pending covers our whole borrow: remove both
                    // endpoints entirely.
                    unsafe {
                        rbtree::Node::remove(ReadNodeCallback, &mut self.reads, read_nodes[1]);
                        rbtree::Node::remove(ReadNodeCallback, &mut self.reads, read_nodes[0]);
                        *read_nodes_ptr.as_mut() = None;
                    }
                    complete = true;
                } else {
                    // Shrink our borrow's end down to the pending's start
                    // by re-keying the `-1` endpoint.
                    unsafe {
                        rbtree::Node::remove(ReadNodeCallback, &mut self.reads, read_nodes[1]);
                        read_nodes[1].as_mut().element.0 = index.clone();
                        read_nodes[1].as_mut().summary = -1;
                        rbtree::Node::insert(ReadNodeCallback, &mut self.reads, read_nodes[1]);
                    }
                    end = index;
                }
            }
            unsafe { self.resume_borrow(pending_node, &mut callback) };
            maybe_pending_node = next_maybe_pending_node;
        }
        // No pending reached `start`: drop the remaining borrow now.
        if !complete {
            unsafe {
                rbtree::Node::remove(ReadNodeCallback, &mut self.reads, read_nodes[0]);
                rbtree::Node::remove(ReadNodeCallback, &mut self.reads, read_nodes[1]);
                *read_nodes_ptr.as_mut() = None;
            }
        }
    }

    /// Releases the write borrow stored in `write_node_ptr`'s slot and
    /// resumes any pending requests blocked inside it. Mirrors
    /// `unlock_read_inner`, with a single range node instead of two
    /// endpoint nodes.
    ///
    /// # Safety
    ///
    /// `write_node_ptr` must point to a valid slot whose node (if
    /// present) is linked into `self.writes`.
    unsafe fn unlock_write_inner<Callback: UnlockCallback<InProgress>>(
        &mut self,
        mut write_node_ptr: NonNull<Option<WriteNode<Index>>>,
        mut callback: Callback,
    ) {
        // Nothing was ever granted.
        let Some(mut write_node) = (unsafe { option_as_ptr(write_node_ptr) })
        else { return; };
        let Range { start, mut end } = unsafe { write_node.as_ref().element.clone() };
        // Last pending whose start is <= `end` (and >= `start`).
        let mut maybe_pending_node = unsafe {
            if let Some(node) =
                rbtree::Node::lower_bound(&self.pendings, |e| end.cmp(&e.range.start))
            {
                rbtree::Node::predecessor(node)
            } else {
                self.pendings.map(|root| rbtree::Node::max(root))
            }
            .filter(|node| node.as_ref().element.range.start >= start)
        };
        let mut complete = false;
        while let Some(pending_node) = maybe_pending_node {
            let next_maybe_pending_node = unsafe {
                rbtree::Node::predecessor(pending_node)
                    .filter(|node| node.as_ref().element.range.start >= start)
            };
            let index = unsafe { pending_node.as_ref().element.range.start.clone() };
            debug_assert!((&start..=&end).contains(&&index));
            if !complete {
                if index == start {
                    // Pending covers our whole borrow: unlink it entirely.
                    unsafe {
                        rbtree::Node::remove(WriteNodeCallback, &mut self.writes, write_node)
                    };
                    unsafe { *write_node_ptr.as_mut() = None };
                    complete = true;
                } else {
                    // Shrink the borrow in place; the key (`start`) does
                    // not change, so no re-insertion is needed.
                    unsafe { write_node.as_mut().element.end = index.clone() };
                    end = index;
                }
            }
            unsafe { self.resume_borrow(pending_node, &mut callback) };
            maybe_pending_node = next_maybe_pending_node;
        }
        if !complete {
            unsafe { rbtree::Node::remove(WriteNodeCallback, &mut self.writes, write_node) };
            unsafe { *write_node_ptr.as_mut() = None };
        }
    }

    /// Re-evaluates a pending request after some borrow was released.
    ///
    /// Three outcomes: the request is still blocked at the same index
    /// (no-op); it can advance partially (its start moves to the next
    /// conflict and the granted prefix is added to the borrow trees); or
    /// it completes (the pending node is consumed and
    /// `callback.complete` is invoked with its payload).
    ///
    /// # Safety
    ///
    /// `pending_node_ptr` must be linked into `self.pendings`, and its
    /// `parent` lock state must be linked to `self`.
    unsafe fn resume_borrow(
        &mut self,
        mut pending_node_ptr: NonNull<PendingNode<Index, Priority, InProgress>>,
        callback: &mut impl UnlockCallback<InProgress>,
    ) {
        let pending: &Pending<_, _, _> = unsafe { &pending_node_ptr.as_ref().element };
        let parent = pending.parent;
        let range = pending.range.clone();
        // Pending reads only re-check writes; pending writes re-check both.
        let first_conflict_write_index = self.first_write_borrow_in_range(&range);
        let first_conflict_read_index = if let LockStatePtr::Write(_) = parent {
            self.first_read_borrow_in_range(&range)
        } else {
            None
        };
        let first_conflict_index = match (first_conflict_write_index, first_conflict_read_index) {
            (None, None) => None,
            (one @ Some(_), None) | (None, one @ Some(_)) => one,
            (Some(i0), Some(i1)) => Some(i0.min(i1)),
        };
        // Still blocked at the very same index: nothing to do.
        if first_conflict_index.as_ref() == Some(&range.start) {
            return;
        }
        unsafe { rbtree::Node::remove(PendingNodeCallback, &mut self.pendings, pending_node_ptr) };
        if let Some(first_conflict_index) = first_conflict_index.clone() {
            // Partial progress: advance the pending's start and re-link it
            // under its new key.
            unsafe { pending_node_ptr.as_mut() }.element.range.start = first_conflict_index;
            unsafe {
                rbtree::Node::insert(PendingNodeCallback, &mut self.pendings, pending_node_ptr);
            }
        } else {
            // Fully granted: move the node out of its owner's slot and
            // report completion with the `InProgress` payload.
            let pending_cell: *mut Option<PendingNode<_, _, _>> = match parent {
                LockStatePtr::Write(state) => unsafe { state.as_ref() }.pending.get(),
                LockStatePtr::Read(state) => unsafe { state.as_ref() }.pending.get(),
            };
            debug_assert_eq!(Some(pending_node_ptr), unsafe {
                option_as_ptr(NonNull::new(pending_cell).unwrap())
            });
            let in_progress = unsafe { (*pending_cell).take() }
                .unwrap()
                .element
                .in_progress;
            callback.complete(in_progress);
        }
        // Extend (or create) the owner's borrow to cover the newly
        // granted prefix `range.start..new_end`.
        let new_end = first_conflict_index.unwrap_or(range.end);
        match parent {
            LockStatePtr::Write(state) => {
                let state = unsafe { state.as_ref() };
                debug_assert_eq!(state.parent.get(), Some(NonNull::from(&*self)));
                let write_node_cell = unsafe { &mut *state.write.get() };
                if let Some(write_node) = write_node_cell.as_mut() {
                    // Grow the existing borrow; its key (start) is unchanged.
                    write_node.element.end = new_end;
                } else {
                    // No prefix had been granted before: link a new node.
                    let write_node =
                        write_node_cell.insert(WriteNode::new(range.start..new_end, ()));
                    unsafe {
                        rbtree::Node::insert(
                            WriteNodeCallback,
                            &mut self.writes,
                            NonNull::from(write_node),
                        );
                    }
                }
            }
            LockStatePtr::Read(state) => {
                let state = unsafe { state.as_ref() };
                debug_assert_eq!(state.parent.get(), Some(NonNull::from(&*self)));
                let read_node_cells = unsafe { &mut *state.read.get() };
                if let Some([_, read_node1]) = read_node_cells.as_mut() {
                    // Move the `-1` endpoint forward by re-keying it.
                    unsafe {
                        rbtree::Node::remove(
                            ReadNodeCallback,
                            &mut self.reads,
                            NonNull::from(&mut *read_node1),
                        );
                    }
                    read_node1.element.0 = new_end;
                    read_node1.summary = -1;
                    unsafe {
                        rbtree::Node::insert(
                            ReadNodeCallback,
                            &mut self.reads,
                            NonNull::from(&mut *read_node1),
                        );
                    }
                } else {
                    // No prefix had been granted before: link both endpoints.
                    let read_nodes = read_node_cells.insert([
                        ReadNode::new((range.start, 1), 1),
                        ReadNode::new((new_end, -1), -1),
                    ]);
                    unsafe {
                        rbtree::Node::insert(
                            ReadNodeCallback,
                            &mut self.reads,
                            NonNull::from(&mut read_nodes[0]),
                        );
                        rbtree::Node::insert(
                            ReadNodeCallback,
                            &mut self.reads,
                            NonNull::from(&mut read_nodes[1]),
                        );
                    }
                }
            }
        }
    }
}
/// Panics when a `*LockState` is passed to a `lock_*`/`try_lock_*` call
/// while it still tracks an earlier, unreleased lock.
#[cold]
#[track_caller]
fn panic_lock_state_in_use() -> ! {
    panic!("attempted to occupy a `*LockState` that is still in use")
}
/// Panics when an unlock/inspect call receives a `*LockState` that is not
/// linked to the lock core it was invoked on.
#[cold]
#[track_caller]
fn panic_lock_state_incorrect_parent() -> ! {
    panic!("attempted to operate on a lock with an incorrect origin")
}
/// Converts a pointer to an `Option<T>` into an optional pointer to the
/// contained value (`None` when the slot is empty).
///
/// # Safety
///
/// `p` must point to a live `Option<T>` to which the caller has exclusive
/// access for the duration of the call.
#[inline]
unsafe fn option_as_ptr<T>(mut p: NonNull<Option<T>>) -> Option<NonNull<T>> {
    // SAFETY: upheld by the caller's exclusive-access guarantee.
    let slot = unsafe { p.as_mut() };
    slot.as_mut().map(NonNull::from)
}
/// Converts a pointer to an `Option<[T; 2]>` into an optional pair of
/// pointers to the two elements (`None` when the slot is empty).
///
/// # Safety
///
/// `p` must point to a live `Option<[T; 2]>` to which the caller has
/// exclusive access for the duration of the call.
#[inline]
unsafe fn option_array2_as_ptr<T>(mut p: NonNull<Option<[T; 2]>>) -> Option<[NonNull<T>; 2]> {
    // SAFETY: upheld by the caller's exclusive-access guarantee.
    match unsafe { p.as_mut() } {
        Some(pair) => Some(pair.each_mut().map(NonNull::from)),
        None => None,
    }
}
impl<Index, Priority, InProgress> Drop for RbTreeIntervalRwLockCore<Index, Priority, InProgress> {
    /// Dropping the core while any tree is non-empty would leave dangling
    /// pointers inside live `*LockState` objects, so panic instead of
    /// silently invalidating them.
    #[inline]
    #[track_caller]
    fn drop(&mut self) {
        if self.reads.is_some() || self.writes.is_some() || self.pendings.is_some() {
            panic_drop_when_still_locked();
        }
    }
}
/// Panics when the lock core is dropped while borrows or pending requests
/// are still linked into it.
#[cold]
#[track_caller]
fn panic_drop_when_still_locked() -> ! {
    panic!("attempted to drop an `RbTreeIntervalRwLockCore` while it's locked")
}
// The four `EarlyDrop` impls below all enforce the same invariant: a lock
// state may only be torn down after it has been unlinked from its parent
// lock core (`parent == None`); otherwise the core would be left with
// dangling tree-node pointers.
impl<Index, Priority, InProgress> EarlyDrop for ReadLockStateInner<Index, Priority, InProgress> {
    #[inline]
    #[track_caller]
    unsafe fn early_drop(self: Pin<&Self>) {
        if self.parent.get().is_some() {
            panic_drop_when_still_linked();
        }
    }
}
impl<Index, Priority, InProgress> EarlyDrop for WriteLockStateInner<Index, Priority, InProgress> {
    #[inline]
    #[track_caller]
    unsafe fn early_drop(self: Pin<&Self>) {
        if self.parent.get().is_some() {
            panic_drop_when_still_linked();
        }
    }
}
impl<Index, Priority, InProgress> EarlyDrop for TryReadLockStateInner<Index, Priority, InProgress> {
    #[inline]
    #[track_caller]
    unsafe fn early_drop(self: Pin<&Self>) {
        if self.parent.get().is_some() {
            panic_drop_when_still_linked();
        }
    }
}
impl<Index, Priority, InProgress> EarlyDrop
    for TryWriteLockStateInner<Index, Priority, InProgress>
{
    #[inline]
    #[track_caller]
    unsafe fn early_drop(self: Pin<&Self>) {
        if self.parent.get().is_some() {
            panic_drop_when_still_linked();
        }
    }
}
/// Panics when a `*LockState` is destroyed while it is still linked into
/// a lock core.
#[cold]
#[track_caller]
fn panic_drop_when_still_linked() -> ! {
    panic!("attempted to early-drop a `*LockState` while it's holding a lock")
}