#![no_std]
#![warn(
elided_lifetimes_in_paths,
explicit_outlives_requirements,
missing_debug_implementations,
missing_docs,
semicolon_in_expressions_from_macros,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_qualifications
)]
use core::cell::{Cell, UnsafeCell};
use core::mem::ManuallyDrop;
use core::ops::{Deref, DerefMut};
use core::pin::Pin;
use lilos::create_node_with_meta;
use lilos::exec::noop_waker;
use lilos::list::List;
use lilos::util::CancelSafe;
use pin_project::pin_project;
use scopeguard::ScopeGuard;
/// An async read-write lock: allows either many concurrent shared (read)
/// accesses to the contents, or one exclusive (write) access, never both.
///
/// The lock must be created and pinned via the two-phase [`RwLock::new`] /
/// [`RwLock::finish_init`] sequence; the `create_rwlock!` macro does this
/// correctly.
#[derive(Debug)]
#[pin_project]
pub struct RwLock<T> {
    // All locking protocol state (reader count + wait queue). Kept in a
    // separate non-generic type so guard internals don't depend on `T`.
    #[pin]
    lock: LockImpl,
    // The protected data. `UnsafeCell` because guards derive `&T` / `&mut T`
    // from a shared `&RwLock<T>`; `lock` arbitrates which one is legal.
    contents: UnsafeCell<T>,
}
impl<T> RwLock<T> {
    /// Locks for shared (read) access, waiting in line if the lock is held
    /// exclusively or other tasks are already queued.
    ///
    /// The returned guard releases its share when dropped.
    pub async fn lock_shared(self: Pin<&Self>) -> SharedGuard<'_, T> {
        let lock = self.project_ref().lock.lock_shared().await;
        SharedGuard {
            _lock: lock,
            // SAFETY: holding a shared lock ensures no `&mut T` alias exists
            // for as long as the guard (and thus this reference) lives.
            contents: unsafe { &*self.contents.get() },
        }
    }
    /// Attempts shared access without waiting; fails with [`InUse`] if the
    /// lock is held exclusively or any task is queued for it.
    pub fn try_lock_shared(
        self: Pin<&Self>,
    ) -> Result<SharedGuard<'_, T>, InUse> {
        let lock = self.project_ref().lock.try_lock_shared()?;
        Ok(SharedGuard {
            _lock: lock,
            // SAFETY: as in `lock_shared` — the shared lock excludes writers.
            contents: unsafe { &*self.contents.get() },
        })
    }
    /// Locks for exclusive (write) access, waiting until all current holders
    /// and earlier waiters are done.
    pub async fn lock_exclusive(self: Pin<&Self>) -> ActionPermit<'_, T> {
        let lock = self.project_ref().lock.lock_exclusive().await;
        ActionPermit {
            _lock: lock,
            // SAFETY: the exclusive lock guarantees no other reference into
            // the cell exists while the permit lives.
            contents: unsafe { &mut *self.contents.get() },
        }
    }
    /// Attempts exclusive access without waiting; fails with [`InUse`] if any
    /// lock (shared or exclusive) is outstanding.
    pub fn try_lock_exclusive(
        self: Pin<&Self>,
    ) -> Result<ActionPermit<'_, T>, InUse> {
        let lock = self.project_ref().lock.try_lock_exclusive()?;
        Ok(ActionPermit {
            _lock: lock,
            // SAFETY: as in `lock_exclusive`.
            contents: unsafe { &mut *self.contents.get() },
        })
    }
    /// First phase of two-phase construction: produces the lock with its
    /// internal wait list not yet linked up (mirroring lilos's `List`
    /// initialization protocol).
    ///
    /// # Safety
    ///
    /// The returned value must be pinned, and [`RwLock::finish_init`] must be
    /// run on it, before any other method is used. Prefer the
    /// `create_rwlock!` macro, which performs both steps correctly.
    pub unsafe fn new(contents: T) -> ManuallyDrop<Self> {
        // SAFETY: forwarding our caller's contract — they promise to pin the
        // result and call `finish_init` before use.
        let list = unsafe { List::new_with_meta(Access::Exclusive) };
        ManuallyDrop::new(Self {
            lock: LockImpl {
                readers: Cell::new(0),
                waiters: ManuallyDrop::into_inner(list),
            },
            contents: UnsafeCell::new(contents),
        })
    }
    /// Second phase of construction: completes wait-list setup now that the
    /// lock is pinned.
    ///
    /// # Safety
    ///
    /// Must be called exactly once, on a value produced by [`RwLock::new`],
    /// before any other use.
    pub unsafe fn finish_init(this: Pin<&mut Self>) {
        // SAFETY: forwarding our caller's contract to `List::finish_init`.
        unsafe {
            List::finish_init(this.project().lock.project().waiters);
        }
    }
}
/// Non-generic core of the lock, shared by both access modes.
#[derive(Debug)]
#[pin_project]
struct LockImpl {
    // Lock state encoding: 0 = free, n > 0 = n shared holders,
    // n < 0 = exclusively held (values below -1 arise when exclusive tokens
    // are cloned for `map_split`).
    readers: Cell<isize>,
    // Queue of waiting tasks; each node's metadata records which kind of
    // access it wants, so releasers can decide whom to wake.
    #[pin]
    waiters: List<(), Access>,
}
impl LockImpl {
    /// Acquires a shared lock, queueing if it can't be granted immediately.
    async fn lock_shared(self: Pin<&Self>) -> SharedInternal<'_> {
        // Fast path: nobody queued and the reader count has room.
        if let Ok(guard) = self.try_lock_shared() {
            return guard;
        }
        create_node_with_meta!(node, (), Access::Shared, noop_waker());
        self.project_ref()
            .waiters
            .insert_and_wait_with_cleanup(node, || {
                // The releasing side increments `readers` on our behalf
                // *before* waking this node (see release_exclusive /
                // release_shared), so if this future is cancelled after
                // being woken we already own a share and must hand it back.
                unsafe {
                    self.release_shared();
                }
            })
            .await;
        // Normal wake-up: our share was already recorded by the waker, so
        // just wrap it in a token.
        SharedInternal { lock: self }
    }
    /// Grants a shared lock if possible right now.
    ///
    /// Refuses whenever the wait list is non-empty — even if the lock state
    /// would allow sharing — so new readers can't barge past queued writers.
    fn try_lock_shared(self: Pin<&Self>) -> Result<SharedInternal<'_>, InUse> {
        if !self.waiters.is_empty() {
            return Err(InUse);
        }
        // Sharing needs 0 <= r < isize::MAX: not exclusively held (r < 0)
        // and not saturated (r == isize::MAX).
        let r = self.readers.get();
        if (0..isize::MAX).contains(&r) {
            self.readers.set(r + 1);
            Ok(SharedInternal { lock: self })
        } else {
            Err(InUse)
        }
    }
    /// Runs when a *queued* exclusive waiter is cancelled: shared waiters
    /// that were parked behind it may now be eligible to proceed.
    fn process_exclusive_cancellation(self: Pin<&Self>) {
        // Only relevant if no writer currently holds the lock.
        if self.readers.get() >= 0 {
            // Grant-and-wake consecutive shared waiters from the head of the
            // queue, stopping at the next exclusive waiter or on reader-count
            // saturation. Each grant bumps `readers` before the wake.
            self.project_ref().waiters.wake_while(|n| {
                if n.meta() == &Access::Shared {
                    let r = self.readers.get();
                    if r < isize::MAX {
                        self.readers.set(self.readers.get() + 1);
                        return true;
                    }
                }
                false
            });
        }
    }
    /// Acquires the exclusive lock, queueing if it can't be granted now.
    async fn lock_exclusive(self: Pin<&Self>) -> ExclusiveInternal<'_> {
        // Fast path: the lock is entirely free.
        if let Ok(permit) = self.try_lock_exclusive() {
            return permit;
        }
        // Cancellation trap: fires if this future is dropped while still
        // queued (i.e. the cleanup below never ran and never defused it).
        // Declared *before* `node` so that, on cancellation, the node has
        // already been unlinked by the time the trap's closure runs (locals
        // drop in reverse declaration order).
        let mut trap = Some(scopeguard::guard((), |_| {
            self.process_exclusive_cancellation();
        }));
        create_node_with_meta!(node, (), Access::Exclusive, noop_waker());
        self.project_ref()
            .waiters
            .insert_and_wait_with_cleanup(node, || {
                // Cancelled *after* being woken: the waker already recorded
                // us as the exclusive owner (readers set to -1), so defuse
                // the trap and release that ownership.
                ScopeGuard::into_inner(trap.take().unwrap());
                unsafe {
                    self.release_exclusive();
                }
            })
            .await;
        // Normal wake-up: ownership was granted by the waker; defuse the
        // cancellation trap and wrap the permit.
        ScopeGuard::into_inner(trap.take().unwrap());
        ExclusiveInternal { lock: self }
    }
    /// Grants the exclusive lock iff nobody holds any lock at all.
    fn try_lock_exclusive(
        self: Pin<&Self>,
    ) -> Result<ExclusiveInternal<'_>, InUse> {
        let r = self.readers.get();
        if r == 0 {
            // -1 marks a single exclusive claim; token clones push it lower.
            self.readers.set(-1);
            Ok(ExclusiveInternal { lock: self })
        } else {
            Err(InUse)
        }
    }
    /// Drops one exclusive claim; the final one (count -1 -> 0) hands the
    /// lock on to waiters.
    ///
    /// # Safety
    ///
    /// Caller must actually own an exclusive claim (readers < 0).
    unsafe fn release_exclusive(self: Pin<&Self>) {
        let prev = self.readers.get();
        debug_assert!(
            prev < 0,
            "release_exclusive used with no exclusive lock outstanding"
        );
        self.readers.set(prev + 1);
        if prev == -1 {
            // That was the final exclusive claim; the lock is now free.
            let p = self.project_ref();
            // Prefer handing the lock to a queued writer, granting it by
            // setting readers back to -1 before it even runs.
            if p.waiters.wake_one_if(|n| n.meta() == &Access::Exclusive) {
                self.readers.set(-1);
            } else {
                // No writer queued: admit a batch of readers from the head
                // of the queue, granting each a share before waking it.
                p.waiters.wake_while(|n| {
                    if n.meta() == &Access::Shared {
                        let r = self.readers.get();
                        if r < isize::MAX {
                            self.readers.set(self.readers.get() + 1);
                            return true;
                        }
                    }
                    false
                });
            }
        }
    }
    /// Drops one shared claim, possibly waking a waiter.
    ///
    /// # Safety
    ///
    /// Caller must actually own a shared claim (readers > 0).
    unsafe fn release_shared(self: Pin<&Self>) {
        let prev = self.readers.get();
        debug_assert!(
            prev > 0,
            "release_shared used with no shared lock outstanding"
        );
        self.readers.set(prev - 1);
        match prev {
            // Last reader just left: lock is free; hand it to a queued
            // writer if any (granting by setting readers to -1).
            1 => {
                if self
                    .project_ref()
                    .waiters
                    .wake_one_if(|n| n.meta() == &Access::Exclusive)
                {
                    self.readers.set(-1);
                }
            }
            // The reader count was saturated: one slot just opened, so
            // admit a single queued reader (restoring the count to MAX).
            isize::MAX => {
                if self
                    .project_ref()
                    .waiters
                    .wake_one_if(|n| n.meta() == &Access::Shared)
                {
                    self.readers.set(isize::MAX);
                }
            }
            // Otherwise readers remain and nobody new becomes eligible.
            _ => (),
        }
    }
}
/// Kind of access a queued waiter is requesting; stored as wait-list node
/// metadata so releasers can decide whom to wake.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum Access {
    /// Read-only access that may be shared with other readers.
    Shared,
    /// Read-write access that excludes all other holders.
    Exclusive,
}
/// Ownership token for one share of the lock; dropping it releases that
/// share. Public guard types hold one of these alongside a data reference.
#[derive(Debug)]
struct SharedInternal<'a> {
    lock: Pin<&'a LockImpl>,
}
impl Clone for SharedInternal<'_> {
    /// Duplicates a shared token by bumping the reader count; used by
    /// `SharedGuard::map_split`.
    ///
    /// # Panics
    ///
    /// Panics if the reader count would overflow `isize::MAX`.
    fn clone(&self) -> Self {
        // checked_add replaces the previous manual MAX test + bare panic!(),
        // so an overflow now aborts with a diagnostic message.
        let next = self
            .lock
            .readers
            .get()
            .checked_add(1)
            .expect("RwLock shared-lock count overflowed isize::MAX");
        self.lock.readers.set(next);
        Self { lock: self.lock }
    }
}
impl Drop for SharedInternal<'_> {
    fn drop(&mut self) {
        // SAFETY: this token's existence proves a shared claim is
        // outstanding, which is exactly release_shared's contract.
        unsafe {
            self.lock.release_shared();
        }
    }
}
/// Smart-pointer-style guard giving shared (`Deref`) access to the locked
/// data.
///
/// `T: ?Sized` so [`SharedGuard::map`] can project to slices, `str`, or
/// trait objects.
#[derive(Debug)]
#[must_use = "simply dropping SharedGuard unlocks the RwLock immediately"]
pub struct SharedGuard<'a, T: ?Sized> {
    // Keeps one share of the lock held while the guard lives.
    _lock: SharedInternal<'a>,
    // The protected data, or a projection of it.
    contents: &'a T,
}
impl<'a, T> SharedGuard<'a, T>
where
    T: ?Sized,
{
    /// Narrows a guard for `T` into a guard for some component `U` of `T`,
    /// chosen by `f`, without releasing the lock.
    pub fn map<U>(guard: Self, f: impl FnOnce(&T) -> &U) -> SharedGuard<'a, U>
    where
        U: ?Sized,
    {
        let Self { _lock, contents } = guard;
        let projected = f(contents);
        SharedGuard {
            _lock,
            contents: projected,
        }
    }
    /// Splits one guard into two over disjoint parts of `T`, as chosen by
    /// `f`. Each resulting guard holds its own share of the lock and
    /// releases it independently when dropped.
    pub fn map_split<U, V>(
        guard: Self,
        f: impl FnOnce(&T) -> (&U, &V),
    ) -> (SharedGuard<'a, U>, SharedGuard<'a, V>)
    where
        U: ?Sized,
        V: ?Sized,
    {
        let Self { _lock, contents } = guard;
        let (left, right) = f(contents);
        let first = SharedGuard {
            _lock: _lock.clone(),
            contents: left,
        };
        let second = SharedGuard {
            _lock,
            contents: right,
        };
        (first, second)
    }
}
/// Read access to the guarded data.
///
/// Implemented for `T: ?Sized` so guards produced by [`SharedGuard::map`]
/// that target unsized types (`str`, slices, trait objects) — which the
/// struct's own `T: ?Sized` bound permits — can still be dereferenced.
impl<T: ?Sized> Deref for SharedGuard<'_, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        self.contents
    }
}
/// Error returned by the `try_lock_*` operations when the lock can't be
/// granted immediately.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct InUse;
/// Ownership token for an exclusive claim; dropping it releases the claim.
/// Cloning (used by the `map_split` operations) adds another claim that must
/// also be dropped before the lock frees.
#[derive(Debug)]
#[must_use = "internal implementation issue"]
struct ExclusiveInternal<'a> {
    lock: Pin<&'a LockImpl>,
}
impl Clone for ExclusiveInternal<'_> {
    /// Duplicates an exclusive token by pushing the (negative) reader count
    /// one step further below zero; used by the `map_split` operations.
    ///
    /// # Panics
    ///
    /// Panics if the count would underflow `isize::MIN`.
    fn clone(&self) -> Self {
        // checked_sub replaces the previous manual MIN test + bare panic!(),
        // so an underflow now aborts with a diagnostic message.
        let next = self
            .lock
            .readers
            .get()
            .checked_sub(1)
            .expect("RwLock exclusive-lock count underflowed isize::MIN");
        self.lock.readers.set(next);
        Self { lock: self.lock }
    }
}
impl Drop for ExclusiveInternal<'_> {
    fn drop(&mut self) {
        // SAFETY: this token's existence proves an exclusive claim is
        // outstanding, which is exactly release_exclusive's contract.
        unsafe {
            self.lock.release_exclusive();
        }
    }
}
/// Permission to mutate the locked data, obtained from
/// [`RwLock::lock_exclusive`].
///
/// Mutation is mediated through [`ActionPermit::perform`] (a closure call)
/// rather than `DerefMut`; [`ActionPermit::inspect`] gives read-only access.
#[derive(Debug)]
#[must_use = "simply dropping ActionPermit unlocks the RwLock immediately"]
pub struct ActionPermit<'a, T> {
    // Keeps the exclusive lock held while the permit lives.
    _lock: ExclusiveInternal<'a>,
    // Mutable access to (a projection of) the protected data.
    contents: &'a mut T,
}
impl<'a, T> ActionPermit<'a, T> {
    /// Consumes the permit, runs `action` with exclusive access to the data,
    /// and releases the lock once `action` has returned.
    pub fn perform<R>(self, action: impl FnOnce(&mut T) -> R) -> R {
        // `self._lock` is dropped (releasing the lock) only after the call
        // completes, since the rest of `self` lives to the end of the body.
        action(self.contents)
    }
    /// Read-only peek at the data without giving up the permit.
    pub fn inspect(&self) -> &T {
        &*self.contents
    }
    /// Narrows this permit to a component of `T` chosen by `projection`,
    /// without releasing the lock.
    pub fn map<U>(
        self,
        projection: impl FnOnce(&mut T) -> &mut U,
    ) -> ActionPermit<'a, U> {
        ActionPermit {
            contents: projection(self.contents),
            _lock: self._lock,
        }
    }
    /// Splits this permit into two permits over disjoint parts of `T`; the
    /// lock stays held until both have been consumed or dropped.
    pub fn map_split<U, V>(
        self,
        split: impl FnOnce(&mut T) -> (&mut U, &mut V),
    ) -> (ActionPermit<'a, U>, ActionPermit<'a, V>) {
        let parts = split(self.contents);
        let token = self._lock;
        (
            ActionPermit {
                _lock: token.clone(),
                contents: parts.0,
            },
            ActionPermit {
                _lock: token,
                contents: parts.1,
            },
        )
    }
}
impl<T> RwLock<CancelSafe<T>> {
    /// Non-blocking variant of
    /// [`RwLock::lock_exclusive_assuming_cancel_safe`]; fails with [`InUse`]
    /// if any lock is outstanding.
    pub fn try_lock_exclusive_assuming_cancel_safe(
        self: Pin<&Self>,
    ) -> Result<ExclusiveGuard<'_, T>, InUse> {
        self.try_lock_exclusive().map(|permit| {
            let ActionPermit { _lock, contents } = permit;
            ExclusiveGuard {
                _lock,
                contents: &mut contents.0,
            }
        })
    }
    /// Locks for writing and returns a `Deref`/`DerefMut`-style guard
    /// instead of an [`ActionPermit`], unwrapping the `CancelSafe` layer —
    /// the wrapper signals that the contents tolerate partially-applied
    /// mutations if the holder is cancelled.
    pub async fn lock_exclusive_assuming_cancel_safe(
        self: Pin<&Self>,
    ) -> ExclusiveGuard<'_, T> {
        let permit = self.lock_exclusive().await;
        let ActionPermit { _lock, contents } = permit;
        ExclusiveGuard {
            _lock,
            contents: &mut contents.0,
        }
    }
}
/// Smart-pointer-style guard providing exclusive (`Deref`/`DerefMut`) access
/// to data that was wrapped in `CancelSafe`.
///
/// The lock is released when the guard is dropped.
#[derive(Debug)]
// must_use added for consistency with SharedGuard/ActionPermit: dropping the
// guard immediately unlocks, so ignoring it is almost certainly a bug.
#[must_use = "simply dropping ExclusiveGuard unlocks the RwLock immediately"]
pub struct ExclusiveGuard<'a, T> {
    // Keeps the exclusive lock held while the guard lives.
    _lock: ExclusiveInternal<'a>,
    // Mutable access to (a projection of) the protected data.
    contents: &'a mut T,
}
impl<'a, T> ExclusiveGuard<'a, T> {
    /// Narrows this guard to a component of `T` chosen by `projection`,
    /// without releasing the lock.
    pub fn map<U>(
        self,
        projection: impl FnOnce(&mut T) -> &mut U,
    ) -> ExclusiveGuard<'a, U> {
        ExclusiveGuard {
            contents: projection(self.contents),
            _lock: self._lock,
        }
    }
    /// Splits this guard into two guards over disjoint parts of `T`; the
    /// lock stays held until both have been dropped.
    pub fn map_split<U, V>(
        self,
        split: impl FnOnce(&mut T) -> (&mut U, &mut V),
    ) -> (ExclusiveGuard<'a, U>, ExclusiveGuard<'a, V>) {
        let halves = split(self.contents);
        let token = self._lock;
        let first = ExclusiveGuard {
            _lock: token.clone(),
            contents: halves.0,
        };
        let second = ExclusiveGuard {
            _lock: token,
            contents: halves.1,
        };
        (first, second)
    }
}
/// Read access to the guarded data.
impl<T> Deref for ExclusiveGuard<'_, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        self.contents
    }
}
/// Write access to the guarded data; sound because the guard holds the
/// exclusive lock for its whole lifetime.
impl<T> DerefMut for ExclusiveGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.contents
    }
}
/// Creates a pinned, fully-initialized [`RwLock`] as a local named `$var`.
///
/// Expands to the safe composition of the lock's two unsafe construction
/// phases:
/// 1. evaluate `$contents` and build the lock with [`RwLock::new`], pinning
///    it on the stack via `core::pin::pin!`;
/// 2. complete setup with [`RwLock::finish_init`];
/// 3. shadow `$var` with a `Pin<&RwLock<_>>` so the mutable, half-built
///    value can no longer be touched.
#[macro_export]
macro_rules! create_rwlock {
    ($var:ident, $contents:expr) => {
        let mut $var = core::pin::pin!({
            // Bind the user expression first so its evaluation can't observe
            // the partially-initialized lock.
            let __contents = $contents;
            // SAFETY: finish_init is called immediately below, before any
            // other use, satisfying RwLock::new's contract.
            unsafe {
                core::mem::ManuallyDrop::into_inner($crate::RwLock::new(
                    __contents,
                ))
            }
        });
        // SAFETY: the value was created just above, is now pinned, and has
        // not been used yet.
        unsafe {
            $crate::RwLock::finish_init($var.as_mut());
        }
        let $var = $var.as_ref();
    };
}