use crate::sync::{RawMutex, WatchGuardMut, WatchGuardRef};
use crossbeam_utils::CachePadded;
use std::cell::UnsafeCell;
use std::fmt::{Debug, Formatter};
use std::sync::atomic;
use std::sync::atomic::{AtomicUsize, Ordering};
/// Heap-allocated state shared by every `RwLock` handle pointing at the
/// same value. Freed when the last handle drops (see `Drop for RwLock`).
pub(super) struct InnerRwLock<T> {
    // Implements the exclusive/shared locking protocol.
    mutex: RawMutex,
    // Number of live `RwLock` handles. Cache-padded so contended refcount
    // traffic does not share a cache line with the mutex or the data.
    ref_count: CachePadded<AtomicUsize>,
    // The protected value; `UnsafeCell` allows mutation through `&self`
    // once the mutex grants exclusive access.
    data: UnsafeCell<T>,
}
impl<T> InnerRwLock<T> {
fn new(val: T) -> Self {
Self {
mutex: RawMutex::new(),
ref_count: CachePadded::new(AtomicUsize::new(1)),
data: UnsafeCell::new(val),
}
}
}
/// A cloneable, reference-counted reader-writer lock: each clone is a new
/// handle to the same heap-allocated value (akin to `Arc<RwLock<T>>`).
#[repr(transparent)]
pub struct RwLock<T> {
    // Pointer produced by `Box::into_raw` in `new`; logically owned
    // jointly by all handles and released by the last `Drop`.
    ptr: *const InnerRwLock<T>,
}
// SAFETY: `RwLock` is a cloneable shared-ownership handle. Sending a handle
// to another thread lets that thread (a) drop the value there (`T: Send`)
// and (b) take a *shared* read lock concurrently with readers on other
// threads, producing `&T` on multiple threads at once (`T: Sync`).
// `T: Send` alone is therefore NOT sufficient for `Send` — the original
// `unsafe impl<T: Send> Send` was unsound for `!Sync` types such as
// `Cell<_>`. These are the same bounds `Arc<T>` uses.
unsafe impl<T: Send + Sync> Send for RwLock<T> {}
unsafe impl<T: Send + Sync> Sync for RwLock<T> {}
impl<T> RwLock<T> {
    /// Allocates a new value on the heap and returns the first handle to it.
    pub fn new(val: T) -> Self {
        Self {
            ptr: Box::into_raw(Box::new(InnerRwLock::new(val))),
        }
    }

    /// Borrows the shared heap state.
    ///
    /// SAFETY (internal): `ptr` comes from `Box::into_raw` and is only freed
    /// when the last handle drops, which cannot happen while `&self` exists.
    #[inline(always)]
    fn inner(&self) -> &InnerRwLock<T> {
        unsafe { &*self.ptr }
    }

    /// Blocks until the exclusive (write) lock is held, then returns a
    /// guard granting mutable access.
    pub fn lock_exclusive(&self) -> WatchGuardMut<'_, T> {
        let shared = self.inner();
        shared.mutex.lock_exclusive();
        WatchGuardMut::new(shared.data.get(), &shared.mutex)
    }

    /// Runs `f` under the exclusive lock and returns its result; the lock
    /// is released when the guard drops at the end of the call.
    pub fn with_exclusive<R>(&self, f: impl FnOnce(&mut T) -> R) -> R {
        f(&mut self.lock_exclusive())
    }

    /// Tries to take the exclusive lock without blocking; `None` if it is
    /// currently held (in either mode).
    pub fn try_lock(&self) -> Option<WatchGuardMut<'_, T>> {
        let shared = self.inner();
        shared
            .mutex
            .try_lock_exclusive()
            .then(|| WatchGuardMut::new(shared.data.get(), &shared.mutex))
    }

    /// Runs `f` under the exclusive lock if it can be taken immediately.
    pub fn try_with_exclusive<R>(&self, f: impl FnOnce(&mut T) -> R) -> Option<R> {
        let mut guard = self.try_lock()?;
        Some(f(&mut guard))
    }

    /// Blocks until a shared (read) lock is held, then returns a guard
    /// granting immutable access.
    pub fn lock_shared(&self) -> WatchGuardRef<'_, T> {
        let shared = self.inner();
        shared.mutex.lock_shared();
        // SAFETY: the shared lock is now held, so no exclusive writer can
        // exist; handing out `&T` is sound for the guard's lifetime.
        let data = unsafe { &*shared.data.get() };
        WatchGuardRef::new(data, &shared.mutex)
    }

    /// Runs `f` under a shared lock and returns its result.
    pub fn with_shared<R>(&self, f: impl FnOnce(&T) -> R) -> R {
        f(&self.lock_shared())
    }

    /// Tries to take a shared lock without blocking; `None` if an
    /// exclusive lock is currently held.
    pub fn try_lock_shared(&self) -> Option<WatchGuardRef<'_, T>> {
        let shared = self.inner();
        if !shared.mutex.try_lock_shared() {
            return None;
        }
        // SAFETY: the shared lock was just acquired above.
        Some(WatchGuardRef::new(
            unsafe { &*shared.data.get() },
            &shared.mutex,
        ))
    }

    /// Runs `f` under a shared lock if one can be taken immediately.
    pub fn try_with_shared<R>(&self, f: impl FnOnce(&T) -> R) -> Option<R> {
        Some(f(&self.try_lock_shared()?))
    }

    /// Whether the lock is held in any mode (racy snapshot).
    pub fn is_locked(&self) -> bool {
        self.inner().mutex.is_locked()
    }

    /// Whether the lock is held exclusively (racy snapshot).
    pub fn is_locked_exclusive(&self) -> bool {
        self.inner().mutex.is_locked_exclusive()
    }

    /// Whether the lock is held in shared mode (racy snapshot).
    pub fn is_locked_shared(&self) -> bool {
        self.inner().mutex.is_locked_shared()
    }
}
impl<T> Clone for RwLock<T> {
    /// Creates another handle to the same underlying value.
    fn clone(&self) -> Self {
        // Relaxed suffices for the increment: the new handle is derived
        // from an existing live one, so the allocation is already visible
        // to this thread (same rationale as `Arc::clone`).
        // NOTE(review): unlike `Arc`, there is no guard against refcount
        // overflow (e.g. a `mem::forget` loop); confirm whether that is an
        // accepted risk for this crate.
        self.inner().ref_count.fetch_add(1, Ordering::Relaxed);
        RwLock { ptr: self.ptr }
    }
}
impl<T> Drop for RwLock<T> {
    /// Decrements the handle count and frees the shared state when this
    /// was the last handle.
    fn drop(&mut self) {
        // Release on the decrement publishes this handle's prior writes to
        // whichever handle performs the final decrement; the winner pairs
        // it with the Acquire fence below before freeing, so it observes
        // every other handle's writes. This is the same decrement/fence
        // protocol `Arc` uses.
        if self.inner().ref_count.fetch_sub(1, Ordering::Release) == 1 {
            atomic::fence(Ordering::Acquire);
            let ptr = self.ptr as *mut InnerRwLock<T>;
            // SAFETY: the count just hit zero, so this is the sole
            // remaining handle; `ptr` came from `Box::into_raw` in `new`
            // and is reclaimed exactly once here.
            unsafe { drop(Box::from_raw(ptr)) };
        }
    }
}
impl<T: Default> Default for RwLock<T> {
fn default() -> Self {
Self::new(T::default())
}
}
impl<T: Debug> Debug for RwLock<T> {
    /// Non-blocking debug formatting: attempts `try_lock()` so a held lock
    /// prints as `data: None` instead of deadlocking the formatter.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let locked = self.try_lock();
        // Fixed: the struct name printed here was "MutexCell" (copy-paste
        // from a sibling type); it must match this type's name.
        f.debug_struct("RwLock").field("data", &locked).finish()
    }
}