use core::{
cell::UnsafeCell,
fmt,
mem::{ManuallyDrop, drop, forget},
ops::{Deref, DerefMut},
};
/// A `usize` exposing the `AtomicUsize` API without any actual atomicity.
///
/// Intended for single-threaded / interrupt-free contexts where real atomics
/// are unavailable or unnecessary: every operation is a plain read or write
/// of the inner cell, and all `Ordering` arguments are ignored.
struct NonAtomicUsize {
    value: UnsafeCell<usize>,
}

/// Stand-in for `core::sync::atomic::Ordering` so call sites keep the
/// familiar atomic API shape; `NonAtomicUsize` ignores the ordering entirely.
#[non_exhaustive]
#[derive(Clone, Copy)]
struct Ordering;

impl Ordering {
    // These constants mirror the variants of `core::sync::atomic::Ordering`,
    // hence the non-upper-case names.
    #[allow(non_upper_case_globals)]
    pub const Relaxed: Ordering = Ordering;
    #[allow(non_upper_case_globals)]
    pub const Release: Ordering = Ordering;
    #[allow(non_upper_case_globals)]
    pub const Acquire: Ordering = Ordering;
    #[allow(non_upper_case_globals)]
    pub const AcqRel: Ordering = Ordering;
    #[allow(dead_code)]
    #[allow(non_upper_case_globals)]
    pub const SeqCst: Ordering = Ordering;
}

impl NonAtomicUsize {
    /// Creates a new value.
    pub const fn new(value: usize) -> NonAtomicUsize {
        Self {
            value: UnsafeCell::new(value),
        }
    }
    /// Adds `value`, returning the previous value.
    pub fn fetch_add(&self, value: usize, _order: Ordering) -> usize {
        self.update_with(|x| x + value)
    }
    /// Subtracts `value`, returning the previous value.
    pub fn fetch_sub(&self, value: usize, _order: Ordering) -> usize {
        self.update_with(|x| x - value)
    }
    /// Bitwise-ANDs with `value`, returning the previous value.
    pub fn fetch_and(&self, value: usize, _order: Ordering) -> usize {
        self.update_with(|x| x & value)
    }
    /// Bitwise-ORs with `value`, returning the previous value.
    pub fn fetch_or(&self, value: usize, _order: Ordering) -> usize {
        self.update_with(|x| x | value)
    }
    /// Applies `f` to the current value, stores the result, and returns the
    /// value that was present *before* the update.
    #[inline]
    fn update_with<F>(&self, f: F) -> usize
    where
        F: FnOnce(usize) -> usize,
    {
        let value = self.get();
        self.set(f(value));
        value
    }
    #[inline]
    fn get(&self) -> usize {
        // SAFETY: no concurrent access is possible in the single-threaded
        // context this type is designed for.
        unsafe { *self.value.get() }
    }
    #[inline]
    fn set(&self, value: usize) {
        // SAFETY: see `get`.
        unsafe { *self.value.get() = value }
    }
    /// Loads the current value; the ordering is ignored.
    #[inline]
    pub fn load(&self, _order: Ordering) -> usize {
        self.get()
    }
    /// Stores `value`; the ordering is ignored.
    #[inline]
    pub fn store(&self, value: usize, _order: Ordering) {
        self.set(value);
    }
    /// Stores `new` if the current value equals `current`.
    ///
    /// Follows the `AtomicUsize::compare_exchange` contract: on success the
    /// *previous* value is returned in `Ok`; on failure the actual current
    /// value is returned in `Err`.
    pub fn compare_exchange(
        &self,
        current: usize,
        new: usize,
        _success: Ordering,
        _failure: Ordering,
    ) -> Result<usize, usize> {
        let value = self.get();
        if value == current {
            self.set(new);
            Ok(value)
        } else {
            Err(value)
        }
    }
}
/// A reader-writer lock over `T`.
///
/// The lock state is packed into a single `usize` (see the bit constants
/// below). The counter is a `NonAtomicUsize`, so this lock is only sound in
/// the single-threaded context that type assumes.
pub struct RwLock<T: ?Sized> {
    lock: NonAtomicUsize,
    data: UnsafeCell<T>,
}
// Lock-state bit layout: bit 0 = writer held, bit 1 = upgradeable reader
// held, bits 2.. = plain reader count (each reader adds `READER`).
const READER: usize = 1 << 2;
const UPGRADED: usize = 1 << 1;
const WRITER: usize = 1;
/// Guard returned by `RwLock::read`; releases one reader count on drop.
pub struct RwLockReadGuard<'a, T: 'a + ?Sized> {
    lock: &'a NonAtomicUsize,
    data: *const T,
}
/// Guard returned by `RwLock::write`; clears the writer bit on drop.
pub struct RwLockWriteGuard<'a, T: 'a + ?Sized> {
    inner: &'a RwLock<T>,
    data: *mut T,
}
/// Guard returned by `RwLock::upgradeable_read`; may later be upgraded to a
/// write guard. Clears the `UPGRADED` bit on drop.
pub struct RwLockUpgradableGuard<'a, T: 'a + ?Sized> {
    inner: &'a RwLock<T>,
    data: *const T,
}
// SAFETY: the lock serializes access to `T`, so the container is `Send` when
// `T` is, and `Sync` when `T` is `Send + Sync` — the same bounds std's
// `RwLock` uses.
// NOTE(review): the lock word is a non-atomic `NonAtomicUsize`, so actually
// using these impls across threads looks unsound — confirm this crate is
// restricted to single-threaded targets.
unsafe impl<T: ?Sized + Send> Send for RwLock<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for RwLock<T> {}
// SAFETY: a write guard hands out `&mut T` from a shared `&RwLock<T>`, so
// moving or sharing it across threads requires `T: Send + Sync`.
unsafe impl<T: ?Sized + Send + Sync> Send for RwLockWriteGuard<'_, T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for RwLockWriteGuard<'_, T> {}
// SAFETY: a read guard only hands out `&T`, so `T: Sync` suffices.
unsafe impl<T: ?Sized + Sync> Send for RwLockReadGuard<'_, T> {}
unsafe impl<T: ?Sized + Sync> Sync for RwLockReadGuard<'_, T> {}
// SAFETY: an upgradeable guard may become a write guard, so it carries the
// write-guard bounds.
unsafe impl<T: ?Sized + Send + Sync> Send for RwLockUpgradableGuard<'_, T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for RwLockUpgradableGuard<'_, T> {}
impl<T> RwLock<T> {
    /// Creates a new, unlocked `RwLock` wrapping `data`.
    #[inline]
    pub const fn new(data: T) -> Self {
        Self {
            lock: NonAtomicUsize::new(0),
            data: UnsafeCell::new(data),
        }
    }
    /// Consumes the lock and returns the wrapped value.
    #[inline]
    pub fn into_inner(self) -> T {
        // `RwLock` has no `Drop` impl, so the field can be moved out directly.
        self.data.into_inner()
    }
    /// Returns a raw pointer to the protected data without taking the lock.
    #[inline(always)]
    pub fn as_mut_ptr(&self) -> *mut T {
        self.data.get()
    }
}
impl<T: ?Sized> RwLock<T> {
    /// Acquires a read lock, panicking if a writer or upgradeable reader
    /// currently holds the lock (this lock never blocks).
    #[inline]
    pub fn read(&self) -> RwLockReadGuard<T> {
        self.try_read()
            .expect("Failed to get read lock, who are you waiting for?")
    }
    /// Acquires the exclusive write lock, panicking if the lock is not
    /// completely free.
    #[inline]
    pub fn write(&self) -> RwLockWriteGuard<T> {
        self.try_write()
            .expect("Failed to get write lock, who are you waiting for?")
    }
    /// Acquires an upgradeable read lock, panicking if a writer or another
    /// upgradeable reader holds the lock.
    #[inline]
    pub fn upgradeable_read(&self) -> RwLockUpgradableGuard<T> {
        self.try_upgradeable_read()
            .expect("Failed to get upgradeable read lock, who are you waiting for?")
    }
}
impl<T: ?Sized> RwLock<T> {
    /// Optimistically adds one reader count, panicking (after undoing the
    /// increment) if the count is implausibly large (e.g. leaked guards).
    /// Returns the lock state from *before* the increment.
    fn acquire_reader(&self) -> usize {
        const MAX_READERS: usize = usize::MAX / READER / 2;
        let value = self.lock.fetch_add(READER, Ordering::Acquire);
        if value > MAX_READERS * READER {
            self.lock.fetch_sub(READER, Ordering::Relaxed);
            panic!("Too many lock readers, cannot safely proceed");
        } else {
            value
        }
    }
    /// Tries to acquire a shared read lock; returns `None` if a writer or an
    /// upgradeable reader currently holds the lock.
    #[inline]
    pub fn try_read(&self) -> Option<RwLockReadGuard<T>> {
        let value = self.acquire_reader();
        // `value` is the pre-increment state: back the increment out if a
        // writer or upgradeable reader was already present.
        if value & (WRITER | UPGRADED) != 0 {
            self.lock.fetch_sub(READER, Ordering::Release);
            None
        } else {
            Some(RwLockReadGuard {
                lock: &self.lock,
                data: unsafe { &*self.data.get() },
            })
        }
    }
    /// Number of readers, counting an upgradeable reader as one reader.
    pub fn reader_count(&self) -> usize {
        let state = self.lock.load(Ordering::Relaxed);
        state / READER + (state & UPGRADED) / UPGRADED
    }
    /// 1 if a writer holds the lock, 0 otherwise.
    pub fn writer_count(&self) -> usize {
        (self.lock.load(Ordering::Relaxed) & WRITER) / WRITER
    }
    /// Force-releases one reader count without a guard.
    ///
    /// # Safety
    /// There must be an outstanding reader (e.g. a forgotten
    /// `RwLockReadGuard`) whose count this call releases; otherwise the
    /// counter underflows.
    #[inline]
    pub unsafe fn force_read_decrement(&self) {
        debug_assert!(self.lock.load(Ordering::Relaxed) & !WRITER > 0);
        self.lock.fetch_sub(READER, Ordering::Release);
    }
    /// Force-releases the writer (and any upgradeable) bit without a guard.
    ///
    /// # Safety
    /// A writer must actually hold the lock (e.g. via a forgotten guard).
    #[inline]
    pub unsafe fn force_write_unlock(&self) {
        debug_assert_eq!(self.lock.load(Ordering::Relaxed) & !(WRITER | UPGRADED), 0);
        self.lock.fetch_and(!(WRITER | UPGRADED), Ordering::Release);
    }
    /// Tries to acquire the exclusive write lock; succeeds only when the
    /// lock is completely free (no readers, no upgradeable reader, no
    /// writer).
    #[inline]
    pub fn try_write(&self) -> Option<RwLockWriteGuard<T>> {
        if self
            .lock
            .compare_exchange(0, WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
        {
            Some(RwLockWriteGuard {
                inner: self,
                data: unsafe { &mut *self.data.get() },
            })
        } else {
            None
        }
    }
    /// Same as `try_write`: the non-atomic backing store never fails
    /// spuriously, so the "weak" variant is identical.
    #[inline]
    pub fn try_write_weak(&self) -> Option<RwLockWriteGuard<T>> {
        self.try_write()
    }
    /// Tries to acquire an upgradeable read lock; fails if a writer or
    /// another upgradeable reader is present.
    ///
    /// NOTE(review): on failure the `UPGRADED` bit set by `fetch_or` is left
    /// behind; it is only cleared when the conflicting write guard drops
    /// (its drop does `fetch_and(!(WRITER | UPGRADED))`) — confirm this is
    /// the intended protocol.
    #[inline]
    pub fn try_upgradeable_read(&self) -> Option<RwLockUpgradableGuard<T>> {
        if self.lock.fetch_or(UPGRADED, Ordering::Acquire) & (WRITER | UPGRADED) == 0 {
            Some(RwLockUpgradableGuard {
                inner: self,
                data: unsafe { &*self.data.get() },
            })
        } else {
            None
        }
    }
    /// Returns a mutable reference to the data; `&mut self` statically
    /// guarantees exclusive access, so no lock state is touched.
    pub fn get_mut(&mut self) -> &mut T {
        unsafe { &mut *self.data.get() }
    }
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLock<T> {
    /// Formats the inner value if a read lock can be taken, otherwise prints
    /// a `<locked>` placeholder.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(guard) = self.try_read() {
            write!(f, "RwLock {{ data: ")?;
            fmt::Debug::fmt(&*guard, f)?;
            write!(f, " }}")
        } else {
            write!(f, "RwLock {{ <locked> }}")
        }
    }
}
impl<T: Default> Default for RwLock<T> {
    /// Returns an unlocked lock holding `T::default()`.
    fn default() -> Self {
        Self::new(T::default())
    }
}
impl<T> From<T> for RwLock<T> {
fn from(data: T) -> Self {
Self::new(data)
}
}
impl<'rwlock, T: ?Sized> RwLockReadGuard<'rwlock, T> {
#[inline]
pub fn leak(this: Self) -> &'rwlock T {
let this = ManuallyDrop::new(this);
unsafe { &*this.data }
}
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLockReadGuard<'_, T> {
    // Delegate to the guarded value, preserving the caller's format flags.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<T: ?Sized + fmt::Display> fmt::Display for RwLockReadGuard<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
impl<'rwlock, T: ?Sized + fmt::Debug> RwLockUpgradableGuard<'rwlock, T> {
#[inline]
pub fn upgrade(self) -> RwLockWriteGuard<'rwlock, T> {
self.try_upgrade()
.expect("Failed to get read lock, who are you waiting for?")
}
}
impl<'rwlock, T: ?Sized> RwLockUpgradableGuard<'rwlock, T> {
    /// Tries to upgrade to a write lock; succeeds only when this upgradeable
    /// guard is the sole holder (state is exactly `UPGRADED`).
    #[inline]
    pub fn try_upgrade(self) -> Result<RwLockWriteGuard<'rwlock, T>, Self> {
        if self
            .inner
            .lock
            .compare_exchange(UPGRADED, WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
        {
            let inner = self.inner;
            // Skip this guard's Drop: the UPGRADED bit was already swapped
            // for WRITER by the compare_exchange above.
            forget(self);
            Ok(RwLockWriteGuard {
                inner,
                data: unsafe { &mut *inner.data.get() },
            })
        } else {
            Err(self)
        }
    }
    /// Same as `try_upgrade`: the non-atomic backing store never fails
    /// spuriously, so the "weak" variant is identical.
    #[inline]
    pub fn try_upgrade_weak(self) -> Result<RwLockWriteGuard<'rwlock, T>, Self> {
        self.try_upgrade()
    }
    /// Trades the upgradeable read lock for a plain read lock.
    #[inline]
    pub fn downgrade(self) -> RwLockReadGuard<'rwlock, T> {
        // Take a reader count first; dropping `self` then clears UPGRADED,
        // leaving only the reader count held.
        self.inner.acquire_reader();
        let inner = self.inner;
        drop(self);
        RwLockReadGuard {
            lock: &inner.lock,
            data: unsafe { &*inner.data.get() },
        }
    }
    /// Leaks the guard: the upgradeable lock is held forever and a `'rwlock`
    /// reference to the data is returned.
    #[inline]
    pub fn leak(this: Self) -> &'rwlock T {
        let this = ManuallyDrop::new(this);
        unsafe { &*this.data }
    }
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLockUpgradableGuard<'_, T> {
    // Delegate to the guarded value, preserving the caller's format flags.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<T: ?Sized + fmt::Display> fmt::Display for RwLockUpgradableGuard<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> {
    /// Trades the write lock for a read lock without releasing the lock in
    /// between.
    #[inline]
    pub fn downgrade(self) -> RwLockReadGuard<'rwlock, T> {
        // Take a reader count first; dropping `self` then clears the WRITER
        // (and any stray UPGRADED) bit, leaving only the reader count.
        self.inner.acquire_reader();
        let inner = self.inner;
        drop(self);
        RwLockReadGuard {
            lock: &inner.lock,
            data: unsafe { &*inner.data.get() },
        }
    }
    /// Trades the write lock for an upgradeable read lock.
    #[inline]
    pub fn downgrade_to_upgradeable(self) -> RwLockUpgradableGuard<'rwlock, T> {
        debug_assert_eq!(
            self.inner.lock.load(Ordering::Acquire) & (WRITER | UPGRADED),
            WRITER
        );
        // Replace the state with just the UPGRADED bit; `forget` skips the
        // write-guard Drop so the state is not cleared again afterwards.
        self.inner.lock.store(UPGRADED, Ordering::Release);
        let inner = self.inner;
        forget(self);
        RwLockUpgradableGuard {
            inner,
            data: unsafe { &*inner.data.get() },
        }
    }
    /// Leaks the guard: the lock stays write-locked forever and a `'rwlock`
    /// mutable reference to the data is returned.
    #[inline]
    pub fn leak(this: Self) -> &'rwlock mut T {
        // No `mut` binding needed: `data` is a raw pointer that is merely
        // copied out (the previous `let mut` triggered unused_mut).
        let this = ManuallyDrop::new(this);
        unsafe { &mut *this.data }
    }
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLockWriteGuard<'_, T> {
    // Delegate to the guarded value, preserving the caller's format flags.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<T: ?Sized + fmt::Display> fmt::Display for RwLockWriteGuard<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
impl<T: ?Sized> Deref for RwLockReadGuard<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the guard holds a reader count, so no writer can alias.
        unsafe { &*self.data }
    }
}
impl<T: ?Sized> Deref for RwLockUpgradableGuard<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the guard holds the UPGRADED bit, excluding writers.
        unsafe { &*self.data }
    }
}
impl<T: ?Sized> Deref for RwLockWriteGuard<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the guard holds the WRITER bit, granting exclusive access.
        unsafe { &*self.data }
    }
}
impl<T: ?Sized> DerefMut for RwLockWriteGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: exclusive access as above, plus `&mut self` prevents other
        // references through this guard.
        unsafe { &mut *self.data }
    }
}
impl<T: ?Sized> Drop for RwLockReadGuard<'_, T> {
    fn drop(&mut self) {
        // At least one reader count (bits above WRITER/UPGRADED) must exist.
        debug_assert!(self.lock.load(Ordering::Relaxed) & !(WRITER | UPGRADED) > 0);
        self.lock.fetch_sub(READER, Ordering::Release);
    }
}
impl<T: ?Sized> Drop for RwLockUpgradableGuard<'_, T> {
    fn drop(&mut self) {
        // Exactly the UPGRADED bit (and not WRITER) must be set.
        debug_assert_eq!(
            self.inner.lock.load(Ordering::Relaxed) & (WRITER | UPGRADED),
            UPGRADED
        );
        self.inner.lock.fetch_sub(UPGRADED, Ordering::AcqRel);
    }
}
impl<T: ?Sized> Drop for RwLockWriteGuard<'_, T> {
    fn drop(&mut self) {
        debug_assert_eq!(self.inner.lock.load(Ordering::Relaxed) & WRITER, WRITER);
        // Clear WRITER plus any UPGRADED bit left behind by a failed
        // `try_upgradeable_read` that ran while this writer held the lock.
        self.inner
            .lock
            .fetch_and(!(WRITER | UPGRADED), Ordering::Release);
    }
}
// Adapters exposing this lock through the `lock_api` raw-lock traits.
// The pattern throughout: acquiring forgets the guard so the raw state stays
// held, and releasing materializes a throwaway guard (over `()`) so its
// `Drop` performs the state transition.
#[cfg(feature = "lock_api")]
unsafe impl lock_api_crate::RawRwLock for RwLock<()> {
    type GuardMarker = lock_api_crate::GuardSend;
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self::new(());
    #[inline(always)]
    fn lock_exclusive(&self) {
        core::mem::forget(self.write());
    }
    #[inline(always)]
    fn try_lock_exclusive(&self) -> bool {
        self.try_write().map(core::mem::forget).is_some()
    }
    #[inline(always)]
    unsafe fn unlock_exclusive(&self) {
        // Dropping a synthesized write guard clears the WRITER state.
        drop(RwLockWriteGuard {
            inner: self,
            data: &mut (),
        });
    }
    #[inline(always)]
    fn lock_shared(&self) {
        core::mem::forget(self.read());
    }
    #[inline(always)]
    fn try_lock_shared(&self) -> bool {
        self.try_read().map(core::mem::forget).is_some()
    }
    #[inline(always)]
    unsafe fn unlock_shared(&self) {
        // Dropping a synthesized read guard releases one reader count.
        drop(RwLockReadGuard {
            lock: &self.lock,
            data: &(),
        });
    }
    #[inline(always)]
    fn is_locked(&self) -> bool {
        // Any nonzero state means a reader, upgradeable reader, or writer.
        self.lock.load(Ordering::Relaxed) != 0
    }
}
#[cfg(feature = "lock_api")]
unsafe impl lock_api_crate::RawRwLockUpgrade for RwLock<()> {
    #[inline(always)]
    fn lock_upgradable(&self) {
        core::mem::forget(self.upgradeable_read());
    }
    #[inline(always)]
    fn try_lock_upgradable(&self) -> bool {
        self.try_upgradeable_read().map(core::mem::forget).is_some()
    }
    #[inline(always)]
    unsafe fn unlock_upgradable(&self) {
        drop(RwLockUpgradableGuard {
            inner: self,
            data: &(),
        });
    }
    #[inline(always)]
    unsafe fn upgrade(&self) {
        // Rebuild the guard the caller previously forgot, then upgrade it.
        let tmp_guard = RwLockUpgradableGuard {
            inner: self,
            data: &(),
        };
        core::mem::forget(tmp_guard.upgrade());
    }
    #[inline(always)]
    unsafe fn try_upgrade(&self) -> bool {
        let tmp_guard = RwLockUpgradableGuard {
            inner: self,
            data: &(),
        };
        tmp_guard.try_upgrade().map(core::mem::forget).is_ok()
    }
}
#[cfg(feature = "lock_api")]
unsafe impl lock_api_crate::RawRwLockDowngrade for RwLock<()> {
    unsafe fn downgrade(&self) {
        let tmp_guard = RwLockWriteGuard {
            inner: self,
            data: &mut (),
        };
        core::mem::forget(tmp_guard.downgrade());
    }
}
#[cfg(feature = "lock_api")]
unsafe impl lock_api_crate::RawRwLockUpgradeDowngrade for RwLock<()> {
    unsafe fn downgrade_upgradable(&self) {
        let tmp_guard = RwLockUpgradableGuard {
            inner: self,
            data: &(),
        };
        core::mem::forget(tmp_guard.downgrade());
    }
    unsafe fn downgrade_to_upgradable(&self) {
        let tmp_guard = RwLockWriteGuard {
            inner: self,
            data: &mut (),
        };
        core::mem::forget(tmp_guard.downgrade_to_upgradeable());
    }
}
#[cfg(test)]
mod tests {
    use std::prelude::v1::*;
    use std::mem::forget;
    use std::sync::Arc;
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::thread;
    type RwLock<T> = super::RwLock<T>;
    #[derive(Eq, PartialEq, Debug)]
    struct NonCopy(i32);
    /// Basic lock/unlock cycles, including two simultaneous readers.
    #[test]
    fn smoke() {
        let l = RwLock::new(());
        drop(l.read());
        drop(l.write());
        drop((l.read(), l.read()));
        drop(l.write());
    }
    /// A write lock taken while a panic unwinds must still work and be
    /// released afterwards.
    #[test]
    fn test_rw_access_in_unwind() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _ = thread::spawn(move || {
            struct Unwinder {
                i: Arc<RwLock<isize>>,
            }
            impl Drop for Unwinder {
                fn drop(&mut self) {
                    let mut lock = self.i.write();
                    *lock += 1;
                }
            }
            let _u = Unwinder { i: arc2 };
            panic!();
        })
        .join();
        let lock = arc.read();
        assert_eq!(*lock, 2);
    }
    /// The lock works over unsized targets (`RwLock<[i32]>`).
    #[test]
    fn test_rwlock_unsized() {
        let rw: &RwLock<[i32]> = &RwLock::new([1, 2, 3]);
        {
            let b = &mut *rw.write();
            b[0] = 4;
            b[2] = 5;
        }
        let comp: &[i32] = &[4, 2, 5];
        assert_eq!(&*rw.read(), comp);
    }
    /// `try_write` must fail while a read guard is alive.
    #[test]
    fn test_rwlock_try_write() {
        use std::mem::drop;
        let lock = RwLock::new(0isize);
        let read_guard = lock.read();
        let write_result = lock.try_write();
        match write_result {
            None => (),
            Some(_) => panic!("try_write should not succeed while read_guard is in scope"),
        }
        drop(read_guard);
    }
    /// `try_read` must fail while a (leaked) write lock is held.
    #[test]
    fn test_rw_try_read() {
        let m = RwLock::new(0);
        forget(m.write());
        assert!(m.try_read().is_none());
    }
    #[test]
    fn test_into_inner() {
        let m = RwLock::new(NonCopy(10));
        assert_eq!(m.into_inner(), NonCopy(10));
    }
    /// `into_inner` must hand ownership to the caller, dropping the value
    /// exactly once, when the caller drops it.
    #[test]
    fn test_into_inner_drop() {
        struct Foo(Arc<AtomicUsize>);
        impl Drop for Foo {
            fn drop(&mut self) {
                self.0.fetch_add(1, Ordering::SeqCst);
            }
        }
        let num_drops = Arc::new(AtomicUsize::new(0));
        let m = RwLock::new(Foo(num_drops.clone()));
        assert_eq!(num_drops.load(Ordering::SeqCst), 0);
        {
            let _inner = m.into_inner();
            assert_eq!(num_drops.load(Ordering::SeqCst), 0);
        }
        assert_eq!(num_drops.load(Ordering::SeqCst), 1);
    }
    /// Each `force_read_decrement` releases exactly one leaked reader.
    #[test]
    fn test_force_read_decrement() {
        let m = RwLock::new(());
        forget(m.read());
        forget(m.read());
        forget(m.read());
        assert!(m.try_write().is_none());
        unsafe {
            m.force_read_decrement();
            m.force_read_decrement();
        }
        assert!(m.try_write().is_none());
        unsafe {
            m.force_read_decrement();
        }
        assert!(m.try_write().is_some());
    }
    /// `force_write_unlock` releases a leaked write lock.
    #[test]
    fn test_force_write_unlock() {
        let m = RwLock::new(());
        forget(m.write());
        assert!(m.try_read().is_none());
        unsafe {
            m.force_write_unlock();
        }
        assert!(m.try_read().is_some());
    }
    /// Exercises the upgradeable-read state machine: exclusion rules while
    /// an upgradeable guard is held, downgrade from a write guard, and a
    /// successful upgrade once the lock is otherwise free.
    #[test]
    fn test_upgrade_downgrade() {
        let m = RwLock::new(());
        {
            let _r = m.read();
            let upg = m.try_upgradeable_read().unwrap();
            assert!(m.try_read().is_none());
            assert!(m.try_write().is_none());
            // Upgrade fails while `_r` holds a reader count.
            assert!(upg.try_upgrade().is_err());
        }
        {
            let w = m.write();
            assert!(m.try_upgradeable_read().is_none());
            let _r = w.downgrade();
            assert!(m.try_upgradeable_read().is_some());
            assert!(m.try_read().is_some());
            assert!(m.try_write().is_none());
        }
        {
            let _u = m.upgradeable_read();
            assert!(m.try_upgradeable_read().is_none());
        }
        assert!(m.try_upgradeable_read().unwrap().try_upgrade().is_ok());
    }
}