use core::sync::atomic::{AtomicIsize, Ordering};

/// Borrow-state counter: positive values count shared (read) borrows,
/// negative values count exclusive (write) borrows, zero means unborrowed.
pub type Lock = AtomicIsize;

/// Creates a new lock in the unborrowed state.
pub const fn new_lock() -> Lock {
    Lock::new(0)
}

/// High non-sign bit; once it flips, the borrow count is close to overflow.
const REF_LIMIT_FLAG: isize = 1 + (isize::MAX >> 1);

/// Shared lock backing no-op guards; never actually acquired or released.
pub(crate) static DUMMY_LOCK: Lock = new_lock();

fn is_dummy(lock: &Lock) -> bool {
    core::ptr::eq(lock, &DUMMY_LOCK)
}
/// Returns `true` if the counter value represents at least one shared borrow.
#[inline(always)]
pub fn is_reading(v: isize) -> bool {
    v > 0
}

/// Returns `true` if the counter value represents an exclusive borrow.
#[inline(always)]
pub fn is_writing(v: isize) -> bool {
    v < 0
}

/// Returns `true` if the counter value represents any borrow at all.
#[inline(always)]
pub fn is_borrowed(v: isize) -> bool {
    v != 0
}

/// Returns `true` if the shared borrow count has reached the limit flag.
#[inline(always)]
pub fn check_read_refs_count(v: isize) -> bool {
    v & REF_LIMIT_FLAG == REF_LIMIT_FLAG
}

/// Returns `true` if the exclusive borrow count has reached the limit
/// (for negative values the flag bit clears once the count drops too low).
#[inline(always)]
pub fn check_write_refs_count(v: isize) -> bool {
    v & REF_LIMIT_FLAG == 0
}
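
// A minimal sketch (added here, not part of the original module) of how the
// counter encoding behaves; assumes tests run on a host target where the
// standard test harness is available.
#[cfg(test)]
mod encoding_sketch {
    use super::*;

    #[test]
    fn encoding_predicates() {
        assert!(is_reading(1) && !is_writing(1) && is_borrowed(1));
        assert!(is_writing(-1) && !is_reading(-1) && is_borrowed(-1));
        assert!(!is_borrowed(0));
        // The read-limit flag trips once the reader count reaches the flag bit.
        assert!(check_read_refs_count(REF_LIMIT_FLAG));
        assert!(!check_read_refs_count(REF_LIMIT_FLAG - 1));
        // For writers (negative values) the flag bit *clearing* marks the limit.
        assert!(!check_write_refs_count(-1));
        assert!(check_write_refs_count(-REF_LIMIT_FLAG - 1));
    }
}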
/// Attempts to acquire a shared borrow. Returns `false` if an exclusive
/// borrow is currently held. Panics if the reader count would overflow.
#[inline]
pub fn try_borrow(lock: &Lock) -> bool {
    debug_assert!(!is_dummy(lock), "dummy lock cannot be used from outside");
    loop {
        let val = lock.load(Ordering::Relaxed);
        if is_writing(val) {
            return false;
        }
        if check_read_refs_count(val) {
            too_many_refs();
        }
        // Weak CAS: spurious failures and races with other threads
        // simply retry the loop with a fresh value.
        let ok = lock
            .compare_exchange_weak(val, val + 1, Ordering::Acquire, Ordering::Relaxed)
            .is_ok();
        if ok {
            return true;
        }
    }
}
/// Increments the shared borrow count for an already-held borrow.
#[inline]
pub fn clone_borrow(lock: &Lock) {
    if is_dummy(lock) {
        return;
    }
    let old = lock.fetch_add(1, Ordering::Relaxed);
    if check_read_refs_count(old) {
        // Undo the increment before panicking to keep the counter sane.
        lock.fetch_sub(1, Ordering::Relaxed);
        too_many_refs();
    }
}
/// Releases one shared borrow.
#[inline]
pub fn release_borrow(lock: &Lock) {
    if is_dummy(lock) {
        return;
    }
    debug_assert!(is_reading(lock.load(Ordering::Relaxed)));
    lock.fetch_sub(1, Ordering::Release);
}
/// Attempts to acquire an exclusive borrow. Succeeds only when the lock
/// is completely unborrowed.
#[inline]
pub fn try_borrow_mut(lock: &Lock) -> bool {
    debug_assert!(!is_dummy(lock), "dummy lock cannot be used from outside");
    lock.compare_exchange(0, -1, Ordering::Acquire, Ordering::Relaxed)
        .is_ok()
}
/// Increments the exclusive borrow count for an already-held borrow.
#[inline]
pub fn clone_borrow_mut(lock: &Lock) {
    if is_dummy(lock) {
        return;
    }
    let old = lock.fetch_sub(1, Ordering::Relaxed);
    if check_write_refs_count(old) {
        // Undo the decrement before panicking to keep the counter sane.
        lock.fetch_add(1, Ordering::Relaxed);
        too_many_refs();
    }
}
/// Releases one exclusive borrow.
#[inline]
pub fn release_borrow_mut(lock: &Lock) {
    if is_dummy(lock) {
        return;
    }
    debug_assert!(is_writing(lock.load(Ordering::Relaxed)));
    lock.fetch_add(1, Ordering::Release);
}
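
// A hedged sketch of the raw borrow protocol (test-only, not part of the
// original module): shared borrows stack and exclude writers, and an
// exclusive borrow requires the counter to be exactly zero.
#[cfg(test)]
mod protocol_sketch {
    use super::*;

    #[test]
    fn readers_exclude_writer_and_vice_versa() {
        let lock = new_lock();
        assert!(try_borrow(&lock));
        assert!(try_borrow(&lock)); // shared borrows stack
        assert!(!try_borrow_mut(&lock)); // writer blocked by readers
        release_borrow(&lock);
        release_borrow(&lock);
        assert!(try_borrow_mut(&lock)); // now unborrowed, writer succeeds
        assert!(!try_borrow(&lock)); // readers blocked by the writer
        release_borrow_mut(&lock);
        assert!(!is_borrowed(lock.load(Ordering::Relaxed)));
    }
}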
/// Diverging cold path for borrow-count overflow. Kept as a plain `fn`:
/// it is never called at compile time, and older toolchains reject
/// `#[track_caller]` on a `const fn`.
#[inline(never)]
#[track_caller]
#[cold]
fn too_many_refs() -> ! {
    panic!("Too many `Ref` instances created");
}
/// RAII guard holding one shared borrow on a [`Lock`].
#[repr(transparent)]
pub struct AtomicBorrow<'a> {
    lock: &'a Lock,
}

impl<'a> AtomicBorrow<'a> {
    /// Acquires a shared borrow, or returns `None` if exclusively borrowed.
    #[inline(always)]
    pub fn try_new(lock: &'a Lock) -> Option<Self> {
        if try_borrow(lock) {
            Some(AtomicBorrow { lock })
        } else {
            None
        }
    }

    /// Rebuilds a guard for a borrow whose previous guard was leaked.
    ///
    /// # Safety
    ///
    /// A shared borrow on `lock` must currently be held with its guard
    /// leaked (e.g. via `mem::forget`), so that this guard takes over
    /// exactly one outstanding count.
    #[inline(always)]
    pub unsafe fn restore_leaked(lock: &'a Lock) -> Self {
        AtomicBorrow { lock }
    }

    /// Returns a no-op guard bound to the shared dummy lock.
    #[inline(always)]
    pub fn dummy() -> Self {
        AtomicBorrow { lock: &DUMMY_LOCK }
    }

    /// Duplicates this guard, incrementing the shared borrow count.
    #[inline(always)]
    pub fn clone(&self) -> AtomicBorrow<'a> {
        clone_borrow(self.lock);
        AtomicBorrow { lock: self.lock }
    }
}

impl<'a> Drop for AtomicBorrow<'a> {
    #[inline(always)]
    fn drop(&mut self) {
        release_borrow(self.lock)
    }
}
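
// A usage sketch for the shared guard (test-only, not part of the original
// module): guards can be duplicated with `clone`, and the lock frees once
// every guard has been dropped.
#[cfg(test)]
mod borrow_guard_sketch {
    use super::*;

    #[test]
    fn shared_guards_coexist_and_release() {
        let lock = new_lock();
        let first = AtomicBorrow::try_new(&lock).expect("unborrowed lock");
        let second = first.clone();
        assert!(is_reading(lock.load(Ordering::Relaxed)));
        assert!(AtomicBorrowMut::try_new(&lock).is_none()); // writer blocked
        drop(first);
        drop(second);
        assert!(!is_borrowed(lock.load(Ordering::Relaxed)));
    }
}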
/// RAII guard holding one exclusive borrow on a [`Lock`].
#[repr(transparent)]
pub struct AtomicBorrowMut<'a> {
    lock: &'a Lock,
}

impl<'a> AtomicBorrowMut<'a> {
    /// Acquires an exclusive borrow, or returns `None` if already borrowed.
    #[inline(always)]
    pub fn try_new(lock: &'a Lock) -> Option<Self> {
        if try_borrow_mut(lock) {
            Some(AtomicBorrowMut { lock })
        } else {
            None
        }
    }

    /// Rebuilds a guard for a borrow whose previous guard was leaked.
    ///
    /// # Safety
    ///
    /// An exclusive borrow on `lock` must currently be held with its guard
    /// leaked (e.g. via `mem::forget`), so that this guard takes over
    /// exactly one outstanding count.
    #[inline(always)]
    pub unsafe fn restore_leaked(lock: &'a Lock) -> Self {
        AtomicBorrowMut { lock }
    }

    /// Returns a no-op guard bound to the shared dummy lock.
    #[inline(always)]
    pub fn dummy() -> Self {
        AtomicBorrowMut { lock: &DUMMY_LOCK }
    }

    /// Duplicates this guard, incrementing the exclusive borrow count.
    #[inline(always)]
    pub fn clone(&self) -> AtomicBorrowMut<'a> {
        clone_borrow_mut(self.lock);
        AtomicBorrowMut { lock: self.lock }
    }
}

impl<'a> Drop for AtomicBorrowMut<'a> {
    #[inline(always)]
    fn drop(&mut self) {
        release_borrow_mut(self.lock);
    }
}
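
// A hedged sketch of the leak/restore round trip this API implies (test-only):
// forgetting a guard leaves its count held, and `restore_leaked` (unsafe)
// adopts exactly that outstanding count so dropping the new guard releases it.
#[cfg(test)]
mod restore_leaked_sketch {
    use super::*;

    #[test]
    fn exclusive_guard_leak_and_restore() {
        let lock = new_lock();
        let guard = AtomicBorrowMut::try_new(&lock).expect("unborrowed lock");
        assert!(is_writing(lock.load(Ordering::Relaxed)));
        core::mem::forget(guard); // leak: the exclusive count stays held
        // Safety: exactly one leaked exclusive borrow is outstanding.
        let restored = unsafe { AtomicBorrowMut::restore_leaked(&lock) };
        drop(restored);
        assert!(!is_borrowed(lock.load(Ordering::Relaxed)));
    }
}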