#![deny(clippy::missing_docs_in_private_items)]
#![deny(missing_docs)]
use std::sync::atomic::{fence, AtomicU32, Ordering};
use super::err::{Error, MMFResult};
/// Contract for locks that guard access to a memory-mapped file (MMF) region.
///
/// The lock state lives behind a raw byte pointer supplied by the caller
/// (see [`MMFLock::from_existing`] / [`MMFLock::from_raw`]); all fallible
/// operations report problems through [`MMFResult`].
pub trait MMFLock {
    /// Attempts to take a read lock once, without blocking or retrying.
    ///
    /// # Errors
    /// Implementation-defined; e.g. uninitialized state or a held write lock.
    fn lock_read(&self) -> MMFResult<()>;
    /// Releases one previously acquired read lock.
    ///
    /// # Errors
    /// Implementation-defined; e.g. unlocking when no read lock is held.
    fn unlock_read(&self) -> MMFResult<()>;
    /// Attempts to take the exclusive write lock once, without blocking.
    ///
    /// # Errors
    /// Implementation-defined; e.g. an existing read or write lock.
    fn lock_write(&self) -> MMFResult<()>;
    /// Releases the exclusive write lock.
    ///
    /// # Errors
    /// Implementation-defined; e.g. releasing a lock that is not held.
    fn unlock_write(&self) -> MMFResult<()>;
    /// Reports whether the lock state has been initialized for use.
    fn initialized(&self) -> bool;
    /// Repeatedly attempts [`MMFLock::lock_read`], giving up after
    /// `max_tries` failed attempts.
    ///
    /// # Errors
    /// Implementation-defined; expected to surface a "max tries reached"
    /// error once the retry budget is exhausted, and to propagate
    /// non-retryable errors immediately.
    fn spin_and_lock_read(&self, max_tries: usize) -> MMFResult<()>
    where
        Self: Sized;
    /// Repeatedly attempts [`MMFLock::lock_write`], giving up after
    /// `max_tries` failed attempts.
    ///
    /// # Errors
    /// Implementation-defined; expected to surface a "max tries reached"
    /// error once the retry budget is exhausted, and to propagate
    /// non-retryable errors immediately.
    fn spin_and_lock_write(&self, max_tries: usize) -> MMFResult<()>
    where
        Self: Sized;
    /// Constructs a lock view over state that some other party has already
    /// initialized; the pointed-to state is not modified.
    ///
    /// # Safety
    /// `pointer` must be non-null, properly aligned, and valid for reads and
    /// writes of the lock state for the lifetime of the returned value.
    unsafe fn from_existing(pointer: *mut u8) -> Self
    where
        Self: Sized;
    /// Constructs a lock over raw (possibly garbage) memory, resetting the
    /// state so the lock starts out uninitialized.
    ///
    /// # Safety
    /// Same pointer requirements as [`MMFLock::from_existing`]; additionally
    /// no other party may be relying on the current contents, as the state
    /// is overwritten.
    unsafe fn from_raw(pointer: *mut u8) -> Self
    where
        Self: Sized;
    /// Marks the lock state as initialized and ready for use.
    fn set_init(&self);
    /// Marks the lock state as initialized, returning `self` for chaining.
    fn initialize(self) -> Self
    where
        Self: Sized;
}
/// A readers-writer lock packed into a single [`AtomicU32`] borrowed from
/// external memory (e.g. a view into a memory-mapped file).
///
/// Bit layout (see the associated mask constants on the inherent impl):
/// the high byte carries the initialization/write-lock flags and the low
/// 24 bits count active readers.
#[cfg(feature = "impl_lock")]
#[derive(Debug)]
pub struct RWLock<'a> {
    // Shared atomic word holding the entire lock state; borrowed from the
    // underlying mapping rather than owned by this struct.
    chunk: &'a AtomicU32,
}
#[cfg(feature = "impl_lock")]
impl RWLock<'_> {
    /// Bit pattern marking the word as *not yet initialized*: the entire
    /// high byte set. A fresh lock stores this value; initialization clears it.
    pub const INITIALIZE_MASK: u32 = 255 << 24;
    /// The single topmost bit; set while a writer holds the lock.
    pub const WRITE_LOCK_MASK: u32 = 0b1 << 31;
    /// The low 24 bits, used as the active-reader counter.
    pub const READ_LOCK_MASK: u32 = !Self::INITIALIZE_MASK;

    /// Returns whether a raw state word has been initialized — i.e. the
    /// high byte is no longer the fully-set "uninitialized" pattern.
    fn initialized(chunk: u32) -> bool {
        chunk & Self::INITIALIZE_MASK != Self::INITIALIZE_MASK
    }

    /// Returns whether at least one reader is counted in the low 24 bits.
    fn readlocked(chunk: u32) -> bool {
        chunk & Self::READ_LOCK_MASK != 0
    }

    /// Returns whether the writer bit is set in a raw state word.
    fn writelocked(chunk: u32) -> bool {
        chunk & Self::WRITE_LOCK_MASK != 0
    }
}
#[cfg(feature = "impl_lock")]
impl MMFLock for RWLock<'_> {
    /// Wraps a lock word that some other party has already initialized;
    /// the pointed-to state is left untouched.
    ///
    /// # Safety
    /// `pointer` must be valid for 4-byte reads and writes, aligned for
    /// `AtomicU32`, and must outlive the returned lock. Null is caught with
    /// a panic, but dangling or misaligned pointers are not.
    unsafe fn from_existing(pointer: *mut u8) -> Self {
        if pointer.is_null() {
            panic!("Never, ever pass a null pointer into a lock!")
        }
        Self { chunk: AtomicU32::from_ptr(pointer.cast()) }
    }

    /// Claims raw memory as a fresh lock, stamping it with
    /// [`RWLock::INITIALIZE_MASK`] so it reads as *uninitialized* until
    /// [`MMFLock::set_init`] is called.
    ///
    /// # Safety
    /// Same pointer requirements as [`MMFLock::from_existing`]; additionally
    /// this overwrites the word, so no other user may rely on its previous
    /// contents.
    unsafe fn from_raw(pointer: *mut u8) -> Self {
        if pointer.is_null() {
            panic!("Never, ever pass a null pointer into a lock!")
        }
        let lock = Self { chunk: AtomicU32::from_ptr(pointer.cast()) };
        // Stamp the "not yet initialized" pattern (whole high byte set).
        lock.chunk.store(Self::INITIALIZE_MASK, Ordering::Release);
        lock
    }

    /// Flips the state from "uninitialized" to "initialized, unlocked".
    ///
    /// The compare-exchange only succeeds while the word still holds exactly
    /// [`RWLock::INITIALIZE_MASK`]; the result is deliberately discarded so
    /// calling this twice (or on an already-live lock) is a harmless no-op.
    fn set_init(&self) {
        _ = self.chunk.compare_exchange(Self::INITIALIZE_MASK, 0, Ordering::Release, Ordering::Relaxed);
    }

    /// Chaining variant of [`MMFLock::set_init`]; consumes and returns `self`.
    fn initialize(self) -> Self {
        self.set_init();
        self
    }

    /// Returns whether the initialization byte has been cleared (see
    /// [`MMFLock::set_init`]).
    fn initialized(&self) -> bool {
        Self::initialized(self.chunk.load(Ordering::Acquire))
    }

    /// Takes a read lock by incrementing the 24-bit reader counter via a
    /// CAS retry loop. Fails fast instead of blocking.
    ///
    /// # Errors
    /// - [`Error::Uninitialized`] if the lock was never initialized.
    /// - [`Error::WriteLocked`] if a writer currently holds the lock.
    /// - [`Error::MaxReaders`] if the reader counter is saturated.
    fn lock_read(&self) -> MMFResult<()> {
        loop {
            let chunk = self.chunk.load(Ordering::Acquire);
            if !Self::initialized(chunk) {
                return Err(Error::Uninitialized);
            }
            if Self::writelocked(chunk) {
                return Err(Error::WriteLocked);
            }
            // All 24 counter bits set: no room for another reader.
            if (chunk & Self::READ_LOCK_MASK) == Self::READ_LOCK_MASK {
                return Err(Error::MaxReaders);
            }
            // Weak CAS may fail spuriously; the loop re-reads and retries.
            if self.chunk.compare_exchange_weak(chunk, chunk + 1, Ordering::AcqRel, Ordering::Acquire).is_ok() {
                break;
            }
        }
        // NOTE(review): SeqCst fence after an already-AcqRel CAS — presumably
        // intended to strengthen ordering for cross-process use over the
        // mapped memory; confirm whether it is actually required.
        fence(Ordering::SeqCst);
        Ok(())
    }

    /// Releases one read lock by decrementing the reader counter via a CAS
    /// retry loop.
    ///
    /// # Errors
    /// - [`Error::Uninitialized`] if the lock was never initialized.
    /// - [`Error::WriteLocked`] if a writer currently holds the lock.
    /// - [`Error::GeneralFailure`] if no read lock is held to release
    ///   (the word is fully zero: initialized, unlocked, zero readers).
    fn unlock_read(&self) -> MMFResult<()> {
        loop {
            let chunk = self.chunk.load(Ordering::Acquire);
            if !Self::initialized(chunk) {
                return Err(Error::Uninitialized);
            }
            if Self::writelocked(chunk) {
                return Err(Error::WriteLocked);
            }
            // Zero word = initialized and completely unlocked, so there is
            // no reader to release.
            if chunk == 0 {
                return Err(Error::GeneralFailure);
            }
            if self.chunk.compare_exchange_weak(chunk, chunk - 1, Ordering::AcqRel, Ordering::Acquire).is_ok() {
                break;
            }
        }
        fence(Ordering::SeqCst);
        Ok(())
    }

    /// Takes the exclusive write lock by setting the top bit via a CAS retry
    /// loop. Fails fast instead of blocking.
    ///
    /// # Errors
    /// - [`Error::Uninitialized`] if the lock was never initialized.
    /// - [`Error::WriteLocked`] if another writer already holds the lock.
    /// - [`Error::ReadLocked`] if any readers are active.
    fn lock_write(&self) -> MMFResult<()> {
        loop {
            let chunk = self.chunk.load(Ordering::Acquire);
            if !Self::initialized(chunk) {
                return Err(Error::Uninitialized);
            }
            if Self::writelocked(chunk) {
                return Err(Error::WriteLocked);
            }
            if Self::readlocked(chunk) {
                return Err(Error::ReadLocked);
            }
            if self
                .chunk
                .compare_exchange_weak(chunk, chunk | Self::WRITE_LOCK_MASK, Ordering::AcqRel, Ordering::Acquire)
                .is_ok()
            {
                break;
            }
        }
        fence(Ordering::SeqCst);
        Ok(())
    }

    /// Releases the exclusive write lock by clearing the top bit (XOR with
    /// the mask, valid because the bit is verified set first).
    ///
    /// # Errors
    /// - [`Error::Uninitialized`] if the lock was never initialized.
    /// - [`Error::WriteLocked`] if the lock is *not* currently write-locked.
    ///   NOTE(review): `WriteLocked` here signals the opposite condition of
    ///   its name — presumably reused as a generic "write-lock state error";
    ///   verify against the `Error` enum's documentation.
    /// - [`Error::ReadLocked`] if readers are active (an inconsistent state,
    ///   since readers and the writer bit should be mutually exclusive).
    fn unlock_write(&self) -> MMFResult<()> {
        loop {
            let chunk = self.chunk.load(Ordering::Acquire);
            if !Self::initialized(chunk) {
                return Err(Error::Uninitialized);
            }
            if !Self::writelocked(chunk) {
                return Err(Error::WriteLocked);
            }
            if Self::readlocked(chunk) {
                return Err(Error::ReadLocked);
            }
            if self
                .chunk
                .compare_exchange_weak(chunk, chunk ^ Self::WRITE_LOCK_MASK, Ordering::AcqRel, Ordering::Acquire)
                .is_ok()
            {
                break;
            }
        }
        fence(Ordering::SeqCst);
        Ok(())
    }

    /// Spins on [`MMFLock::lock_read`] until it succeeds, retrying only on
    /// [`Error::WriteLocked`], for at most `max_tries` attempts.
    ///
    /// # Errors
    /// - [`Error::MaxTriesReached`] once the retry budget is exhausted.
    /// - Any non-retryable error from `lock_read`, propagated immediately.
    fn spin_and_lock_read(&self, max_tries: usize) -> MMFResult<()> {
        let mut tries = 0;
        // The match is the loop condition: `true` means "retry", `Ok` ends
        // the loop, and any other error returns from the function directly.
        while match self.lock_read() {
            Ok(_) => false,
            Err(Error::WriteLocked) => true,
            err => return err,
        } {
            tries += 1;
            if tries >= max_tries {
                return Err(Error::MaxTriesReached);
            }
        }
        Ok(())
    }

    /// Spins on [`MMFLock::lock_write`] until it succeeds, retrying while
    /// the lock is read- or write-locked, for at most `max_tries` attempts.
    ///
    /// # Errors
    /// - [`Error::MaxTriesReached`] once the retry budget is exhausted.
    /// - Any non-retryable error from `lock_write`, propagated immediately.
    fn spin_and_lock_write(&self, max_tries: usize) -> MMFResult<()> {
        let mut tries = 0;
        // Same while-match idiom as `spin_and_lock_read`, but both lock
        // contention errors count as retryable here.
        while match self.lock_write() {
            Ok(_) => false,
            Err(Error::WriteLocked | Error::ReadLocked) => true,
            err => return err,
        } {
            tries += 1;
            if tries >= max_tries {
                return Err(Error::MaxTriesReached);
            }
        }
        Ok(())
    }
}