pub mod once;
pub use self::once::{InitOnce, Lazy};
use crate::{
blocking,
loom::sync::atomic::{AtomicBool, AtomicUsize, Ordering::*},
util::{fmt, Backoff},
};
use blocking::{RawMutex, RawRwLock};
// A simple test-and-set spinlock, used as this crate's blocking
// `RawMutex` implementation.
#[derive(Debug)]
pub struct Spinlock {
// `true` while the lock is held; accessed only via atomic operations.
locked: AtomicBool,
}
// A readers-writer spinlock.
//
// The whole lock state is packed into one `usize`: bit 0 is the writer
// flag (`WRITER`), and each active reader adds `READER` (1 << 1), so the
// reader count is `state >> 1`.
pub struct RwSpinlock {
state: AtomicUsize,
}
impl Spinlock {
loom_const_fn! {
// Returns a new `Spinlock` in the unlocked state.
pub fn new() -> Self {
Self { locked: AtomicBool::new(false) }
}
}
// Returns `true` if the lock is currently held.
//
// This is a `Relaxed` load, so the answer may already be stale when the
// caller acts on it; it is used as a cheap spin-wait hint in `lock`.
#[inline]
#[must_use]
fn is_locked(&self) -> bool {
self.locked.load(Relaxed)
}
}
impl Default for Spinlock {
fn default() -> Self {
Self::new()
}
}
// `Spinlock` implements the raw mutex interface with a
// test-and-test-and-set spin loop.
unsafe impl RawMutex for Spinlock {
type GuardMarker = ();
// Spins until the lock is acquired.
#[cfg_attr(test, track_caller)]
fn lock(&self) {
let mut boff = Backoff::default();
// Attempt the lock with a CAS; on failure, spin on the cheap
// `Relaxed` read in `is_locked` until the lock looks free, then
// retry the CAS. This keeps contended spinning on a shared read
// instead of repeatedly issuing CAS write attempts.
while test_dbg!(self
.locked
.compare_exchange(false, true, Acquire, Acquire)
.is_err())
{
while test_dbg!(self.is_locked()) {
boff.spin();
}
}
}
// Attempts to acquire the lock with a single CAS; returns `true` on
// success, `false` if it was already held. Never spins.
#[cfg_attr(test, track_caller)]
#[inline]
fn try_lock(&self) -> bool {
test_dbg!(self
.locked
.compare_exchange(false, true, Acquire, Acquire)
.is_ok())
}
// Releases the lock.
//
// Safety: per the `RawMutex` contract, the caller must currently hold
// this lock.
#[cfg_attr(test, track_caller)]
#[inline]
unsafe fn unlock(&self) {
test_dbg!(self.locked.store(false, Release));
}
// Forwards to the inherent `Spinlock::is_locked` (a `Relaxed` load).
#[inline]
fn is_locked(&self) -> bool {
Spinlock::is_locked(self)
}
}
// Const initialization is only possible outside loom builds, since
// loom's atomics cannot be constructed in const contexts.
#[cfg(not(loom))]
impl blocking::ConstInit for Spinlock {
#[allow(clippy::declare_interior_mutable_const)]
const INIT: Self = Spinlock::new();
}
// `RwSpinlock` state when no writer and no readers hold the lock.
const UNLOCKED: usize = 0;
// Bit 0: set while a writer holds the lock exclusively.
const WRITER: usize = 1 << 0;
// Each reader adds this value to the state, so the reader count occupies
// every bit above the writer bit and is recovered as `state >> 1`.
const READER: usize = 1 << 1;
impl RwSpinlock {
loom_const_fn! {
// Returns a new `RwSpinlock` with no readers or writers.
pub(crate) fn new() -> Self {
Self {
state: AtomicUsize::new(UNLOCKED),
}
}
}
// Returns the number of readers currently holding the lock, by shifting
// the writer bit out of the packed state. `Relaxed` load: this is a
// point-in-time snapshot, not a synchronization point.
pub(crate) fn reader_count(&self) -> usize {
self.state.load(Relaxed) >> 1
}
}
unsafe impl RawRwLock for RwSpinlock {
type GuardMarker = ();
#[cfg_attr(test, track_caller)]
fn lock_shared(&self) {
let mut boff = Backoff::new();
while !self.try_lock_shared() {
boff.spin();
}
}
#[cfg_attr(test, track_caller)]
fn try_lock_shared(&self) -> bool {
let state = test_dbg!(self.state.fetch_add(READER, Acquire));
assert!(
state < usize::MAX - (READER * 2),
"read lock counter overflow! this is very bad"
);
if state & WRITER == 1 {
test_dbg!(self.state.fetch_sub(READER, Release));
false
} else {
true
}
}
#[cfg_attr(test, track_caller)]
#[inline]
unsafe fn unlock_shared(&self) {
let _val = test_dbg!(self.state.fetch_sub(READER, Release));
debug_assert_eq!(
_val & WRITER,
0,
"tried to drop a read guard while write locked, something is Very Wrong!"
)
}
#[cfg_attr(test, track_caller)]
fn lock_exclusive(&self) {
let mut backoff = Backoff::new();
while test_dbg!(self
.state
.compare_exchange_weak(UNLOCKED, WRITER, Acquire, Relaxed))
.is_err()
{
test_dbg!(backoff.spin());
}
}
#[cfg_attr(test, track_caller)]
#[inline]
fn try_lock_exclusive(&self) -> bool {
test_dbg!(self
.state
.compare_exchange(UNLOCKED, WRITER, Acquire, Relaxed))
.is_ok()
}
#[cfg_attr(test, track_caller)]
#[inline]
unsafe fn unlock_exclusive(&self) {
let _val = test_dbg!(self.state.swap(UNLOCKED, Release));
}
#[inline]
fn is_locked(&self) -> bool {
self.state.load(Relaxed) & (WRITER | READER) != 0
}
#[inline]
fn is_locked_exclusive(&self) -> bool {
self.state.load(Relaxed) & WRITER == 1
}
}
// Const initialization is only possible outside loom builds, since
// loom's atomics cannot be constructed in const contexts.
#[cfg(not(loom))]
impl blocking::ConstInit for RwSpinlock {
#[allow(clippy::declare_interior_mutable_const)]
const INIT: Self = RwSpinlock::new();
}
impl fmt::Debug for RwSpinlock {
    /// Formats the lock's packed state as separate `readers` and
    /// `writer` fields.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Snapshot the state once so both reported fields agree.
        let snapshot = self.state.load(Relaxed);
        let readers = snapshot >> 1;
        let writer = snapshot & WRITER;
        f.debug_struct("RwSpinlock")
            .field("readers", &readers)
            .field("writer", &writer)
            .finish()
    }
}