#[cfg(not(loom))]
use super::ConstInit;
use super::ScopedRawMutex;
/// The default mutex implementation used by this crate's blocking primitives.
///
/// This is a thin wrapper around a backend selected at compile time (see the
/// `Inner` alias below): loom's mutex under `cfg(loom)`, `std::sync::Mutex`
/// when the "std" feature is enabled, a spinlock inside a critical section
/// when only the "critical-section" feature is enabled, and a plain spinlock
/// otherwise.
#[must_use = "why create a `DefaultMutex` if you're not going to lock it?"]
pub struct DefaultMutex(Inner);
impl DefaultMutex {
    // NOTE(review): `loom_const_fn!` presumably emits `new` as a `const fn`
    // in non-loom builds (required for the `ConstInit::INIT` impl below) and
    // as a plain `fn` under `cfg(loom)` — confirm against the macro's
    // definition.
    loom_const_fn! {
        /// Returns a new `DefaultMutex`, using the backend selected at
        /// compile time.
        #[track_caller]
        #[inline]
        pub fn new() -> Self {
            Self(Inner::new())
        }
    }
}
impl Default for DefaultMutex {
#[track_caller] fn default() -> Self {
Self::new()
}
}
impl core::fmt::Debug for DefaultMutex {
#[inline]
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.0.fmt(f)
}
}
#[cfg(not(loom))]
impl ConstInit for DefaultMutex {
    /// An unlocked `DefaultMutex`, usable in `const`/`static` initializers.
    // The interior-mutable const is intentional: each use of `INIT` produces
    // a fresh, independent mutex rather than sharing one.
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self::new();
}
// SAFETY: `DefaultMutex` is a transparent wrapper — every method forwards
// directly to the selected `Inner` backend, each of which implements
// `ScopedRawMutex` itself, so the trait's soundness obligations are upheld
// by the backend impl.
unsafe impl ScopedRawMutex for DefaultMutex {
    /// Acquires the lock (blocking or spinning, depending on the backend),
    /// runs `f`, then releases the lock.
    #[track_caller]
    fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
        self.0.with_lock(f)
    }

    /// Runs `f` only if the lock can be acquired without blocking; returns
    /// `None` if it is already held.
    #[track_caller]
    fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
        self.0.try_with_lock(f)
    }

    /// Returns `true` if the lock is currently held. Advisory only — the
    /// state may change as soon as this returns.
    #[inline]
    fn is_locked(&self) -> bool {
        self.0.is_locked()
    }
}
#[cfg(loom)]
use loom_impl::LoomDefaultMutex as Inner;
#[cfg(all(not(loom), feature = "std"))]
use std_impl::StdDefaultMutex as Inner;
#[cfg(all(not(loom), not(feature = "std"), feature = "critical-section"))]
use cs_impl::CriticalSectionDefaultMutex as Inner;
#[cfg(all(not(loom), not(feature = "std"), not(feature = "critical-section")))]
use spin_impl::SpinDefaultMutex as Inner;
#[cfg(loom)]
mod loom_impl {
    use super::ScopedRawMutex;
    use core::panic::Location;
    use tracing::{debug, debug_span};

    /// Default mutex used under `cfg(loom)`: wraps `loom::sync::Mutex` so the
    /// loom model checker can explore lock orderings, emitting `tracing`
    /// diagnostics for each lock/unlock transition.
    #[derive(Debug)]
    pub(super) struct LoomDefaultMutex(loom::sync::Mutex<()>);

    impl LoomDefaultMutex {
        /// Returns a new, unlocked mutex.
        #[track_caller]
        pub(super) fn new() -> Self {
            Self(loom::sync::Mutex::new(()))
        }
    }

    unsafe impl ScopedRawMutex for LoomDefaultMutex {
        /// Acquires the lock, runs `f`, then releases the lock, tracing each
        /// transition with the caller's source location.
        #[track_caller]
        #[inline]
        fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
            let location = Location::caller();
            // NOTE(review): `trace!` is not in this module's `use` items —
            // presumably a crate-level `#[macro_use]` import; confirm.
            trace!(
                target: "maitake_sync::blocking",
                %location,
                "DefaultMutex::with_lock()",
            );
            let guard = self.0.lock();
            // Keep the "locked" span entered for the duration of the
            // critical section.
            let _span = debug_span!(
                target: "maitake_sync::blocking",
                "locked",
                %location,
            )
            .entered();
            debug!(
                target: "maitake_sync::blocking",
                "DefaultMutex::with_lock() -> locked",
            );
            let result = f();
            // Drop the guard *before* logging the unlock so the event
            // accurately reflects the lock state.
            drop(guard);
            debug!(
                target: "maitake_sync::blocking",
                "DefaultMutex::with_lock() -> unlocked",
            );
            result
        }

        /// Runs `f` only if the lock can be acquired without blocking;
        /// returns `None` if it is already held.
        #[track_caller]
        #[inline]
        fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
            let location = Location::caller();
            trace!(
                target: "maitake_sync::blocking",
                %location,
                "DefaultMutex::try_with_lock()",
            );
            match self.0.try_lock() {
                Ok(guard) => {
                    let _span = debug_span!(target: "maitake_sync::blocking", "locked", %location)
                        .entered();
                    debug!(
                        target: "maitake_sync::blocking",
                        "DefaultMutex::try_with_lock() -> locked",
                    );
                    let result = f();
                    // As in `with_lock`: release before logging "unlocked".
                    drop(guard);
                    debug!(
                        target: "maitake_sync::blocking",
                        "DefaultMutex::try_with_lock() -> unlocked",
                    );
                    Some(result)
                }
                Err(_) => {
                    debug!(
                        target: "maitake_sync::blocking",
                        %location,
                        "DefaultMutex::try_with_lock() -> already locked",
                    );
                    None
                }
            }
        }

        /// Returns `true` if the lock is held: a failed `try_lock` means
        /// another context holds it. Advisory only.
        fn is_locked(&self) -> bool {
            self.0.try_lock().is_err()
        }
    }
}
#[cfg(all(not(loom), feature = "std"))]
mod std_impl {
    use super::ScopedRawMutex;

    /// Default mutex backed by [`std::sync::Mutex`], used when the "std"
    /// feature is enabled (and not running under loom).
    #[derive(Debug)]
    #[must_use]
    pub(super) struct StdDefaultMutex(std::sync::Mutex<()>);

    impl StdDefaultMutex {
        /// Returns a new, unlocked mutex.
        #[inline]
        pub(super) const fn new() -> Self {
            Self(std::sync::Mutex::new(()))
        }
    }

    unsafe impl ScopedRawMutex for StdDefaultMutex {
        /// Acquires the lock (blocking if contended), runs `f`, then releases
        /// the lock when the guard drops.
        ///
        /// # Panics
        ///
        /// Panics if the mutex was poisoned by a panic in a previous critical
        /// section.
        #[track_caller]
        #[inline(always)]
        fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
            let _guard = self.0.lock().unwrap();
            f()
        }

        /// Runs `f` only if the lock can be acquired without blocking;
        /// returns `None` if it is already held (or poisoned).
        #[track_caller]
        #[inline(always)]
        fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
            let _guard = self.0.try_lock().ok()?;
            Some(f())
        }

        /// Returns `true` if the mutex is currently locked. Advisory only —
        /// the state may change as soon as this returns.
        fn is_locked(&self) -> bool {
            // FIX: this previously returned `try_lock().is_ok()`, which is
            // inverted — a *successful* `try_lock` means the mutex was NOT
            // held (and briefly acquires/releases it). A failed `try_lock`
            // (`WouldBlock` or `Poisoned`) is what indicates the lock is
            // held, matching the other backends (the loom impl uses
            // `try_lock().is_err()`).
            self.0.try_lock().is_err()
        }
    }
}
#[cfg(all(not(loom), not(feature = "std"), feature = "critical-section"))]
mod cs_impl {
    use super::ScopedRawMutex;
    use crate::spin::Spinlock;

    /// Default mutex for `no_std` targets with the "critical-section"
    /// feature: a spinlock whose critical sections run inside a
    /// `critical_section::with` block.
    #[derive(Debug)]
    pub(super) struct CriticalSectionDefaultMutex(Spinlock);

    impl CriticalSectionDefaultMutex {
        /// Returns a new, unlocked mutex.
        #[inline]
        pub(super) const fn new() -> Self {
            Self(Spinlock::new())
        }
    }

    unsafe impl ScopedRawMutex for CriticalSectionDefaultMutex {
        /// Enters a critical section, then acquires the spinlock around `f`.
        #[track_caller]
        #[inline(always)]
        fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
            let Self(spin) = self;
            critical_section::with(|_cs| spin.with_lock(f))
        }

        /// Enters a critical section, then runs `f` only if the spinlock can
        /// be acquired immediately; returns `None` otherwise.
        #[track_caller]
        #[inline(always)]
        fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
            let Self(spin) = self;
            critical_section::with(|_cs| spin.try_with_lock(f))
        }

        /// Returns `true` if the spinlock is currently held (advisory only).
        #[inline]
        fn is_locked(&self) -> bool {
            let Self(spin) = self;
            spin.is_locked()
        }
    }
}
#[cfg(all(not(loom), not(feature = "std"), not(feature = "critical-section")))]
mod spin_impl {
    use super::ScopedRawMutex;
    use crate::spin::Spinlock;

    /// Default mutex for `no_std` targets without the "critical-section"
    /// feature: a bare spinlock.
    #[derive(Debug)]
    pub(super) struct SpinDefaultMutex(Spinlock);

    impl SpinDefaultMutex {
        /// Returns a new, unlocked mutex.
        #[inline]
        pub(super) const fn new() -> Self {
            Self(Spinlock::new())
        }
    }

    unsafe impl ScopedRawMutex for SpinDefaultMutex {
        /// Spins until the lock is acquired, runs `f`, then releases it.
        #[track_caller]
        #[inline(always)]
        fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
            let Self(spin) = self;
            spin.with_lock(f)
        }

        /// Runs `f` only if the lock can be acquired immediately; returns
        /// `None` otherwise.
        #[track_caller]
        #[inline(always)]
        fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
            let Self(spin) = self;
            spin.try_with_lock(f)
        }

        /// Returns `true` if the spinlock is currently held (advisory only).
        #[inline(always)]
        fn is_locked(&self) -> bool {
            let Self(spin) = self;
            spin.is_locked()
        }
    }
}
#[cfg(test)]
mod test {
    use super::DefaultMutex;

    /// Compile-time check that `DefaultMutex` implements the traits callers
    /// rely on, regardless of which backend was selected by feature flags.
    #[test]
    fn default_mutex_trait_impls() {
        fn assert_impls<T>()
        where
            T: mutex_traits::ScopedRawMutex + Send + Sync + Default + core::fmt::Debug,
        {
        }
        assert_impls::<DefaultMutex>();
    }

    /// In non-loom builds, `DefaultMutex::new()` must be `const`-callable
    /// (usable in a `static` initializer) and the type must implement
    /// `ConstInit`.
    #[cfg(not(loom))]
    #[test]
    fn const_constructor() {
        fn assert_const_init<T: mutex_traits::ConstInit>() {}
        assert_const_init::<DefaultMutex>();
        static _MY_COOL_MUTEX: DefaultMutex = DefaultMutex::new();
    }
}