#![allow(clippy::new_without_default)]
#![allow(clippy::declare_interior_mutable_const)]
use core::marker::PhantomData;
use core::sync::atomic::{AtomicBool, Ordering};
use mutex_traits::{ConstInit, ScopedRawMutex};
pub use mutex_traits as traits;
#[cfg(feature = "impl-critical-section")]
pub mod cs {
    use super::*;

    /// A [`ScopedRawMutex`] backed by the `critical-section` crate.
    ///
    /// The closure handed to [`ScopedRawMutex::with_lock`] runs entirely
    /// inside a critical section, so it should be kept short.
    #[cfg_attr(feature = "fmt", derive(Debug))]
    pub struct CriticalSectionRawMutex {
        // Set while a closure is executing; detects re-entrant locking.
        taken: AtomicBool,
    }

    unsafe impl Send for CriticalSectionRawMutex {}
    unsafe impl Sync for CriticalSectionRawMutex {}

    impl CriticalSectionRawMutex {
        /// Creates a new, unlocked mutex.
        pub const fn new() -> Self {
            Self {
                taken: AtomicBool::new(false),
            }
        }
    }

    impl ConstInit for CriticalSectionRawMutex {
        const INIT: Self = Self::new();
    }

    unsafe impl ScopedRawMutex for CriticalSectionRawMutex {
        #[inline]
        fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
            critical_section::with(|_| {
                // Relaxed suffices: the surrounding critical section is
                // what provides exclusivity, not the atomic itself.
                match self.taken.swap(true, Ordering::Relaxed) {
                    // Already held (re-entrant call): refuse rather than
                    // deadlock. Re-storing `true` above was a no-op.
                    true => None,
                    false => {
                        let ret = f();
                        self.taken.store(false, Ordering::Relaxed);
                        Some(ret)
                    }
                }
            })
        }

        #[inline]
        fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
            self.try_with_lock(f).expect("Deadlocked")
        }

        fn is_locked(&self) -> bool {
            self.taken.load(Ordering::Relaxed)
        }
    }
}
pub mod local {
    use super::*;

    /// A [`ScopedRawMutex`] intended for single-threaded use only.
    ///
    /// The raw-pointer `PhantomData` makes the type `!Sync`, which is what
    /// makes the unsynchronized (`Relaxed`) flag accesses below sound.
    #[cfg_attr(feature = "fmt", derive(Debug))]
    pub struct LocalRawMutex {
        // Set while a closure is executing; detects re-entrant locking.
        taken: AtomicBool,
        // `*mut ()` marker: suppresses the auto `Sync` (and `Send`) impls;
        // `Send` is deliberately restored below.
        _phantom: PhantomData<*mut ()>,
    }

    impl LocalRawMutex {
        /// Creates a new, unlocked mutex.
        pub const fn new() -> Self {
            Self {
                taken: AtomicBool::new(false),
                _phantom: PhantomData,
            }
        }
    }

    // SAFETY: moving the mutex to another thread is fine; it is only
    // *shared* cross-thread access (`Sync`) that must remain forbidden.
    unsafe impl Send for LocalRawMutex {}

    impl ConstInit for LocalRawMutex {
        const INIT: Self = Self::new();
    }

    unsafe impl ScopedRawMutex for LocalRawMutex {
        #[inline]
        fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
            // `!Sync` guarantees a single thread, so Relaxed cannot race.
            if self.taken.swap(true, Ordering::Relaxed) {
                // Re-entrant call: refuse rather than deadlock.
                return None;
            }
            let ret = f();
            self.taken.store(false, Ordering::Relaxed);
            Some(ret)
        }

        #[inline]
        fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
            self.try_with_lock(f).expect("Deadlocked")
        }

        fn is_locked(&self) -> bool {
            self.taken.load(Ordering::Relaxed)
        }
    }
}
// Only sound on single-core Cortex-M parts: the feature name carries the
// "unsafe" warning, and the `cortex_m` cfg gates it to that architecture.
#[cfg(all(feature = "impl-unsafe-cortex-m-single-core", cortex_m))]
pub mod single_core_thread_mode {
use super::*;
/// A [`ScopedRawMutex`] usable only from "thread mode" (i.e. never from an
/// interrupt/exception handler) on a single-core Cortex-M device.
///
/// Exclusivity comes from the thread-mode check plus the single core:
/// interrupt context is refused outright, and thread-mode code cannot
/// preempt itself, so holding `taken` while already set can only mean a
/// re-entrant call.
#[cfg_attr(feature = "fmt", derive(Debug))]
pub struct ThreadModeRawMutex {
// Set while a closure is executing; detects re-entrant locking.
taken: AtomicBool,
}
unsafe impl Send for ThreadModeRawMutex {}
unsafe impl Sync for ThreadModeRawMutex {}
impl ThreadModeRawMutex {
/// Creates a new, unlocked mutex.
pub const fn new() -> Self {
Self {
taken: AtomicBool::new(false),
}
}
}
impl ConstInit for ThreadModeRawMutex {
const INIT: Self = Self::new();
}
unsafe impl ScopedRawMutex for ThreadModeRawMutex {
#[inline]
fn try_with_lock<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
// Interrupt/handler context never gets the lock — it sees `None`
// before the flag is touched.
if !in_thread_mode() {
return None;
}
// `taken` is only ever set here, in thread mode, around `f()`.
// Finding it already set therefore means a re-entrant call from
// inside `f` — a usage bug, so panic rather than return `None`.
assert!(!self.taken.load(Ordering::Relaxed));
// Relaxed is sufficient: only thread-mode code reaches these
// stores, and there is a single core.
self.taken.store(true, Ordering::Relaxed);
let ret = f();
self.taken.store(false, Ordering::Relaxed);
Some(ret)
}
#[inline]
fn with_lock<R>(&self, f: impl FnOnce() -> R) -> R {
self.try_with_lock(f)
.expect("Deadlocked or attempted to access outside of thread mode")
}
fn is_locked(&self) -> bool {
self.taken.load(Ordering::Relaxed)
}
}
impl Drop for ThreadModeRawMutex {
// Destruction is restricted to thread mode as well, so a handler can
// never tear the mutex down out from under thread-mode users.
fn drop(&mut self) {
assert!(
in_thread_mode(),
"ThreadModeMutex can only be dropped from thread mode."
);
}
}
/// Returns `true` when the CPU is executing in thread mode.
///
/// Reads the ICSR register (0xE000_ED04); a zero VECTACTIVE field
/// (bits [8:0], mask 0x1FF) means no exception is active — per the
/// ARMv7-M architecture reference. SAFETY: on Cortex-M (enforced by the
/// module's `cortex_m` cfg) this is a always-readable memory-mapped
/// System Control Block register, so the volatile read is sound.
fn in_thread_mode() -> bool {
return unsafe { (0xE000ED04 as *const u32).read_volatile() } & 0x1FF == 0;
}
}
#[cfg(feature = "impl-lock_api-0_4")]
pub mod lock_api_0_4 {
    use ::lock_api_0_4 as lock_api;
    use mutex_traits::{ConstInit, RawMutex};

    /// Newtype adapter exposing any [`lock_api::RawMutex`] as a
    /// [`mutex_traits::RawMutex`]. Every method is pure delegation.
    #[cfg_attr(feature = "fmt", derive(Debug))]
    pub struct LockApiRawMutex<T>(T);

    impl<T: lock_api::RawMutex> ConstInit for LockApiRawMutex<T> {
        const INIT: Self = LockApiRawMutex(T::INIT);
    }

    unsafe impl<T: lock_api::RawMutex> RawMutex for LockApiRawMutex<T> {
        // Reuse the wrapped mutex's guard marker unchanged.
        type GuardMarker = <T as lock_api::RawMutex>::GuardMarker;

        #[inline]
        #[track_caller]
        fn lock(&self) {
            lock_api::RawMutex::lock(&self.0)
        }

        #[inline]
        #[track_caller]
        fn try_lock(&self) -> bool {
            lock_api::RawMutex::try_lock(&self.0)
        }

        #[inline]
        #[track_caller]
        unsafe fn unlock(&self) {
            // SAFETY: the caller upholds this trait's `unlock` contract,
            // which is forwarded verbatim to the inner mutex.
            lock_api::RawMutex::unlock(&self.0)
        }

        #[inline]
        #[track_caller]
        fn is_locked(&self) -> bool {
            lock_api::RawMutex::is_locked(&self.0)
        }
    }
}