use core::{
mem::ManuallyDrop,
sync::atomic::{self, AtomicU64, Ordering},
};
use super::utils::{Backoff, sc_fence};
#[cfg(portable_atomic_unsafe_assume_privileged)]
use crate::imp::interrupt::arch as interrupt;
use crate::utils::unlikely;
/// Stamp returned by `SeqLock::optimistic_read` and re-checked by
/// `validate_read`. The counter starts at 0 and is bumped by 2 per completed
/// write, so valid stamps are always even.
pub(super) type State = u64;
/// Sentinel stored in `SeqLock::state` while a writer holds the lock.
/// Since the stamp counter only ever takes even values, it can never
/// collide with this (odd) value.
const LOCKED: State = 1;
/// A sequence lock: a version counter that doubles as a writer lock.
///
/// Readers snapshot the counter (`optimistic_read`), read the protected
/// data, then re-check the counter (`validate_read`); writers swap in
/// `LOCKED` and publish an incremented counter on release.
pub(super) struct SeqLock {
    /// Version counter: `LOCKED` (1) while a write is in progress, an even
    /// stamp otherwise (starts at 0, +2 per completed write).
    state: AtomicU64,
}
impl SeqLock {
    /// Creates an unlocked lock with the stamp counter at 0 (even = unlocked).
    #[inline]
    pub(super) const fn new() -> Self {
        Self { state: AtomicU64::new(0) }
    }

    /// Begins an optimistic read: returns the current stamp, or `None` if a
    /// writer currently holds the lock (stamp == `LOCKED`).
    ///
    /// The caller reads the protected data afterwards and must call
    /// `validate_read` with the returned stamp before trusting what it read.
    #[inline]
    pub(super) fn optimistic_read(&self, order: Ordering) -> Option<State> {
        // For SeqCst operations, emit the stronger fence before the stamp
        // load. NOTE(review): `sc_fence` comes from `super::utils` (not
        // visible here); presumably a SeqCst fence — confirm.
        if unlikely(order == Ordering::SeqCst) {
            sc_fence();
        }
        let state = self.state.load(Ordering::Acquire);
        if state == LOCKED { None } else { Some(state) }
    }

    /// Ends an optimistic read: returns `true` iff the stamp is unchanged
    /// since `optimistic_read`, i.e. no writer intervened and the data read
    /// in between is consistent.
    #[inline]
    pub(super) fn validate_read(&self, stamp: State, order: Ordering) -> bool {
        // This fence orders the caller's preceding data reads before the
        // stamp re-load below, which can therefore be Relaxed.
        atomic::fence(Ordering::Acquire);
        let result = self.state.load(Ordering::Relaxed) == stamp;
        // Only pay for the SeqCst fence when the validation succeeded.
        if unlikely(order == Ordering::SeqCst) && result {
            sc_fence();
        }
        result
    }

    /// Acquires the write lock, spinning (with backoff) while another writer
    /// holds it. The returned guard releases the lock on drop by publishing
    /// `stamp + 2`, or restores the old stamp via `abort`.
    #[inline]
    pub(super) fn write(&self, order: Ordering) -> SeqLockWriteGuard<'_> {
        // Remember whether the matching fence must also be emitted when the
        // guard releases the lock (see the guard's `drop`/`abort`).
        let emit_sc_fence = order == Ordering::SeqCst;
        if unlikely(emit_sc_fence) {
            sc_fence();
        }
        // On targets that assume a privileged environment, mask interrupts
        // for the whole critical section.
        // NOTE(review): if the lock could actually be contended under this
        // cfg, the spin loop below would run with interrupts disabled —
        // presumably contention is impossible there; confirm.
        #[cfg(portable_atomic_unsafe_assume_privileged)]
        let interrupt_state = interrupt::disable();
        let mut backoff = Backoff::new();
        loop {
            let previous = self.state.swap(LOCKED, Ordering::Acquire);
            if previous != LOCKED {
                // Keep the caller's data writes inside the critical section:
                // this fence prevents them from being reordered before the
                // swap that acquired the lock.
                atomic::fence(Ordering::Release);
                return SeqLockWriteGuard {
                    lock: self,
                    state: previous,
                    #[cfg(portable_atomic_unsafe_assume_privileged)]
                    interrupt_state,
                    emit_sc_fence,
                };
            }
            // Lock was held by someone else: wait (read-only, avoiding
            // cache-line ping-pong from repeated swaps) until it looks free,
            // then retry the swap.
            while self.state.load(Ordering::Relaxed) == LOCKED {
                backoff.snooze();
            }
        }
    }
}
/// Guard for a held write lock. Dropping it publishes a new stamp
/// (old stamp + 2); `abort` instead restores the old stamp so readers
/// cannot tell a write ever started.
#[must_use]
pub(super) struct SeqLockWriteGuard<'a> {
    /// The lock this guard will release.
    lock: &'a SeqLock,
    /// The stamp that was current when the lock was acquired.
    state: State,
    /// Saved interrupt state to restore on release (privileged targets only).
    #[cfg(portable_atomic_unsafe_assume_privileged)]
    interrupt_state: interrupt::State,
    /// Whether an `sc_fence` must be emitted on release (SeqCst operations).
    emit_sc_fence: bool,
}
impl SeqLockWriteGuard<'_> {
    /// Releases the lock *without* bumping the stamp, so a concurrent
    /// optimistic reader will still validate successfully — the aborted
    /// write is invisible. Presumably the caller must not have modified the
    /// protected data before aborting; otherwise readers would validate
    /// against changed data — confirm against callers.
    #[inline]
    pub(super) fn abort(self) {
        // Skip `Drop` (which would publish `state + 2`) and release manually
        // with the original stamp.
        let this = ManuallyDrop::new(self);
        this.lock.state.store(this.state, Ordering::Release);
        // Restore interrupts only after the lock has been released.
        #[cfg(portable_atomic_unsafe_assume_privileged)]
        unsafe {
            interrupt::restore(this.interrupt_state);
        }
        // Matches the fence emitted in `SeqLock::write` for SeqCst ops.
        if unlikely(this.emit_sc_fence) {
            sc_fence();
        }
    }
}
impl Drop for SeqLockWriteGuard<'_> {
    #[inline]
    fn drop(&mut self) {
        // Publish a new stamp: +2 keeps the counter even (and distinct from
        // `LOCKED`), and the changed value invalidates concurrent optimistic
        // readers. The Release store also publishes the protected data.
        self.lock.state.store(self.state.wrapping_add(2), Ordering::Release);
        // Re-enable interrupts only after the lock is released.
        #[cfg(portable_atomic_unsafe_assume_privileged)]
        unsafe {
            interrupt::restore(self.interrupt_state);
        }
        // Matches the fence emitted in `SeqLock::write` for SeqCst ops.
        if unlikely(self.emit_sc_fence) {
            sc_fence();
        }
    }
}
#[cfg(test)]
mod tests {
    use super::{Ordering, SeqLock};

    /// Orderings exercised by every test below.
    const ORDERINGS: [Ordering; 2] = [Ordering::AcqRel, Ordering::SeqCst];

    /// A completed write must invalidate earlier stamps and advance the
    /// counter.
    #[test]
    fn smoke() {
        for &order in ORDERINGS.iter() {
            let lock = SeqLock::new();
            let stamp = lock.optimistic_read(order).unwrap();
            assert!(lock.validate_read(stamp, order));
            drop(lock.write(order));
            assert!(!lock.validate_read(stamp, order));
            let fresh = lock.optimistic_read(order).unwrap();
            assert_ne!(stamp, fresh);
        }
    }

    /// An aborted write must leave the stamp exactly as it was.
    #[test]
    fn test_abort() {
        for &order in ORDERINGS.iter() {
            let lock = SeqLock::new();
            let stamp = lock.optimistic_read(order).unwrap();
            lock.write(order).abort();
            assert_eq!(
                stamp,
                lock.optimistic_read(order).unwrap(),
                "aborted write does not update the stamp"
            );
        }
    }
}