surelock 0.1.0

Deadlock-free locks for Rust with compile-time guarantees, incremental locks, and atomic lock sets.
Documentation
//! Monotonic lock identity.
//!
//! Every [`Mutex`](crate::mutex::Mutex) is assigned a unique [`LockId`] at creation
//! time via a global atomic counter. The total order on `LockId` values
//! is the foundation of [`LockSet`](crate::set::LockSet)'s deadlock prevention: locks
//! are always acquired in ascending `LockId` order.
//!
//! By default, `LockId` uses `AtomicU32` (works on all targets including
//! 32-bit embedded). Enable the `atomic-u64` feature (on by default with
//! `std`) for `AtomicU64`, or `portable-atomic` for targets without
//! native CAS support (e.g., thumbv6m).

use crate::atomic::{AtomicId, IdInner, Ordering};

/// Global monotonically increasing counter backing [`LockId::next`].
/// `AtomicId`/`IdInner` are selected by feature flags (`atomic-u64`,
/// `portable-atomic`) in `crate::atomic`.
static NEXT_ID: AtomicId = AtomicId::new(0);

/// Unique, totally-ordered lock identifier.
///
/// Assigned once at [`Mutex`](crate::mutex::Mutex) creation, immutable thereafter.
/// The ordering on `LockId` values determines the acquisition order
/// within a [`LockSet`](crate::set::LockSet).
///
/// Uses `u32` by default (works on all targets). Enable `atomic-u64`
/// for `u64` identifiers.
///
/// `Relaxed` ordering on the counter is sufficient: we need uniqueness
/// and a total order on the _values_, not a happens-before relationship
/// between the `fetch_add` calls.
///
/// The counter wraps on overflow (`u32::MAX` or `u64::MAX` allocations).
/// With `atomic-u64` (default on `std`), exhaustion takes ~584 years at
/// one allocation per nanosecond. With the default `u32` counter on
/// `no_std`, wrap occurs at ~4 billion allocations -- safe for typical
/// embedded use, but a concern if mutexes are created in a hot loop.
/// A `debug_assert!` fires on wrap in debug builds.
///
/// The inner value is private; the raw number is only observable through
/// the `Display`/`Debug` impls, keeping the representation swappable.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct LockId(IdInner);

impl core::fmt::Display for LockId {
    /// Render the identifier exactly like its underlying integer,
    /// forwarding any width/fill/alignment flags to the inner value.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Display::fmt(&self.0, f)
    }
}

impl LockId {
    /// Allocate the next unique `LockId`.
    ///
    /// # Panics
    ///
    /// Debug-panics if the counter wraps (overflow). In release builds
    /// the counter silently wraps, which would violate the uniqueness
    /// invariant. Enable `atomic-u64` (default with `std`) to make
    /// exhaustion practically impossible.
    pub(crate) fn next() -> Self {
        let id = NEXT_ID.fetch_add(1, Ordering::Relaxed);
        debug_assert!(id < IdInner::MAX, "LockId counter overflow");
        Self(id)
    }
}