attachable-slab-allocator 0.1.0

A high-performance, $O(1)$, Master-Slave slab allocator designed for `no_std` environments, kernels, and embedded systems. This library provides fixed-size memory management with RAII safety while remaining completely agnostic to the underlying memory provider.
Documentation
//! # SlabBox: The RAII Smart Pointer
//!
//! `SlabBox` is an RAII wrapper around a raw pointer allocated from a `Slab`.
//! It enforces safe memory management by automatically returning the memory
//! to the slab allocator when the `SlabBox` is dropped.

use crate::locks::LockTrait;
use crate::mem_lay::get_layout;
use crate::slab::Slab;
use crate::utils::NonNullEx;
use core::alloc::Layout;
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use core::ptr::{NonNull, drop_in_place};

/// A smart pointer to an object of type `T` allocated in a slab.
///
/// When this struct goes out of scope, the memory is automatically returned to the
/// originating slab.
/// A smart pointer to an object of type `T` allocated in a slab.
///
/// When this struct goes out of scope, the memory is automatically returned to the
/// originating slab.
pub struct SlabBox<T, LOCK, const SLAB_SIZE: usize>
where
    LOCK: LockTrait,
{
    ptr: NonNull<T>,
    /// Marks that this type logically owns a `T` (its `Drop` impl runs `T`'s
    /// destructor), and tracks the lock implementation type for trait bounds.
    /// `NonNull<T>` alone does not signal ownership of `T` to the compiler,
    /// so owned-pointer types conventionally carry `PhantomData<T>` as well.
    _marker: PhantomData<(T, LOCK)>,
}

// SAFETY: Sending a `SlabBox` to another thread moves ownership of the
// pointed-to `T` (it will be dropped on the receiving thread), so `T: Send`
// is required. NOTE(review): the `LOCK: Sync` bound is presumably needed
// because dropping on another thread frees the slot through shared slab
// state guarded by `LOCK` — confirm against `Slab::free_slot`.
unsafe impl<T, LOCK, const SLAB_SIZE: usize> Send for SlabBox<T, LOCK, SLAB_SIZE>
where
    T: Send,
    LOCK: LockTrait + Sync,
{
}
// SAFETY: `&SlabBox` only exposes `&T` (via `Deref`) and a raw pointer, so
// sharing it across threads is sound when `T: Sync`. NOTE(review): as with
// `Send`, the `LOCK: Sync` bound presumably reflects that slab bookkeeping
// is shared between threads — confirm against the `Slab` implementation.
unsafe impl<T, LOCK, const SLAB_SIZE: usize> Sync for SlabBox<T, LOCK, SLAB_SIZE>
where
    T: Sync,
    LOCK: LockTrait + Sync,
{
}

impl<T, LOCK, const SLAB_SIZE: usize> SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    /// Cached layout for the slab size, used to calculate base slab alignment during drop.
    const SLAB_LAYOUT: Layout = get_layout(SLAB_SIZE);

    /// Wraps a raw pointer into a `SlabBox`.
    ///
    /// # Safety
    /// - The `ptr` must be a valid, allocated slot from a `Slab` with `SLAB_SIZE`.
    ///
    /// NOTE(review): this function documents a `# Safety` contract but is not
    /// declared `unsafe fn`, so safe code can wrap an arbitrary pointer and
    /// trigger undefined behavior when the box is dropped. It should become
    /// `unsafe fn new` in the next breaking release; left as-is here to avoid
    /// breaking existing callers.
    pub fn new(ptr: NonNull<T>) -> Self {
        Self {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Runs `T`'s destructor and returns the slot to its originating slab.
    ///
    /// This is called exactly once, from `Drop`; calling it again for the same
    /// pointer would be a double free.
    fn free_ptr(&mut self) {
        // SAFETY: per the `new` contract, `self.ptr` is a live, initialized
        // slot allocated from a `Slab` with `SLAB_SIZE`, and it has not been
        // freed yet — this method is private and only reachable via `Drop`.
        unsafe {
            // Run the destructor in place before handing the raw slot back.
            drop_in_place(self.ptr.as_ptr());
            Slab::<T, LOCK>::free_slot(self.ptr, Self::SLAB_LAYOUT)
                .expect("SlabBox drop: pointer is not an allocated slot of a slab with SLAB_SIZE");
        }
    }

    /// Returns the raw pointer to the underlying data.
    pub fn as_ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }
}

impl<T, LOCK, const SLAB_SIZE: usize> Drop for SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    /// Automatically returns the memory to the slab system.
    ///
    /// Delegates to `free_ptr`, which runs `T`'s destructor and releases the
    /// slot back to the originating slab.
    fn drop(&mut self) {
        self.free_ptr();
    }
}

impl<T, LOCK, const SLAB_SIZE: usize> Deref for SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    type Target = T;

    /// Dereferences to the slab-allocated value.
    fn deref(&self) -> &Self::Target {
        // SAFETY: `self.ptr` points to a live, initialized `T` for the whole
        // lifetime of this box (contract of `SlabBox::new`), and `&self`
        // rules out a concurrent `&mut T` obtained through this box.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T, LOCK, const SLAB_SIZE: usize> DerefMut for SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    /// Mutably dereferences to the slab-allocated value.
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: `self.ptr` points to a live, initialized `T` (contract of
        // `SlabBox::new`), and `&mut self` guarantees exclusive access, so
        // handing out `&mut T` cannot alias.
        unsafe { self.ptr.as_mut() }
    }
}