Skip to main content

attachable_slab_allocator/
slab_box.rs

//! # SlabBox: The RAII Smart Pointer
//!
//! `SlabBox` is an RAII wrapper around a raw pointer allocated from a `Slab`.
//! It enforces safe memory management by automatically returning the memory
//! to the slab allocator when the `SlabBox` is dropped.
7use crate::locks::LockTrait;
8use crate::mem_lay::get_layout;
9use crate::slab::Slab;
10use crate::utils::NonNullEx;
11use core::alloc::Layout;
12use core::marker::PhantomData;
13use core::ops::{Deref, DerefMut};
14use core::ptr::{NonNull, drop_in_place};
15
/// A smart pointer to an object of type `T` allocated in a slab.
///
/// When this struct goes out of scope, the memory is automatically returned to the
/// originating slab.
///
/// `SLAB_SIZE` is presumably the byte size of the backing slab — it is fed to
/// `get_layout` to recover the slab's layout when the slot is freed on drop
/// (TODO confirm against `mem_lay::get_layout`).
pub struct SlabBox<T, LOCK, const SLAB_SIZE: usize>
where
    LOCK: LockTrait,
{
    /// Pointer to the slot holding the `T` value.
    ///
    /// Invariant: points to a live, initialized `T` allocated from a `Slab`
    /// with `SLAB_SIZE`, uniquely owned by this box (see `SlabBox::new`'s
    /// safety contract); `free_ptr` drops and frees it exactly once.
    ptr: NonNull<T>,
    /// Tracks the lock implementation type for safety and trait bounds.
    _marker: PhantomData<LOCK>,
}
28
// SAFETY: `SlabBox` uniquely owns the `T` behind `ptr`, so sending the box to
// another thread moves the value with it — sound when `T: Send`. Dropping the
// box on the receiving thread frees the slot through the slab's lock, which is
// why a `LOCK` bound is required at all.
// NOTE(review): whether `LOCK: Sync` (rather than `Send`) is the precise bound
// depends on how `Slab::free_slot` reaches the lock — confirm against `slab.rs`.
unsafe impl<T, LOCK, const SLAB_SIZE: usize> Send for SlabBox<T, LOCK, SLAB_SIZE>
where
    T: Send,
    LOCK: LockTrait + Sync,
{
}
// SAFETY: a shared `&SlabBox` only exposes `&T` (via `Deref`) and the raw
// pointer (via `as_ptr`), so sharing the box across threads is sound exactly
// when sharing `&T` is, i.e. when `T: Sync`. The `LOCK: Sync` bound mirrors
// the `Send` impl above, since any thread holding the reference may be the one
// that ultimately drops the box and touches the slab's lock.
unsafe impl<T, LOCK, const SLAB_SIZE: usize> Sync for SlabBox<T, LOCK, SLAB_SIZE>
where
    T: Sync,
    LOCK: LockTrait + Sync,
{
}
41
42impl<T, LOCK, const SLAB_SIZE: usize> SlabBox<T, LOCK, SLAB_SIZE>
43where
44    LOCK: LockTrait,
45{
46    /// Cached layout for the slab size, used to calculate base slab alignment during drop.
47    const SLAB_LAYOUT: Layout = get_layout(SLAB_SIZE);
48
49    /// Wraps a raw pointer into a `SlabBox`.
50    ///
51    /// # Safety
52    /// - The `ptr` must be a valid, allocated slot from a `Slab` with `SLAB_SIZE`.
53    pub fn new(ptr: NonNull<T>) -> Self {
54        Self {
55            ptr,
56            _marker: PhantomData,
57        }
58    }
59
60    /// Manually triggers the deallocation of the slot.
61    ///
62    /// This is typically called automatically by `Drop`.
63    fn free_ptr(&mut self) {
64        unsafe {
65            drop_in_place(self.ptr.as_ptr());
66            Slab::<T, LOCK>::free_slot(self.ptr, Self::SLAB_LAYOUT).expect("internal error");
67        }
68    }
69
70    /// Returns the raw pointer to the underlying data.
71    pub fn as_ptr(&self) -> *mut T {
72        self.ptr.as_ptr()
73    }
74}
75
impl<T, LOCK, const SLAB_SIZE: usize> Drop for SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    /// Automatically returns the memory to the slab system: drops the boxed
    /// `T` in place, then frees the slot via `Slab::free_slot`.
    ///
    /// Panics (through the `expect` inside `free_ptr`) if the slab rejects the
    /// pointer, which would indicate a broken internal invariant.
    fn drop(&mut self) {
        self.free_ptr();
    }
}
85
86impl<T, LOCK, const SLAB_SIZE: usize> Deref for SlabBox<T, LOCK, SLAB_SIZE>
87where
88    LOCK: LockTrait,
89{
90    type Target = T;
91
92    /// Provides immutable access to the underlying data.
93    fn deref(&self) -> &Self::Target {
94        self.ptr.unsafe_ref()
95    }
96}
97
98impl<T, LOCK, const SLAB_SIZE: usize> DerefMut for SlabBox<T, LOCK, SLAB_SIZE>
99where
100    LOCK: LockTrait,
101{
102    /// Provides mutable access to the underlying data.
103    fn deref_mut(&mut self) -> &mut Self::Target {
104        self.ptr.unsafe_mut_ref()
105    }
106}