// nexus_slab/bounded.rs

//! Fixed-capacity slab allocator.
//!
//! This module provides a bounded (fixed-capacity) slab allocator.
//!
//! # Example
//!
//! ```
//! use nexus_slab::bounded::Slab;
//!
//! let slab = Slab::with_capacity(1024);
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! // SAFETY: slot was allocated from this slab
//! unsafe { slab.free(slot) };
//! ```

use std::cell::Cell;
use std::fmt;
use std::mem;
use std::ptr;

use crate::alloc::Full;
use crate::shared::{RawSlot, SlotCell};

// =============================================================================
// Claim
// =============================================================================

/// A claimed slot that has not yet been written to.
///
/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
/// to complete the allocation. If dropped without calling `write()`, the slot
/// is returned to the freelist.
///
/// The `write()` method is `#[inline]`, enabling the compiler to potentially
/// optimize the value write as a placement new (constructing directly into
/// the slot memory).
pub struct Claim<'a, T> {
    /// Pointer to the claimed (still vacant) slot inside `slab`'s storage.
    slot_ptr: *mut SlotCell<T>,
    /// The slab the slot was claimed from; `Drop` uses this to return the
    /// slot to the freelist if the claim is abandoned.
    slab: &'a Slab<T>,
}
43impl<T> Claim<'_, T> {
44    /// Writes the value to the claimed slot and returns the [`RawSlot`] handle.
45    ///
46    /// This consumes the claim. The value is written directly to the slot's
47    /// memory, which may enable placement new optimization.
48    #[inline]
49    pub fn write(self, value: T) -> RawSlot<T> {
50        let slot_ptr = self.slot_ptr;
51        // SAFETY: We own this slot from claim(), it's valid and vacant
52        unsafe {
53            (*slot_ptr).write_value(value);
54        }
55        // Don't run Drop - we're completing the allocation
56        mem::forget(self);
57        // SAFETY: slot_ptr is valid and now occupied
58        unsafe { RawSlot::from_ptr(slot_ptr) }
59    }
60}
62impl<T> fmt::Debug for Claim<'_, T> {
63    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
64        f.debug_struct("Claim")
65            .field("slot_ptr", &self.slot_ptr)
66            .finish()
67    }
68}
70impl<T> Drop for Claim<'_, T> {
71    fn drop(&mut self) {
72        // Abandoned claim - return slot to freelist
73        // SAFETY: slot_ptr is valid and still vacant (never written to)
74        let free_head = self.slab.free_head.get();
75        unsafe {
76            (*self.slot_ptr).set_next_free(free_head);
77        }
78        self.slab.free_head.set(self.slot_ptr);
79    }
80}

// =============================================================================
// Slab
// =============================================================================

/// Fixed-capacity slab allocator.
///
/// Uses pointer-based freelist for O(1) allocation.
///
/// # Drop Behavior
///
/// Dropping a `Slab` does **not** drop values in occupied slots. `SlotCell` is
/// a union, so the compiler cannot know which slots contain live values. The
/// caller must free all outstanding [`RawSlot`] handles before dropping the
/// slab, or the values will be leaked.
///
/// This is acceptable for the primary use case (TLS via `thread_local!`) where
/// thread exit leaks all TLS storage anyway.
///
/// # Const Construction
///
/// Supports const construction via [`new()`](Self::new) followed by
/// runtime initialization via [`init()`](Self::init). This enables use with
/// `thread_local!` using the `const { }` block syntax for zero-overhead TLS access.
///
/// ```ignore
/// thread_local! {
///     static SLAB: Slab<MyType> = const { Slab::new() };
/// }
///
/// // Later, at runtime:
/// SLAB.with(|s| s.init(1024));
/// ```
///
/// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
pub struct Slab<T> {
    /// Slot storage. Wrapped in UnsafeCell for interior mutability.
    /// Only mutated in `init()`, which asserts it runs at most once.
    slots: std::cell::UnsafeCell<Vec<SlotCell<T>>>,
    /// Capacity. Wrapped in Cell so it can be set during init.
    /// Zero means "not yet initialized" (see `is_initialized`).
    capacity: Cell<usize>,
    /// Head of freelist - raw pointer for fast allocation.
    /// NULL when slab is full or uninitialized.
    pub(crate) free_head: Cell<*mut SlotCell<T>>,
}
126impl<T> Slab<T> {
127    /// Creates an empty, uninitialized slab.
128    ///
129    /// This is a const function that performs no allocation. Call [`init()`](Self::init)
130    /// to allocate storage before use.
131    ///
132    /// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
133    ///
134    /// # Example
135    ///
136    /// ```ignore
137    /// // For use with thread_local! const initialization
138    /// thread_local! {
139    ///     static SLAB: Slab<u64> = const { Slab::new() };
140    /// }
141    /// ```
142    #[inline]
143    pub const fn new() -> Self {
144        Self {
145            slots: std::cell::UnsafeCell::new(Vec::new()),
146            capacity: Cell::new(0),
147            free_head: Cell::new(ptr::null_mut()),
148        }
149    }
150
151    /// Creates a new slab with the given capacity.
152    ///
153    /// # Panics
154    ///
155    /// Panics if capacity is zero.
156    #[inline]
157    pub fn with_capacity(capacity: usize) -> Self {
158        let slab = Self::new();
159        slab.init(capacity);
160        slab
161    }
162
163    /// Initializes the slab with the given capacity.
164    ///
165    /// This allocates slot storage and builds the freelist. Must be called
166    /// exactly once before any allocations.
167    ///
168    /// # Panics
169    ///
170    /// - Panics if the slab is already initialized (capacity > 0)
171    /// - Panics if capacity is zero
172    pub fn init(&self, capacity: usize) {
173        assert!(self.capacity.get() == 0, "Slab already initialized");
174        assert!(capacity > 0, "capacity must be non-zero");
175
176        // SAFETY: We have &self and verified capacity == 0, so no other code
177        // can be accessing slots. This is the only mutation point.
178        let slots = unsafe { &mut *self.slots.get() };
179
180        // Allocate slots — all initially vacant
181        slots.reserve_exact(capacity);
182        for _ in 0..capacity {
183            slots.push(SlotCell::vacant(ptr::null_mut()));
184        }
185
186        // Wire up the freelist: each slot's next_free points to the next slot
187        for i in 0..(capacity - 1) {
188            let next_ptr = slots.as_mut_ptr().wrapping_add(i + 1);
189            // SAFETY: Slot is vacant, wiring up the freelist during init
190            unsafe { slots[i].set_next_free(next_ptr) };
191        }
192        // Last slot points to NULL (end of freelist) — already null from vacant()
193
194        let free_head = slots.as_mut_ptr();
195        self.capacity.set(capacity);
196        self.free_head.set(free_head);
197    }
198
199    /// Returns true if the slab has been initialized.
200    #[inline]
201    pub fn is_initialized(&self) -> bool {
202        self.capacity.get() > 0
203    }
204
205    /// Returns the capacity.
206    #[inline]
207    pub fn capacity(&self) -> usize {
208        self.capacity.get()
209    }
210
211    /// Returns the base pointer to the slots array.
212    #[inline]
213    pub(crate) fn slots_ptr(&self) -> *mut SlotCell<T> {
214        // SAFETY: We're returning a pointer for use with raw pointer access
215        let slots = unsafe { &*self.slots.get() };
216        slots.as_ptr().cast_mut()
217    }
218
219    /// Returns `true` if `ptr` falls within this slab's slot array.
220    ///
221    /// O(1) range check. Used in `debug_assert!` to validate provenance.
222    #[doc(hidden)]
223    #[inline]
224    pub fn contains_ptr(&self, ptr: *const ()) -> bool {
225        let base = self.slots_ptr() as usize;
226        let end = base + self.capacity.get() * std::mem::size_of::<SlotCell<T>>();
227        let addr = ptr as usize;
228        addr >= base && addr < end
229    }
230
231    // =========================================================================
232    // Allocation API
233    // =========================================================================
234
235    /// Claims a slot from the freelist without writing a value.
236    ///
237    /// Returns `None` if the slab is full. The returned [`Claim`] must be
238    /// consumed via [`Claim::write()`] to complete the allocation.
239    ///
240    /// This two-phase allocation enables placement new optimization: the
241    /// value can be constructed directly into the slot memory.
242    ///
243    /// # Example
244    ///
245    /// ```
246    /// use nexus_slab::bounded::Slab;
247    ///
248    /// let slab = Slab::with_capacity(10);
249    /// if let Some(claim) = slab.claim() {
250    ///     let slot = claim.write(42u64);
251    ///     assert_eq!(*slot, 42);
252    ///     // SAFETY: slot was allocated from this slab
253    ///     unsafe { slab.free(slot) };
254    /// }
255    /// ```
256    #[inline]
257    pub fn claim(&self) -> Option<Claim<'_, T>> {
258        self.claim_ptr().map(|slot_ptr| Claim {
259            slot_ptr,
260            slab: self,
261        })
262    }
263
264    /// Claims a slot from the freelist, returning the raw pointer.
265    ///
266    /// Returns `None` if the slab is full. This is a low-level API for
267    /// macro-generated code that needs to escape TLS closures.
268    ///
269    /// # Safety Contract
270    ///
271    /// The caller MUST either:
272    /// - Write a value to the slot and use it as an allocated slot, OR
273    /// - Return the pointer to the freelist via `free_ptr()` if abandoning
274    #[doc(hidden)]
275    #[inline]
276    pub fn claim_ptr(&self) -> Option<*mut SlotCell<T>> {
277        let slot_ptr = self.free_head.get();
278
279        if slot_ptr.is_null() {
280            return None;
281        }
282
283        // SAFETY: slot_ptr came from the freelist within this slab.
284        // The slot is vacant, so next_free is the active union field.
285        let next_free = unsafe { (*slot_ptr).get_next_free() };
286
287        // Update freelist head
288        self.free_head.set(next_free);
289
290        Some(slot_ptr)
291    }
292
293    /// Allocates a slot and writes the value.
294    ///
295    /// # Panics
296    ///
297    /// Panics if the slab is full.
298    #[inline]
299    pub fn alloc(&self, value: T) -> RawSlot<T> {
300        self.try_alloc(value)
301            .unwrap_or_else(|_| panic!("slab full"))
302    }
303
304    /// Tries to allocate a slot and write the value.
305    ///
306    /// Returns `Err(Full(value))` if the slab is at capacity.
307    #[inline]
308    pub fn try_alloc(&self, value: T) -> Result<RawSlot<T>, Full<T>> {
309        let Some(slot_ptr) = self.claim_ptr() else {
310            return Err(Full(value));
311        };
312
313        // SAFETY: Slot is claimed from freelist, we have exclusive access
314        unsafe {
315            (*slot_ptr).write_value(value);
316        }
317
318        // SAFETY: slot_ptr is valid and occupied
319        Ok(unsafe { RawSlot::from_ptr(slot_ptr) })
320    }
321
322    /// Frees a slot, dropping the value and returning storage to the freelist.
323    ///
324    /// # Safety
325    ///
326    /// - `slot` must have been allocated from **this** slab
327    /// - No references to the slot's value may exist
328    #[inline]
329    #[allow(clippy::needless_pass_by_value)]
330    pub unsafe fn free(&self, slot: RawSlot<T>) {
331        let slot_ptr = slot.into_ptr();
332        debug_assert!(
333            self.contains_ptr(slot_ptr as *const ()),
334            "slot was not allocated from this slab"
335        );
336        // SAFETY: Caller guarantees slot is valid and occupied
337        unsafe {
338            (*slot_ptr).drop_value_in_place();
339            self.free_ptr(slot_ptr);
340        }
341    }
342
343    /// Frees a slot and returns the value without dropping it.
344    ///
345    /// # Safety
346    ///
347    /// - `slot` must have been allocated from **this** slab
348    /// - No references to the slot's value may exist
349    #[inline]
350    #[allow(clippy::needless_pass_by_value)]
351    pub unsafe fn take(&self, slot: RawSlot<T>) -> T {
352        let slot_ptr = slot.into_ptr();
353        debug_assert!(
354            self.contains_ptr(slot_ptr as *const ()),
355            "slot was not allocated from this slab"
356        );
357        // SAFETY: Caller guarantees slot is valid and occupied
358        unsafe {
359            let value = (*slot_ptr).read_value();
360            self.free_ptr(slot_ptr);
361            value
362        }
363    }
364
365    /// Returns a slot to the freelist by pointer.
366    ///
367    /// Does NOT drop the value — caller must drop before calling.
368    ///
369    /// # Safety
370    ///
371    /// - `slot_ptr` must point to a slot within this slab
372    /// - Value must already be dropped or moved out
373    #[doc(hidden)]
374    #[inline]
375    pub unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
376        debug_assert!(
377            self.contains_ptr(slot_ptr as *const ()),
378            "slot was not allocated from this slab"
379        );
380        let free_head = self.free_head.get();
381        // SAFETY: Caller guarantees slot_ptr is valid, transitioning to vacant
382        unsafe {
383            (*slot_ptr).set_next_free(free_head);
384        }
385        self.free_head.set(slot_ptr);
386    }
387}
389impl<T> Default for Slab<T> {
390    fn default() -> Self {
391        Self::new()
392    }
393}
395impl<T> fmt::Debug for Slab<T> {
396    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
397        f.debug_struct("Slab")
398            .field("capacity", &self.capacity.get())
399            .finish()
400    }
401}

// =============================================================================
// Tests
// =============================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

    #[test]
    fn slab_basic() {
        let slab = Slab::<u64>::with_capacity(100);
        assert_eq!(slab.capacity(), 100);

        let handle = slab.alloc(42);
        assert_eq!(*handle, 42);
        // SAFETY: handle came from this slab
        unsafe { slab.free(handle) };
    }

    #[test]
    fn slab_full() {
        let slab = Slab::<u64>::with_capacity(2);
        let a = slab.alloc(1);
        let b = slab.alloc(2);

        // A third allocation must fail and hand the value back intact.
        match slab.try_alloc(3) {
            Ok(_) => panic!("expected Full"),
            Err(full) => assert_eq!(full.into_inner(), 3),
        }

        // SAFETY: both handles came from this slab
        unsafe {
            slab.free(a);
            slab.free(b);
        }
    }

    #[test]
    fn slot_deref_mut() {
        let slab = Slab::<String>::with_capacity(10);
        let mut s = slab.alloc(String::from("hello"));
        s.push_str(" world");
        assert_eq!(&*s, "hello world");
        // SAFETY: s came from this slab
        unsafe { slab.free(s) };
    }

    #[test]
    fn slot_dealloc_take() {
        let slab = Slab::<String>::with_capacity(10);
        let s = slab.alloc(String::from("hello"));

        // SAFETY: s came from this slab; take() returns the value instead
        // of dropping it, and no free() is needed afterwards.
        assert_eq!(unsafe { slab.take(s) }, "hello");
    }

    #[test]
    fn slot_size() {
        // The handle is a single pointer (on 64-bit targets).
        assert_eq!(std::mem::size_of::<RawSlot<u64>>(), 8);
    }

    #[test]
    fn slab_debug() {
        let slab = Slab::<u64>::with_capacity(10);
        let handle = slab.alloc(42);
        let rendered = format!("{:?}", slab);
        assert!(rendered.contains("Slab"));
        assert!(rendered.contains("capacity"));
        // SAFETY: handle came from this slab
        unsafe { slab.free(handle) };
    }

    #[test]
    fn borrow_traits() {
        let slab = Slab::<u64>::with_capacity(10);
        let mut handle = slab.alloc(42);

        let shared: &u64 = handle.borrow();
        assert_eq!(*shared, 42);

        let exclusive: &mut u64 = handle.borrow_mut();
        *exclusive = 100;
        assert_eq!(*handle, 100);

        // SAFETY: handle came from this slab
        unsafe { slab.free(handle) };
    }

    #[cfg(debug_assertions)]
    #[test]
    fn rawslot_debug_drop_panics() {
        let outcome = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            let slab = Slab::<u64>::with_capacity(10);
            let _leaked = slab.alloc(42u64);
            // dropped here without slab.free() — should panic in debug builds
        }));
        assert!(
            outcome.is_err(),
            "RawSlot should panic on drop in debug mode"
        );
    }

    #[test]
    fn capacity_one() {
        let slab = Slab::<u64>::with_capacity(1);
        assert_eq!(slab.capacity(), 1);

        let first = slab.alloc(42);
        assert!(slab.try_alloc(100).is_err());
        // SAFETY: first came from this slab
        unsafe { slab.free(first) };

        // The freed slot must be immediately reusable.
        let second = slab.alloc(100);
        assert_eq!(*second, 100);
        // SAFETY: second came from this slab
        unsafe { slab.free(second) };
    }
}