// nexus_slab/bounded.rs
//! Fixed-capacity slab allocator.
//!
//! This module provides a bounded (fixed-capacity) slab allocator.
//!
//! # Example
//!
//! ```
//! use nexus_slab::bounded::Slab;
//!
//! let slab = Slab::with_capacity(1024);
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! // SAFETY: slot was allocated from this slab
//! unsafe { slab.free(slot) };
//! ```

use std::cell::Cell;
use std::fmt;
use std::mem::{self, ManuallyDrop, MaybeUninit};
use std::ptr;

use crate::alloc::Full;
use crate::shared::{RawSlot, SlotCell};

// =============================================================================
// Claim
// =============================================================================

29/// A claimed slot that has not yet been written to.
30///
31/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
32/// to complete the allocation. If dropped without calling `write()`, the slot
33/// is returned to the freelist.
34///
35/// The `write()` method is `#[inline]`, enabling the compiler to potentially
36/// optimize the value write as a placement new (constructing directly into
37/// the slot memory).
38pub struct Claim<'a, T> {
39    slot_ptr: *mut SlotCell<T>,
40    slab: &'a Slab<T>,
41}
42
43impl<T> Claim<'_, T> {
44    /// Writes the value to the claimed slot and returns the [`RawSlot`] handle.
45    ///
46    /// This consumes the claim. The value is written directly to the slot's
47    /// memory, which may enable placement new optimization.
48    #[inline]
49    pub fn write(self, value: T) -> RawSlot<T> {
50        let slot_ptr = self.slot_ptr;
51        // SAFETY: We own this slot from claim(), it's valid and vacant
52        unsafe {
53            (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
54        }
55        // Don't run Drop - we're completing the allocation
56        mem::forget(self);
57        // SAFETY: slot_ptr is valid and now occupied
58        unsafe { RawSlot::from_ptr(slot_ptr) }
59    }
60}
61
62impl<T> Drop for Claim<'_, T> {
63    fn drop(&mut self) {
64        // Abandoned claim - return slot to freelist
65        // SAFETY: slot_ptr is valid and still vacant (never written to)
66        let free_head = self.slab.free_head.get();
67        unsafe {
68            (*self.slot_ptr).next_free = free_head;
69        }
70        self.slab.free_head.set(self.slot_ptr);
71    }
72}
73
// =============================================================================
// Slab
// =============================================================================

78/// Fixed-capacity slab allocator.
79///
80/// Uses pointer-based freelist for O(1) allocation.
81///
82/// # Const Construction
83///
84/// Supports const construction via [`new()`](Self::new) followed by
85/// runtime initialization via [`init()`](Self::init). This enables use with
86/// `thread_local!` using the `const { }` block syntax for zero-overhead TLS access.
87///
88/// ```ignore
89/// thread_local! {
90///     static SLAB: Slab<MyType> = const { Slab::new() };
91/// }
92///
93/// // Later, at runtime:
94/// SLAB.with(|s| s.init(1024));
95/// ```
96///
97/// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
98pub struct Slab<T> {
99    /// Slot storage. Wrapped in UnsafeCell for interior mutability.
100    slots: std::cell::UnsafeCell<Vec<SlotCell<T>>>,
101    /// Capacity. Wrapped in Cell so it can be set during init.
102    capacity: Cell<usize>,
103    /// Head of freelist - raw pointer for fast allocation.
104    /// NULL when slab is full or uninitialized.
105    #[doc(hidden)]
106    pub free_head: Cell<*mut SlotCell<T>>,
107}
108
109impl<T> Slab<T> {
110    /// Creates an empty, uninitialized slab.
111    ///
112    /// This is a const function that performs no allocation. Call [`init()`](Self::init)
113    /// to allocate storage before use.
114    ///
115    /// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
116    ///
117    /// # Example
118    ///
119    /// ```ignore
120    /// // For use with thread_local! const initialization
121    /// thread_local! {
122    ///     static SLAB: Slab<u64> = const { Slab::new() };
123    /// }
124    /// ```
125    #[inline]
126    pub const fn new() -> Self {
127        Self {
128            slots: std::cell::UnsafeCell::new(Vec::new()),
129            capacity: Cell::new(0),
130            free_head: Cell::new(ptr::null_mut()),
131        }
132    }
133
134    /// Creates a new slab with the given capacity.
135    ///
136    /// # Panics
137    ///
138    /// Panics if capacity is zero.
139    #[inline]
140    pub fn with_capacity(capacity: usize) -> Self {
141        let slab = Self::new();
142        slab.init(capacity);
143        slab
144    }
145
146    /// Initializes the slab with the given capacity.
147    ///
148    /// This allocates slot storage and builds the freelist. Must be called
149    /// exactly once before any allocations.
150    ///
151    /// # Panics
152    ///
153    /// - Panics if the slab is already initialized (capacity > 0)
154    /// - Panics if capacity is zero
155    pub fn init(&self, capacity: usize) {
156        assert!(self.capacity.get() == 0, "Slab already initialized");
157        assert!(capacity > 0, "capacity must be non-zero");
158
159        // SAFETY: We have &self and verified capacity == 0, so no other code
160        // can be accessing slots. This is the only mutation point.
161        let slots = unsafe { &mut *self.slots.get() };
162
163        // Allocate slots — all initially vacant
164        slots.reserve_exact(capacity);
165        for _ in 0..capacity {
166            slots.push(SlotCell::vacant(ptr::null_mut()));
167        }
168
169        // Wire up the freelist: each slot's next_free points to the next slot
170        for i in 0..(capacity - 1) {
171            let next_ptr = slots.as_mut_ptr().wrapping_add(i + 1);
172            slots[i].next_free = next_ptr;
173        }
174        // Last slot points to NULL (end of freelist) — already null from vacant()
175
176        let free_head = slots.as_mut_ptr();
177        self.capacity.set(capacity);
178        self.free_head.set(free_head);
179    }
180
181    /// Returns true if the slab has been initialized.
182    #[inline]
183    pub fn is_initialized(&self) -> bool {
184        self.capacity.get() > 0
185    }
186
187    /// Returns the capacity.
188    #[inline]
189    pub fn capacity(&self) -> usize {
190        self.capacity.get()
191    }
192
193    /// Returns the base pointer to the slots array.
194    #[inline]
195    pub(crate) fn slots_ptr(&self) -> *mut SlotCell<T> {
196        // SAFETY: We're returning a pointer for use with raw pointer access
197        let slots = unsafe { &*self.slots.get() };
198        slots.as_ptr().cast_mut()
199    }
200
201    /// Returns `true` if `ptr` falls within this slab's slot array.
202    ///
203    /// O(1) range check. Used in `debug_assert!` to validate provenance.
204    #[doc(hidden)]
205    #[inline]
206    pub fn contains_ptr(&self, ptr: *const ()) -> bool {
207        let base = self.slots_ptr() as usize;
208        let end = base + self.capacity.get() * std::mem::size_of::<SlotCell<T>>();
209        let addr = ptr as usize;
210        addr >= base && addr < end
211    }
212
213    // =========================================================================
214    // Allocation API
215    // =========================================================================
216
217    /// Claims a slot from the freelist without writing a value.
218    ///
219    /// Returns `None` if the slab is full. The returned [`Claim`] must be
220    /// consumed via [`Claim::write()`] to complete the allocation.
221    ///
222    /// This two-phase allocation enables placement new optimization: the
223    /// value can be constructed directly into the slot memory.
224    ///
225    /// # Example
226    ///
227    /// ```
228    /// use nexus_slab::bounded::Slab;
229    ///
230    /// let slab = Slab::with_capacity(10);
231    /// if let Some(claim) = slab.claim() {
232    ///     let slot = claim.write(42u64);
233    ///     assert_eq!(*slot, 42);
234    ///     // SAFETY: slot was allocated from this slab
235    ///     unsafe { slab.free(slot) };
236    /// }
237    /// ```
238    #[inline]
239    pub fn claim(&self) -> Option<Claim<'_, T>> {
240        self.claim_ptr().map(|slot_ptr| Claim {
241            slot_ptr,
242            slab: self,
243        })
244    }
245
246    /// Claims a slot from the freelist, returning the raw pointer.
247    ///
248    /// Returns `None` if the slab is full. This is a low-level API for
249    /// macro-generated code that needs to escape TLS closures.
250    ///
251    /// # Safety Contract
252    ///
253    /// The caller MUST either:
254    /// - Write a value to the slot and use it as an allocated slot, OR
255    /// - Return the pointer to the freelist via `free_ptr()` if abandoning
256    #[doc(hidden)]
257    #[inline]
258    pub fn claim_ptr(&self) -> Option<*mut SlotCell<T>> {
259        let slot_ptr = self.free_head.get();
260
261        if slot_ptr.is_null() {
262            return None;
263        }
264
265        // SAFETY: slot_ptr came from the freelist within this slab.
266        // The slot is vacant, so next_free is the active union field.
267        let next_free = unsafe { (*slot_ptr).next_free };
268
269        // Update freelist head
270        self.free_head.set(next_free);
271
272        Some(slot_ptr)
273    }
274
275    /// Allocates a slot and writes the value.
276    ///
277    /// # Panics
278    ///
279    /// Panics if the slab is full.
280    #[inline]
281    pub fn alloc(&self, value: T) -> RawSlot<T> {
282        self.try_alloc(value)
283            .unwrap_or_else(|_| panic!("slab full"))
284    }
285
286    /// Tries to allocate a slot and write the value.
287    ///
288    /// Returns `Err(Full(value))` if the slab is at capacity.
289    #[inline]
290    pub fn try_alloc(&self, value: T) -> Result<RawSlot<T>, Full<T>> {
291        let slot_ptr = self.free_head.get();
292
293        if slot_ptr.is_null() {
294            return Err(Full(value));
295        }
296
297        // SAFETY: slot_ptr came from the freelist within this slab.
298        // The slot is vacant, so next_free is the active union field.
299        let next_free = unsafe { (*slot_ptr).next_free };
300
301        // Write the value — this overwrites next_free (union semantics)
302        // SAFETY: Slot is claimed from freelist, we have exclusive access
303        unsafe {
304            (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
305        }
306
307        // Update freelist head
308        self.free_head.set(next_free);
309
310        // SAFETY: slot_ptr is valid and occupied
311        Ok(unsafe { RawSlot::from_ptr(slot_ptr) })
312    }
313
314    /// Frees a slot, dropping the value and returning storage to the freelist.
315    ///
316    /// # Safety
317    ///
318    /// - `slot` must have been allocated from **this** slab
319    /// - No references to the slot's value may exist
320    #[inline]
321    #[allow(clippy::needless_pass_by_value)]
322    pub unsafe fn free(&self, slot: RawSlot<T>) {
323        let slot_ptr = slot.into_ptr();
324        debug_assert!(
325            self.contains_ptr(slot_ptr as *const ()),
326            "slot was not allocated from this slab"
327        );
328        // SAFETY: Caller guarantees slot is valid and occupied
329        unsafe {
330            ptr::drop_in_place((*(*slot_ptr).value).as_mut_ptr());
331            self.free_ptr(slot_ptr);
332        }
333    }
334
335    /// Frees a slot and returns the value without dropping it.
336    ///
337    /// # Safety
338    ///
339    /// - `slot` must have been allocated from **this** slab
340    /// - No references to the slot's value may exist
341    #[inline]
342    #[allow(clippy::needless_pass_by_value)]
343    pub unsafe fn take(&self, slot: RawSlot<T>) -> T {
344        let slot_ptr = slot.into_ptr();
345        debug_assert!(
346            self.contains_ptr(slot_ptr as *const ()),
347            "slot was not allocated from this slab"
348        );
349        // SAFETY: Caller guarantees slot is valid and occupied
350        unsafe {
351            let value = ptr::read((*slot_ptr).value.as_ptr());
352            self.free_ptr(slot_ptr);
353            value
354        }
355    }
356
357    /// Returns a slot to the freelist by pointer.
358    ///
359    /// Does NOT drop the value — caller must drop before calling.
360    ///
361    /// # Safety
362    ///
363    /// - `slot_ptr` must point to a slot within this slab
364    /// - Value must already be dropped or moved out
365    #[doc(hidden)]
366    #[inline]
367    pub unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
368        debug_assert!(
369            self.contains_ptr(slot_ptr as *const ()),
370            "slot was not allocated from this slab"
371        );
372        let free_head = self.free_head.get();
373        // SAFETY: Caller guarantees slot_ptr is valid
374        unsafe {
375            (*slot_ptr).next_free = free_head;
376        }
377        self.free_head.set(slot_ptr);
378    }
379}
380
381impl<T> Default for Slab<T> {
382    fn default() -> Self {
383        Self::new()
384    }
385}
386
387impl<T> fmt::Debug for Slab<T> {
388    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
389        f.debug_struct("Slab")
390            .field("capacity", &self.capacity.get())
391            .finish()
392    }
393}
394
// =============================================================================
// Tests
// =============================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

    #[test]
    fn slab_basic() {
        let slab = Slab::<u64>::with_capacity(100);
        assert_eq!(slab.capacity(), 100);

        let slot = slab.alloc(42);
        assert_eq!(*slot, 42);
        // SAFETY: slot came from this slab.
        unsafe { slab.free(slot) };
    }

    #[test]
    fn slab_full() {
        let slab = Slab::<u64>::with_capacity(2);
        let first = slab.alloc(1);
        let second = slab.alloc(2);

        // A third allocation must fail and hand the value back intact.
        let result = slab.try_alloc(3);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err().into_inner(), 3);

        // SAFETY: both slots came from this slab.
        unsafe {
            slab.free(first);
            slab.free(second);
        }
    }

    #[test]
    fn slot_deref_mut() {
        let slab = Slab::<String>::with_capacity(10);
        let mut slot = slab.alloc("hello".to_string());
        slot.push_str(" world");
        assert_eq!(&*slot, "hello world");
        // SAFETY: slot came from this slab.
        unsafe { slab.free(slot) };
    }

    #[test]
    fn slot_dealloc_take() {
        let slab = Slab::<String>::with_capacity(10);
        let slot = slab.alloc("hello".to_string());

        // SAFETY: slot came from this slab; take() moves the value out.
        let value = unsafe { slab.take(slot) };
        assert_eq!(value, "hello");
    }

    #[test]
    fn slot_size() {
        // A RawSlot handle is a single pointer.
        assert_eq!(std::mem::size_of::<RawSlot<u64>>(), 8);
    }

    #[test]
    fn slab_debug() {
        let slab = Slab::<u64>::with_capacity(10);
        let slot = slab.alloc(42);
        let rendered = format!("{:?}", slab);
        assert!(rendered.contains("Slab"));
        assert!(rendered.contains("capacity"));
        // SAFETY: slot came from this slab.
        unsafe { slab.free(slot) };
    }

    #[test]
    fn borrow_traits() {
        let slab = Slab::<u64>::with_capacity(10);
        let mut slot = slab.alloc(42);

        let shared: &u64 = slot.borrow();
        assert_eq!(*shared, 42);

        let exclusive: &mut u64 = slot.borrow_mut();
        *exclusive = 100;
        assert_eq!(*slot, 100);

        // SAFETY: slot came from this slab.
        unsafe { slab.free(slot) };
    }

    #[cfg(debug_assertions)]
    #[test]
    fn rawslot_debug_drop_panics() {
        let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            let slab = Slab::<u64>::with_capacity(10);
            let _slot = slab.alloc(42u64);
            // _slot drops here without being freed.
        }));
        assert!(
            result.is_err(),
            "RawSlot should panic on drop in debug mode"
        );
    }

    #[test]
    fn capacity_one() {
        let slab = Slab::<u64>::with_capacity(1);
        assert_eq!(slab.capacity(), 1);

        let slot = slab.alloc(42);
        assert!(slab.try_alloc(100).is_err());

        // SAFETY: slot came from this slab.
        unsafe { slab.free(slot) };

        // The freed slot is immediately reusable.
        let slot2 = slab.alloc(100);
        assert_eq!(*slot2, 100);
        // SAFETY: slot2 came from this slab.
        unsafe { slab.free(slot2) };
    }
}