// nexus_slab/unbounded.rs
//! Growable slab allocator.
//!
//! This module provides an unbounded (growable) slab allocator.
//! Growth happens by adding independent chunks — no copying.
//!
//! # Example
//!
//! ```
//! use nexus_slab::unbounded::Slab;
//!
//! let slab = Slab::with_chunk_capacity(4096);
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! // SAFETY: slot was allocated from this slab
//! unsafe { slab.free(slot) };
//! ```
17
use std::cell::Cell;
use std::fmt;
use std::mem::{self, ManuallyDrop, MaybeUninit};
use std::ptr;

use crate::bounded::Slab as BoundedSlab;
use crate::shared::{Slot, SlotCell};
25
26// =============================================================================
27// Claim
28// =============================================================================
29
/// A claimed slot that has not yet been written to.
///
/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
/// to complete the allocation. If dropped without calling `write()`, the slot
/// is returned to the freelist.
///
/// The `write()` method is `#[inline]`, enabling the compiler to potentially
/// optimize the value write as a placement new (constructing directly into
/// the slot memory).
pub struct Claim<'a, T> {
    // Raw pointer to the claimed, still-vacant slot cell.
    slot_ptr: *mut SlotCell<T>,
    // Owning slab; needed by Drop to return the slot to a freelist.
    slab: &'a Slab<T>,
    // Index of the chunk that owns `slot_ptr`, so Drop can find the
    // right per-chunk freelist without scanning.
    chunk_idx: usize,
}
44
45impl<T> Claim<'_, T> {
46    /// Writes the value to the claimed slot and returns the [`Slot`] handle.
47    ///
48    /// This consumes the claim. The value is written directly to the slot's
49    /// memory, which may enable placement new optimization.
50    #[inline]
51    pub fn write(self, value: T) -> Slot<T> {
52        let slot_ptr = self.slot_ptr;
53        // SAFETY: We own this slot from claim(), it's valid and vacant
54        unsafe {
55            (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
56        }
57        // Don't run Drop - we're completing the allocation
58        mem::forget(self);
59        // SAFETY: slot_ptr is valid and now occupied
60        unsafe { Slot::from_ptr(slot_ptr) }
61    }
62}
63
impl<T> Drop for Claim<'_, T> {
    /// Returns an abandoned (never-written) claim's slot to its chunk's freelist.
    fn drop(&mut self) {
        // Abandoned claim - return slot to the correct chunk's freelist
        let chunk = self.slab.chunk(self.chunk_idx);
        let chunk_slab = &*chunk.inner;

        let free_head = chunk_slab.free_head.get();
        // A null free_head means the chunk had zero vacant slots, i.e. it was
        // unlinked from the slab's available-space list when it filled up.
        let was_full = free_head.is_null();

        // SAFETY: slot_ptr is valid and still vacant (never written to)
        unsafe {
            (*self.slot_ptr).next_free = free_head;
        }
        chunk_slab.free_head.set(self.slot_ptr);

        // If chunk was full, add it back to the available-space list
        if was_full {
            chunk.next_with_space.set(self.slab.head_with_space.get());
            self.slab.head_with_space.set(self.chunk_idx);
        }
    }
}
86
87// =============================================================================
88// Constants
89// =============================================================================
90
/// Sentinel chunk index terminating the intrusive "chunks with free slots"
/// list (`Slab::head_with_space` / `ChunkEntry::next_with_space`).
const CHUNK_NONE: usize = usize::MAX;
93
94// =============================================================================
95// ChunkEntry
96// =============================================================================
97
/// Internal wrapper for a chunk in the growable slab.
struct ChunkEntry<T> {
    // The chunk's slot storage. Boxed so slot pointers stay stable even when
    // the outer `chunks` Vec reallocates during growth.
    inner: Box<BoundedSlab<T>>,
    // Next chunk index in the "has free slots" list, or `CHUNK_NONE`.
    next_with_space: Cell<usize>,
}
103
104// =============================================================================
105// Slab
106// =============================================================================
107
/// Growable slab allocator.
///
/// Uses independent chunks for growth — no copying when the slab grows.
///
/// # Const Construction
///
/// Supports const construction via [`new()`](Self::new) followed by
/// runtime initialization via [`init()`](Self::init). This enables use with
/// `thread_local!` using the `const { }` block syntax for zero-overhead TLS access.
///
/// ```ignore
/// thread_local! {
///     static SLAB: Slab<MyType> = const { Slab::new() };
/// }
///
/// // Later, at runtime:
/// SLAB.with(|s| s.init(4096));
/// ```
///
/// For direct usage, prefer [`with_chunk_capacity()`](Self::with_chunk_capacity).
pub struct Slab<T> {
    // Chunk storage. UnsafeCell permits growth through `&self`; the accessors
    // rely on single-threaded use (see the SAFETY comments on chunks()).
    chunks: std::cell::UnsafeCell<Vec<ChunkEntry<T>>>,
    // Slots per chunk; 0 means "not yet initialized" (see init()).
    chunk_capacity: Cell<usize>,
    // Head of the intrusive list of chunks with free slots, or CHUNK_NONE.
    head_with_space: Cell<usize>,
}
133
134impl<T> Slab<T> {
135    /// Creates an empty, uninitialized slab.
136    ///
137    /// This is a const function that performs no allocation. Call [`init()`](Self::init)
138    /// to configure chunk capacity before use.
139    ///
140    /// For direct usage, prefer [`with_chunk_capacity()`](Self::with_chunk_capacity).
141    ///
142    /// # Example
143    ///
144    /// ```ignore
145    /// // For use with thread_local! const initialization
146    /// thread_local! {
147    ///     static SLAB: Slab<u64> = const { Slab::new() };
148    /// }
149    /// ```
150    #[inline]
151    pub const fn new() -> Self {
152        Self {
153            chunks: std::cell::UnsafeCell::new(Vec::new()),
154            chunk_capacity: Cell::new(0),
155            head_with_space: Cell::new(CHUNK_NONE),
156        }
157    }
158
159    /// Creates a new slab with the given chunk capacity.
160    ///
161    /// Chunks are allocated on-demand when slots are requested.
162    ///
163    /// # Panics
164    ///
165    /// Panics if chunk_capacity is zero.
166    #[inline]
167    pub fn with_chunk_capacity(chunk_capacity: usize) -> Self {
168        let slab = Self::new();
169        slab.init(chunk_capacity);
170        slab
171    }
172
173    /// Initializes the slab with the given chunk capacity.
174    ///
175    /// This configures the chunk parameters. Chunks are allocated on-demand
176    /// when slots are requested. Must be called exactly once before any allocations.
177    ///
178    /// # Panics
179    ///
180    /// - Panics if the slab is already initialized (chunk_capacity > 0)
181    /// - Panics if chunk_capacity is zero
182    pub fn init(&self, chunk_capacity: usize) {
183        assert!(self.chunk_capacity.get() == 0, "Slab already initialized");
184        assert!(chunk_capacity > 0, "chunk_capacity must be non-zero");
185
186        self.chunk_capacity.set(chunk_capacity);
187    }
188
189    /// Returns true if the slab has been initialized.
190    #[inline]
191    pub fn is_initialized(&self) -> bool {
192        self.chunk_capacity.get() > 0
193    }
194
195    /// Returns the total capacity across all chunks.
196    #[inline]
197    pub fn capacity(&self) -> usize {
198        self.chunks().len() * self.chunk_capacity.get()
199    }
200
201    /// Returns the chunk capacity.
202    #[inline]
203    pub fn chunk_capacity(&self) -> usize {
204        self.chunk_capacity.get()
205    }
206
207    #[inline]
208    fn chunks(&self) -> &Vec<ChunkEntry<T>> {
209        // SAFETY: Single-threaded access guaranteed by !Send
210        unsafe { &*self.chunks.get() }
211    }
212
213    #[inline]
214    #[allow(clippy::mut_from_ref)]
215    fn chunks_mut(&self) -> &mut Vec<ChunkEntry<T>> {
216        // SAFETY: Single-threaded access guaranteed by !Send
217        unsafe { &mut *self.chunks.get() }
218    }
219
220    fn chunk(&self, chunk_idx: usize) -> &ChunkEntry<T> {
221        let chunks = self.chunks();
222        debug_assert!(chunk_idx < chunks.len());
223        unsafe { chunks.get_unchecked(chunk_idx) }
224    }
225
226    /// Returns the number of allocated chunks.
227    #[inline]
228    pub fn chunk_count(&self) -> usize {
229        self.chunks().len()
230    }
231
232    /// Ensures at least `count` chunks are allocated.
233    ///
234    /// No-op if the slab already has `count` or more chunks. Only allocates
235    /// the difference.
236    pub fn reserve_chunks(&self, count: usize) {
237        let current = self.chunks().len();
238        for _ in current..count {
239            self.grow();
240        }
241    }
242
243    /// Grows the slab by adding a single new chunk.
244    fn grow(&self) {
245        let chunks = self.chunks_mut();
246        let chunk_idx = chunks.len();
247        let inner = Box::new(BoundedSlab::with_capacity(self.chunk_capacity.get()));
248
249        let entry = ChunkEntry {
250            inner,
251            next_with_space: Cell::new(self.head_with_space.get()),
252        };
253
254        chunks.push(entry);
255        self.head_with_space.set(chunk_idx);
256    }
257
258    // =========================================================================
259    // Allocation API
260    // =========================================================================
261
262    /// Claims a slot from the freelist without writing a value.
263    ///
264    /// Always succeeds — grows the slab if needed. The returned [`Claim`]
265    /// must be consumed via [`Claim::write()`] to complete the allocation.
266    ///
267    /// This two-phase allocation enables placement new optimization: the
268    /// value can be constructed directly into the slot memory.
269    ///
270    /// # Example
271    ///
272    /// ```
273    /// use nexus_slab::unbounded::Slab;
274    ///
275    /// let slab = Slab::with_chunk_capacity(16);
276    /// let claim = slab.claim();
277    /// let slot = claim.write(42u64);
278    /// assert_eq!(*slot, 42);
279    /// // SAFETY: slot was allocated from this slab
280    /// unsafe { slab.free(slot) };
281    /// ```
282    #[inline]
283    pub fn claim(&self) -> Claim<'_, T> {
284        let (slot_ptr, chunk_idx) = self.claim_ptr();
285        Claim {
286            slot_ptr,
287            slab: self,
288            chunk_idx,
289        }
290    }
291
292    /// Claims a slot from the freelist, returning the raw pointer and chunk index.
293    ///
294    /// Always succeeds — grows the slab if needed. This is a low-level API for
295    /// macro-generated code that needs to escape TLS closures.
296    ///
297    /// # Safety Contract
298    ///
299    /// The caller MUST either:
300    /// - Write a value to the slot and use it as an allocated slot, OR
301    /// - Return the pointer to the freelist via `free_ptr()` if abandoning
302    #[doc(hidden)]
303    #[inline]
304    pub fn claim_ptr(&self) -> (*mut SlotCell<T>, usize) {
305        // Ensure we have space (grow if needed)
306        if self.head_with_space.get() == CHUNK_NONE {
307            self.grow();
308        }
309
310        // Get the chunk with space
311        let chunk_idx = self.head_with_space.get();
312        let chunk = self.chunk(chunk_idx);
313        let chunk_slab = &*chunk.inner;
314
315        // Load freelist head pointer from chunk
316        let slot_ptr = chunk_slab.free_head.get();
317        debug_assert!(!slot_ptr.is_null(), "chunk on freelist has no free slots");
318
319        // SAFETY: slot_ptr came from the freelist. Slot is vacant, so next_free is active.
320        let next_free = unsafe { (*slot_ptr).next_free };
321
322        // Update chunk's freelist head
323        chunk_slab.free_head.set(next_free);
324
325        // If chunk is now full, remove from slab's available-chunk list
326        if next_free.is_null() {
327            self.head_with_space.set(chunk.next_with_space.get());
328        }
329
330        (slot_ptr, chunk_idx)
331    }
332
333    /// Allocates a slot and writes the value.
334    ///
335    /// Always succeeds — grows the slab if needed.
336    #[inline]
337    pub fn alloc(&self, value: T) -> Slot<T> {
338        // Ensure we have space (grow if needed)
339        if self.head_with_space.get() == CHUNK_NONE {
340            self.grow();
341        }
342
343        // Get the chunk with space
344        let chunk_idx = self.head_with_space.get();
345        let chunk = self.chunk(chunk_idx);
346        let chunk_slab = &*chunk.inner;
347
348        // Load freelist head pointer from chunk
349        let slot_ptr = chunk_slab.free_head.get();
350        debug_assert!(!slot_ptr.is_null(), "chunk on freelist has no free slots");
351
352        // SAFETY: slot_ptr came from the freelist. Slot is vacant, so next_free is active.
353        let next_free = unsafe { (*slot_ptr).next_free };
354
355        // Write the value — overwrites next_free (union semantics)
356        // SAFETY: Slot is claimed from freelist, we have exclusive access
357        unsafe {
358            (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
359        }
360
361        // Update chunk's freelist head
362        chunk_slab.free_head.set(next_free);
363
364        // If chunk is now full, remove from slab's available-chunk list
365        if next_free.is_null() {
366            self.head_with_space.set(chunk.next_with_space.get());
367        }
368
369        // SAFETY: slot_ptr is valid and occupied
370        unsafe { Slot::from_ptr(slot_ptr) }
371    }
372
373    /// Frees a slot, dropping the value and returning storage to the freelist.
374    ///
375    /// # Performance
376    ///
377    /// O(n) where n = chunk count, due to chunk lookup. Typically 1-5 chunks.
378    ///
379    /// # Safety
380    ///
381    /// - `slot` must have been allocated from **this** slab
382    /// - No references to the slot's value may exist
383    #[inline]
384    #[allow(clippy::needless_pass_by_value)]
385    pub unsafe fn free(&self, slot: Slot<T>) {
386        let slot_ptr = slot.as_ptr();
387        // SAFETY: Caller guarantees slot is valid and occupied
388        unsafe {
389            ptr::drop_in_place((*(*slot_ptr).value).as_mut_ptr());
390            self.free_ptr(slot_ptr);
391        }
392    }
393
394    /// Frees a slot and returns the value without dropping it.
395    ///
396    /// # Performance
397    ///
398    /// O(n) where n = chunk count, due to chunk lookup. Typically 1-5 chunks.
399    ///
400    /// # Safety
401    ///
402    /// - `slot` must have been allocated from **this** slab
403    /// - No references to the slot's value may exist
404    #[inline]
405    #[allow(clippy::needless_pass_by_value)]
406    pub unsafe fn take(&self, slot: Slot<T>) -> T {
407        let slot_ptr = slot.as_ptr();
408        // SAFETY: Caller guarantees slot is valid and occupied
409        unsafe {
410            let value = ptr::read((*slot_ptr).value.as_ptr());
411            self.free_ptr(slot_ptr);
412            value
413        }
414    }
415
416    /// Returns a slot to the freelist by pointer.
417    ///
418    /// Does NOT drop the value — caller must drop before calling.
419    /// Finds the owning chunk via linear scan (typically 1-5 chunks).
420    ///
421    /// # Safety
422    ///
423    /// - `slot_ptr` must point to a slot within this slab
424    /// - Value must already be dropped or moved out
425    #[doc(hidden)]
426    pub unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
427        let chunks = self.chunks();
428        let cap = self.chunk_capacity.get();
429
430        // Find which chunk owns this pointer
431        for (chunk_idx, chunk) in chunks.iter().enumerate() {
432            let chunk_slab = &*chunk.inner;
433            let base = chunk_slab.slots_ptr();
434            let end = base.wrapping_add(cap);
435
436            if slot_ptr >= base && slot_ptr < end {
437                let free_head = chunk_slab.free_head.get();
438                let was_full = free_head.is_null();
439
440                // SAFETY: slot_ptr is within this chunk's range
441                unsafe {
442                    (*slot_ptr).next_free = free_head;
443                }
444                chunk_slab.free_head.set(slot_ptr);
445
446                if was_full {
447                    chunk.next_with_space.set(self.head_with_space.get());
448                    self.head_with_space.set(chunk_idx);
449                }
450                return;
451            }
452        }
453
454        debug_assert!(false, "free_ptr: slot_ptr not found in any chunk");
455    }
456}
457
impl<T> Default for Slab<T> {
    /// Equivalent to [`Slab::new()`]: an uninitialized slab that must be
    /// `init()`-ed before use.
    fn default() -> Self {
        Self::new()
    }
}
463
impl<T> fmt::Debug for Slab<T> {
    /// Reports only the total capacity; slot contents are not traversed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Slab")
            .field("capacity", &self.capacity())
            .finish()
    }
}
471
472// =============================================================================
473// Tests
474// =============================================================================
475
#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

    #[test]
    fn slab_basic() {
        let slab = Slab::<u64>::with_chunk_capacity(16);

        let handle = slab.alloc(42);
        assert_eq!(*handle, 42);
        // SAFETY: handle was allocated from this slab
        unsafe { slab.free(handle) };
    }

    #[test]
    fn slab_grows() {
        let slab = Slab::<u64>::with_chunk_capacity(4);

        // Allocate more slots than one chunk holds to force growth.
        let handles: Vec<_> = (0..10).map(|i| slab.alloc(i)).collect();

        assert!(slab.capacity() >= 10);

        // Release everything.
        for handle in handles {
            // SAFETY: each handle was allocated from this slab
            unsafe { slab.free(handle) };
        }
    }

    #[test]
    fn slot_deref_mut() {
        let slab = Slab::<String>::with_chunk_capacity(16);
        let mut handle = slab.alloc("hello".to_string());
        handle.push_str(" world");
        assert_eq!(&*handle, "hello world");
        // SAFETY: handle was allocated from this slab
        unsafe { slab.free(handle) };
    }

    #[test]
    fn slot_dealloc_take() {
        let slab = Slab::<String>::with_chunk_capacity(16);
        let handle = slab.alloc("hello".to_string());

        // SAFETY: handle was allocated from this slab
        let taken = unsafe { slab.take(handle) };
        assert_eq!(taken, "hello");
    }

    #[test]
    fn chunk_freelist_maintenance() {
        let slab = Slab::<u64>::with_chunk_capacity(2);

        // Fill the first chunk completely.
        let a = slab.alloc(1);
        let b = slab.alloc(2);
        // Third allocation must grow a second chunk.
        let c = slab.alloc(3);

        // Freeing into the full first chunk should re-link it as available.
        // SAFETY: a was allocated from this slab
        unsafe { slab.free(a) };

        // This allocation should reuse the slot freed above.
        let d = slab.alloc(4);

        // SAFETY: all remaining handles were allocated from this slab
        unsafe {
            slab.free(b);
            slab.free(c);
            slab.free(d);
        }
    }

    #[test]
    fn slot_size() {
        assert_eq!(std::mem::size_of::<Slot<u64>>(), 8);
    }

    #[test]
    fn borrow_traits() {
        let slab = Slab::<u64>::with_chunk_capacity(16);
        let mut handle = slab.alloc(42);

        let shared: &u64 = handle.borrow();
        assert_eq!(*shared, 42);

        let exclusive: &mut u64 = handle.borrow_mut();
        *exclusive = 100;
        assert_eq!(*handle, 100);

        // SAFETY: handle was allocated from slab
        unsafe { slab.free(handle) };
    }
}