// nexus_slab/bounded.rs

//! Fixed-capacity slab allocator.
//!
//! This module provides a bounded (fixed-capacity) slab allocator.
//!
//! # Example
//!
//! ```
//! use nexus_slab::bounded::Slab;
//!
//! // SAFETY: caller guarantees slab contract (see struct docs)
//! let slab = unsafe { Slab::with_capacity(1024) };
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! slab.free(slot);
//! ```
16
use core::cell::Cell;
use core::fmt;
use core::mem;
use core::ptr;

use alloc::vec::Vec;

use crate::shared::{Full, Slot, SlotCell};

// =============================================================================
// Claim
// =============================================================================
29
/// A claimed slot that has not yet been written to.
///
/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
/// to complete the allocation. If dropped without calling `write()`, the slot
/// is returned to the freelist.
///
/// The `write()` method is `#[inline]`, enabling the compiler to potentially
/// optimize the value write as a placement new (constructing directly into
/// the slot memory).
pub struct Claim<'a, T> {
    // Raw pointer to the claimed (still vacant) slot; exclusively owned by
    // this claim until write()/into_ptr() consumes it or Drop returns it.
    slot_ptr: *mut SlotCell<T>,
    // Borrow of the owning slab, needed so Drop can push the slot back
    // onto that slab's freelist if the claim is abandoned.
    slab: &'a Slab<T>,
}
43
44impl<T> Claim<'_, T> {
45    /// Writes the value to the claimed slot and returns the [`Slot`] handle.
46    ///
47    /// This consumes the claim. The value is written directly to the slot's
48    /// memory, which may enable placement new optimization.
49    #[inline]
50    pub fn write(self, value: T) -> Slot<T> {
51        let slot_ptr = self.slot_ptr;
52        // SAFETY: We own this slot from claim(), it's valid and vacant
53        unsafe {
54            (*slot_ptr).write_value(value);
55        }
56        // Don't run Drop - we're completing the allocation
57        mem::forget(self);
58        // SAFETY: slot_ptr is valid and now occupied
59        unsafe { Slot::from_ptr(slot_ptr) }
60    }
61
62    /// Extract the raw slot pointer, consuming the claim without writing.
63    ///
64    /// Transfers ownership to the caller — the slot will NOT be returned
65    /// to the freelist on drop. The caller must either write a value and
66    /// eventually free it, or return the slot via `free_ptr()`.
67    #[inline]
68    pub(crate) fn into_ptr(self) -> *mut SlotCell<T> {
69        let ptr = self.slot_ptr;
70        mem::forget(self);
71        ptr
72    }
73}
74
75impl<T> fmt::Debug for Claim<'_, T> {
76    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
77        f.debug_struct("Claim")
78            .field("slot_ptr", &self.slot_ptr)
79            .finish()
80    }
81}
82
83impl<T> Drop for Claim<'_, T> {
84    fn drop(&mut self) {
85        // Abandoned claim - return slot to freelist
86        // SAFETY: slot_ptr is valid and still vacant (never written to)
87        let free_head = self.slab.free_head.get();
88        unsafe {
89            (*self.slot_ptr).set_next_free(free_head);
90        }
91        self.slab.free_head.set(self.slot_ptr);
92    }
93}
94
// =============================================================================
// Slab
// =============================================================================
98
/// Fixed-capacity slab allocator for manual memory management.
///
/// Uses pointer-based freelist for O(1) allocation. ~20-24 cycle operations.
///
/// # Safety Contract
///
/// Construction is `unsafe` because it opts you into manual memory
/// management. By creating a slab, you accept these invariants:
///
/// - **Free from the correct slab.** Passing a [`Slot`] to a different
///   slab's `free()` is undefined behavior — it corrupts the freelist.
///   In debug builds, this is caught by `debug_assert!`.
/// - **Free everything you allocate.** Dropping the slab does NOT drop
///   values in occupied slots. Unfreed slots leak silently.
/// - **Single-threaded.** The slab is `!Send` and `!Sync`.
///
/// ## Why `free()` is safe
///
/// The safety contract is accepted once, at construction. After that:
/// - [`Slot`] is move-only (no `Copy`, no `Clone`) — double-free is
///   prevented by the type system.
/// - `free()` consumes the `Slot` — the handle cannot be used after.
/// - Cross-slab misuse is the only remaining hazard, and it was
///   accepted as the caller's responsibility at construction time.
pub struct Slab<T> {
    /// Slot storage. Wrapped in UnsafeCell for interior mutability.
    /// NOTE: pointers into this Vec must be derived via `UnsafeCell::get`
    /// (not through a `&Vec`) to keep write provenance — see `slots_ptr()`.
    slots: core::cell::UnsafeCell<Vec<SlotCell<T>>>,
    /// Fixed capacity, set at construction. The backing Vec never grows.
    capacity: usize,
    /// Head of freelist — raw pointer for fast allocation.
    /// NULL when the slab is full.
    pub(crate) free_head: Cell<*mut SlotCell<T>>,
}
132
133impl<T> Slab<T> {
134    /// Creates a new slab with the given capacity.
135    ///
136    /// # Safety
137    ///
138    /// See [struct-level safety contract](Self).
139    ///
140    /// # Panics
141    ///
142    /// Panics if capacity is zero.
143    #[inline]
144    pub unsafe fn with_capacity(capacity: usize) -> Self {
145        assert!(capacity > 0, "capacity must be non-zero");
146
147        let mut slots = Vec::with_capacity(capacity);
148        for _ in 0..capacity {
149            slots.push(SlotCell::vacant(ptr::null_mut()));
150        }
151
152        // Wrap in UnsafeCell BEFORE wiring the freelist so all pointers
153        // are derived with write provenance from the UnsafeCell. Deriving
154        // pointers from the owned Vec and then moving into UnsafeCell gives
155        // them stale (read-only) provenance under stacked borrows.
156        let slots = core::cell::UnsafeCell::new(slots);
157        let base = unsafe { (*slots.get()).as_mut_ptr() };
158
159        // Wire up the freelist: each slot's next_free points to the next slot
160        for i in 0..(capacity - 1) {
161            let next_ptr = base.wrapping_add(i + 1);
162            // SAFETY: Slot is vacant, wiring up the freelist during init.
163            // base is derived from UnsafeCell with write provenance.
164            unsafe { (*base.add(i)).set_next_free(next_ptr) };
165        }
166        // Last slot points to NULL (end of freelist) — already null from vacant()
167
168        Self {
169            slots,
170            capacity,
171            free_head: Cell::new(base),
172        }
173    }
174
175    /// Returns the capacity.
176    #[inline]
177    pub fn capacity(&self) -> usize {
178        self.capacity
179    }
180
181    /// Returns the base pointer to the slots array.
182    #[inline]
183    pub(crate) fn slots_ptr(&self) -> *mut SlotCell<T> {
184        // Derive from *mut Vec (via UnsafeCell::get) to preserve write provenance.
185        // Creating &Vec first would give read-only provenance — writes through
186        // the returned pointer would be UB under stacked/tree borrows.
187        unsafe { (*self.slots.get()).as_mut_ptr() }
188    }
189
190    /// Returns `true` if `ptr` falls within this slab's slot array.
191    ///
192    /// O(1) range check. Used in `debug_assert!` to validate provenance.
193    #[doc(hidden)]
194    #[inline]
195    pub fn contains_ptr(&self, ptr: *const ()) -> bool {
196        let base = self.slots_ptr() as usize;
197        let end = base + self.capacity * core::mem::size_of::<SlotCell<T>>();
198        let addr = ptr as usize;
199        addr >= base && addr < end
200    }
201
202    // =========================================================================
203    // Allocation API
204    // =========================================================================
205
206    /// Claims a slot from the freelist without writing a value.
207    ///
208    /// Returns `None` if the slab is full. The returned [`Claim`] must be
209    /// consumed via [`Claim::write()`] to complete the allocation.
210    ///
211    /// This two-phase allocation enables placement new optimization: the
212    /// value can be constructed directly into the slot memory.
213    ///
214    /// # Example
215    ///
216    /// ```
217    /// use nexus_slab::bounded::Slab;
218    ///
219    /// // SAFETY: caller guarantees slab contract (see struct docs)
220    /// let slab = unsafe { Slab::with_capacity(10) };
221    /// if let Some(claim) = slab.claim() {
222    ///     let slot = claim.write(42u64);
223    ///     assert_eq!(*slot, 42);
224    ///     slab.free(slot);
225    /// }
226    /// ```
227    #[inline]
228    pub fn claim(&self) -> Option<Claim<'_, T>> {
229        self.claim_ptr().map(|slot_ptr| Claim {
230            slot_ptr,
231            slab: self,
232        })
233    }
234
235    /// Claims a slot from the freelist, returning the raw pointer.
236    ///
237    /// Returns `None` if the slab is full. This is a low-level API for
238    /// macro-generated code that needs to escape TLS closures.
239    ///
240    /// # Safety Contract
241    ///
242    /// The caller MUST either:
243    /// - Write a value to the slot and use it as an allocated slot, OR
244    /// - Return the pointer to the freelist via `free_ptr()` if abandoning
245    #[doc(hidden)]
246    #[inline]
247    pub(crate) fn claim_ptr(&self) -> Option<*mut SlotCell<T>> {
248        let slot_ptr = self.free_head.get();
249
250        if slot_ptr.is_null() {
251            return None;
252        }
253
254        // SAFETY: slot_ptr came from the freelist within this slab.
255        // The slot is vacant, so next_free is the active union field.
256        let next_free = unsafe { (*slot_ptr).get_next_free() };
257
258        // Update freelist head
259        self.free_head.set(next_free);
260
261        Some(slot_ptr)
262    }
263
264    /// Allocates a slot and writes the value.
265    ///
266    /// # Panics
267    ///
268    /// Panics if the slab is full.
269    #[inline]
270    pub fn alloc(&self, value: T) -> Slot<T> {
271        self.claim().expect("slab full").write(value)
272    }
273
274    /// Tries to allocate a slot and write the value.
275    ///
276    /// Returns `Err(Full(value))` if the slab is at capacity.
277    #[inline]
278    pub fn try_alloc(&self, value: T) -> Result<Slot<T>, Full<T>> {
279        match self.claim() {
280            Some(claim) => Ok(claim.write(value)),
281            None => Err(Full(value)),
282        }
283    }
284
285    /// Frees a slot, dropping the value and returning storage to the freelist.
286    ///
287    /// Consumes the handle — the slot cannot be used after this call.
288    /// The caller's safety obligation (free from the correct slab) was
289    /// accepted at construction time.
290    #[inline]
291    #[allow(clippy::needless_pass_by_value)]
292    pub fn free(&self, slot: Slot<T>) {
293        let slot_ptr = slot.into_raw();
294        debug_assert!(
295            self.contains_ptr(slot_ptr as *const ()),
296            "slot was not allocated from this slab"
297        );
298        // SAFETY: Caller guarantees slot is valid and occupied
299        unsafe {
300            (*slot_ptr).drop_value_in_place();
301            self.free_ptr(slot_ptr);
302        }
303    }
304
305    /// Frees a slot and returns the value without dropping it.
306    ///
307    /// Consumes the handle — the slot cannot be used after this call.
308    #[inline]
309    #[allow(clippy::needless_pass_by_value)]
310    pub fn take(&self, slot: Slot<T>) -> T {
311        let slot_ptr = slot.into_raw();
312        debug_assert!(
313            self.contains_ptr(slot_ptr as *const ()),
314            "slot was not allocated from this slab"
315        );
316        // SAFETY: Caller guarantees slot is valid and occupied
317        unsafe {
318            let value = (*slot_ptr).read_value();
319            self.free_ptr(slot_ptr);
320            value
321        }
322    }
323
324    /// Returns a slot to the freelist by pointer.
325    ///
326    /// Does NOT drop the value — caller must drop before calling.
327    ///
328    /// # Safety
329    ///
330    /// - `slot_ptr` must point to a slot within this slab
331    /// - Value must already be dropped or moved out
332    #[doc(hidden)]
333    #[inline]
334    pub(crate) unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
335        debug_assert!(
336            self.contains_ptr(slot_ptr as *const ()),
337            "slot was not allocated from this slab"
338        );
339        let free_head = self.free_head.get();
340        // SAFETY: Caller guarantees slot_ptr is valid, transitioning to vacant
341        unsafe {
342            (*slot_ptr).set_next_free(free_head);
343        }
344        self.free_head.set(slot_ptr);
345    }
346}
347
348impl<T> fmt::Debug for Slab<T> {
349    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
350        f.debug_struct("Slab")
351            .field("capacity", &self.capacity)
352            .finish()
353    }
354}
355
// =============================================================================
// Tests
// =============================================================================
359
#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

    #[test]
    fn slab_basic() {
        // SAFETY: test upholds the contract — every allocation is freed.
        let slab = unsafe { Slab::<u64>::with_capacity(100) };
        assert_eq!(slab.capacity(), 100);

        let handle = slab.alloc(42);
        assert_eq!(*handle, 42);
        slab.free(handle);
    }

    #[test]
    fn slab_full() {
        let slab = unsafe { Slab::<u64>::with_capacity(2) };
        let a = slab.alloc(1);
        let b = slab.alloc(2);

        // A third allocation must fail and hand the value back intact.
        let attempt = slab.try_alloc(3);
        assert!(attempt.is_err());
        assert_eq!(attempt.unwrap_err().into_inner(), 3);

        slab.free(a);
        slab.free(b);
    }

    #[test]
    fn slot_deref_mut() {
        let slab = unsafe { Slab::<String>::with_capacity(10) };
        let mut greeting = slab.alloc("hello".to_string());
        greeting.push_str(" world");
        assert_eq!(&*greeting, "hello world");
        slab.free(greeting);
    }

    #[test]
    fn slot_dealloc_take() {
        let slab = unsafe { Slab::<String>::with_capacity(10) };
        let handle = slab.alloc("hello".to_string());

        // take() moves the value out and recycles the slot storage.
        let moved_out = slab.take(handle);
        assert_eq!(moved_out, "hello");
    }

    #[test]
    fn slot_size() {
        // Slot is a single pointer wide.
        assert_eq!(std::mem::size_of::<Slot<u64>>(), 8);
    }

    #[test]
    fn slab_debug() {
        let slab = unsafe { Slab::<u64>::with_capacity(10) };
        let handle = slab.alloc(42);
        let rendered = format!("{:?}", slab);
        assert!(rendered.contains("Slab"));
        assert!(rendered.contains("capacity"));
        slab.free(handle);
    }

    #[test]
    fn borrow_traits() {
        let slab = unsafe { Slab::<u64>::with_capacity(10) };
        let mut handle = slab.alloc(42);

        let shared: &u64 = handle.borrow();
        assert_eq!(*shared, 42);

        let exclusive: &mut u64 = handle.borrow_mut();
        *exclusive = 100;
        assert_eq!(*handle, 100);

        slab.free(handle);
    }

    #[cfg(debug_assertions)]
    #[test]
    fn slot_debug_drop_panics() {
        let outcome = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            let slab = unsafe { Slab::<u64>::with_capacity(10) };
            let _leaked = slab.alloc(42u64);
            // _leaked drops here without being freed
        }));
        assert!(outcome.is_err(), "Slot should panic on drop in debug mode");
    }

    #[test]
    fn capacity_one() {
        let slab = unsafe { Slab::<u64>::with_capacity(1) };

        assert_eq!(slab.capacity(), 1);

        let only = slab.alloc(42);
        assert!(slab.try_alloc(100).is_err());

        slab.free(only);

        // The freed slot must be immediately reusable.
        let again = slab.alloc(100);
        assert_eq!(*again, 100);
        slab.free(again);
    }
}
464}