//! Fixed-capacity slab allocator.
//!
//! This module provides a bounded (fixed-capacity) slab allocator.
//!
//! # Example
//!
//! ```
//! use nexus_slab::bounded::Slab;
//!
//! // SAFETY: caller guarantees slab contract (see struct docs)
//! let slab = unsafe { Slab::with_capacity(1024) };
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! slab.free(slot);
//! ```

use core::cell::Cell;
use core::fmt;
use core::mem;
use core::ptr;

use alloc::vec::Vec;

use crate::shared::{Full, Slot, SlotCell};
// =============================================================================
// Claim
// =============================================================================

/// A claimed slot that has not yet been written to.
///
/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
/// to complete the allocation. If dropped without calling `write()`, the slot
/// is returned to the freelist.
///
/// The `write()` method is `#[inline]`, enabling the compiler to potentially
/// optimize the value write as a placement new (constructing directly into
/// the slot memory).
pub struct Claim<'a, T> {
    /// Pointer to the claimed (still vacant) slot inside the slab's storage.
    slot_ptr: *mut SlotCell<T>,
    /// The slab this slot was claimed from; used by `Drop` to return the
    /// slot to that slab's freelist if the claim is abandoned.
    slab: &'a Slab<T>,
}
43
44impl<T> Claim<'_, T> {
45    /// Writes the value to the claimed slot and returns the [`Slot`] handle.
46    ///
47    /// This consumes the claim. The value is written directly to the slot's
48    /// memory, which may enable placement new optimization.
49    #[inline]
50    pub fn write(self, value: T) -> Slot<T> {
51        let slot_ptr = self.slot_ptr;
52        // SAFETY: We own this slot from claim(), it's valid and vacant
53        unsafe {
54            (*slot_ptr).write_value(value);
55        }
56        // Don't run Drop - we're completing the allocation
57        mem::forget(self);
58        // SAFETY: slot_ptr is valid and now occupied
59        unsafe { Slot::from_ptr(slot_ptr) }
60    }
61
62    /// Extract the raw slot pointer, consuming the claim without writing.
63    ///
64    /// Transfers ownership to the caller — the slot will NOT be returned
65    /// to the freelist on drop. The caller must either write a value and
66    /// eventually free it, or return the slot via `free_ptr()`.
67    #[inline]
68    pub(crate) fn into_ptr(self) -> *mut SlotCell<T> {
69        let ptr = self.slot_ptr;
70        mem::forget(self);
71        ptr
72    }
73}
74
75impl<T> fmt::Debug for Claim<'_, T> {
76    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
77        f.debug_struct("Claim")
78            .field("slot_ptr", &self.slot_ptr)
79            .finish()
80    }
81}
82
83impl<T> Drop for Claim<'_, T> {
84    fn drop(&mut self) {
85        // Abandoned claim - return slot to freelist
86        // SAFETY: slot_ptr is valid and still vacant (never written to)
87        let free_head = self.slab.free_head.get();
88        unsafe {
89            (*self.slot_ptr).set_next_free(free_head);
90        }
91        self.slab.free_head.set(self.slot_ptr);
92    }
93}
94
// =============================================================================
// Slab
// =============================================================================

/// Fixed-capacity slab allocator for manual memory management.
///
/// Uses pointer-based freelist for O(1) allocation. ~20-24 cycle operations.
///
/// # Safety Contract
///
/// Construction is `unsafe` because it opts you into manual memory
/// management. By creating a slab, you accept these invariants:
///
/// - **Free from the correct slab.** Passing a [`Slot`] to a different
///   slab's `free()` is undefined behavior — it corrupts the freelist.
///   In debug builds, this is caught by `debug_assert!`.
/// - **Free everything you allocate.** Dropping the slab does NOT drop
///   values in occupied slots. Unfreed slots leak silently.
/// - **Single-threaded.** The slab is `!Send` and `!Sync`.
///
/// ## Why `free()` is safe
///
/// The safety contract is accepted once, at construction. After that:
/// - [`Slot`] is move-only (no `Copy`, no `Clone`) — double-free is
///   prevented by the type system.
/// - `free()` consumes the `Slot` — the handle cannot be used after.
/// - Cross-slab misuse is the only remaining hazard, and it was
///   accepted as the caller's responsibility at construction time.
pub struct Slab<T> {
    /// Slot storage. Wrapped in UnsafeCell for interior mutability.
    /// The Vec is never grown or shrunk after construction, so slot
    /// pointers handed out stay stable for the slab's lifetime.
    slots: core::cell::UnsafeCell<Vec<SlotCell<T>>>,
    /// Fixed capacity, set at construction.
    capacity: usize,
    /// Head of freelist — raw pointer for fast allocation.
    /// NULL when the slab is full.
    /// (The raw pointer in a `Cell` also makes the type `!Send`/`!Sync`.)
    pub(crate) free_head: Cell<*mut SlotCell<T>>,
}
132
impl<T> Slab<T> {
    /// Creates a new slab with the given capacity.
    ///
    /// Allocates and initializes all `capacity` slots eagerly, then threads
    /// them into a singly-linked freelist (slot 0 is the initial head).
    ///
    /// # Safety
    ///
    /// See [struct-level safety contract](Self).
    ///
    /// # Panics
    ///
    /// Panics if capacity is zero.
    #[inline]
    pub unsafe fn with_capacity(capacity: usize) -> Self {
        assert!(capacity > 0, "capacity must be non-zero");

        // `Vec::with_capacity` guarantees no reallocation while pushing
        // exactly `capacity` elements, so slot addresses are final here.
        let mut slots = Vec::with_capacity(capacity);
        for _ in 0..capacity {
            slots.push(SlotCell::vacant(ptr::null_mut()));
        }

        // Wire up the freelist: each slot's next_free points to the next slot
        // NOTE(review): `as_mut_ptr()` is re-derived each iteration rather
        // than hoisted — presumably to keep pointer provenance fresh under
        // Stacked Borrows; confirm with `cargo miri test` before changing.
        for i in 0..(capacity - 1) {
            let next_ptr = slots.as_mut_ptr().wrapping_add(i + 1);
            // SAFETY: Slot is vacant, wiring up the freelist during init
            unsafe { slots[i].set_next_free(next_ptr) };
        }
        // Last slot points to NULL (end of freelist) — already null from vacant()

        let free_head = slots.as_mut_ptr();

        Self {
            slots: core::cell::UnsafeCell::new(slots),
            capacity,
            free_head: Cell::new(free_head),
        }
    }

    /// Returns the capacity.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity
    }

    /// Returns the base pointer to the slots array.
    ///
    /// NOTE(review): this casts a `*const` obtained through a shared borrow
    /// to `*mut`; soundness relies on the `UnsafeCell` wrapper around the
    /// Vec — worth confirming under Miri.
    #[inline]
    pub(crate) fn slots_ptr(&self) -> *mut SlotCell<T> {
        // SAFETY: We're returning a pointer for use with raw pointer access
        let slots = unsafe { &*self.slots.get() };
        slots.as_ptr().cast_mut()
    }

    /// Returns `true` if `ptr` falls within this slab's slot array.
    ///
    /// O(1) range check. Used in `debug_assert!` to validate provenance.
    /// Note: checks the address range only, not slot alignment — a pointer
    /// into the middle of a slot would also pass.
    #[doc(hidden)]
    #[inline]
    pub fn contains_ptr(&self, ptr: *const ()) -> bool {
        let base = self.slots_ptr() as usize;
        let end = base + self.capacity * core::mem::size_of::<SlotCell<T>>();
        let addr = ptr as usize;
        addr >= base && addr < end
    }

    // =========================================================================
    // Allocation API
    // =========================================================================

    /// Claims a slot from the freelist without writing a value.
    ///
    /// Returns `None` if the slab is full. The returned [`Claim`] must be
    /// consumed via [`Claim::write()`] to complete the allocation.
    ///
    /// This two-phase allocation enables placement new optimization: the
    /// value can be constructed directly into the slot memory.
    ///
    /// # Example
    ///
    /// ```
    /// use nexus_slab::bounded::Slab;
    ///
    /// // SAFETY: caller guarantees slab contract (see struct docs)
    /// let slab = unsafe { Slab::with_capacity(10) };
    /// if let Some(claim) = slab.claim() {
    ///     let slot = claim.write(42u64);
    ///     assert_eq!(*slot, 42);
    ///     slab.free(slot);
    /// }
    /// ```
    #[inline]
    pub fn claim(&self) -> Option<Claim<'_, T>> {
        self.claim_ptr().map(|slot_ptr| Claim {
            slot_ptr,
            slab: self,
        })
    }

    /// Claims a slot from the freelist, returning the raw pointer.
    ///
    /// Returns `None` if the slab is full. This is a low-level API for
    /// macro-generated code that needs to escape TLS closures.
    ///
    /// # Safety Contract
    ///
    /// The caller MUST either:
    /// - Write a value to the slot and use it as an allocated slot, OR
    /// - Return the pointer to the freelist via `free_ptr()` if abandoning
    #[doc(hidden)]
    #[inline]
    pub(crate) fn claim_ptr(&self) -> Option<*mut SlotCell<T>> {
        let slot_ptr = self.free_head.get();

        // NULL head means every slot is occupied.
        if slot_ptr.is_null() {
            return None;
        }

        // SAFETY: slot_ptr came from the freelist within this slab.
        // The slot is vacant, so next_free is the active union field.
        let next_free = unsafe { (*slot_ptr).get_next_free() };

        // Update freelist head (pop the slot off the list).
        self.free_head.set(next_free);

        Some(slot_ptr)
    }

    /// Allocates a slot and writes the value.
    ///
    /// # Panics
    ///
    /// Panics if the slab is full.
    #[inline]
    pub fn alloc(&self, value: T) -> Slot<T> {
        self.claim().expect("slab full").write(value)
    }

    /// Tries to allocate a slot and write the value.
    ///
    /// Returns `Err(Full(value))` if the slab is at capacity, handing the
    /// value back to the caller instead of dropping it.
    #[inline]
    pub fn try_alloc(&self, value: T) -> Result<Slot<T>, Full<T>> {
        match self.claim() {
            Some(claim) => Ok(claim.write(value)),
            None => Err(Full(value)),
        }
    }

    /// Frees a slot, dropping the value and returning storage to the freelist.
    ///
    /// Consumes the handle — the slot cannot be used after this call.
    /// The caller's safety obligation (free from the correct slab) was
    /// accepted at construction time.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn free(&self, slot: Slot<T>) {
        let slot_ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(slot_ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot is valid and occupied.
        // Drop the value in place first, then recycle the storage.
        unsafe {
            (*slot_ptr).drop_value_in_place();
            self.free_ptr(slot_ptr);
        }
    }

    /// Frees a slot and returns the value without dropping it.
    ///
    /// Consumes the handle — the slot cannot be used after this call.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn take(&self, slot: Slot<T>) -> T {
        let slot_ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(slot_ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot is valid and occupied.
        // Move the value out by bitwise read, then recycle the storage —
        // the slot must be returned to the freelist before `value` is
        // returned so the storage is never left in a half-freed state.
        unsafe {
            let value = (*slot_ptr).read_value();
            self.free_ptr(slot_ptr);
            value
        }
    }

    /// Returns a slot to the freelist by pointer.
    ///
    /// Does NOT drop the value — caller must drop before calling.
    ///
    /// # Safety
    ///
    /// - `slot_ptr` must point to a slot within this slab
    /// - Value must already be dropped or moved out
    #[doc(hidden)]
    #[inline]
    pub(crate) unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
        debug_assert!(
            self.contains_ptr(slot_ptr as *const ()),
            "slot was not allocated from this slab"
        );
        let free_head = self.free_head.get();
        // SAFETY: Caller guarantees slot_ptr is valid, transitioning to vacant.
        // The slot's next_free must be linked BEFORE the head is updated,
        // otherwise the freelist would transiently point at a dangling link.
        unsafe {
            (*slot_ptr).set_next_free(free_head);
        }
        self.free_head.set(slot_ptr);
    }
}
340
341impl<T> fmt::Debug for Slab<T> {
342    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
343        f.debug_struct("Slab")
344            .field("capacity", &self.capacity)
345            .finish()
346    }
347}
348
// =============================================================================
// Tests
// =============================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

    #[test]
    fn slab_basic() {
        // SAFETY: test upholds the slab contract (single-threaded, frees all).
        let slab = unsafe { Slab::<u64>::with_capacity(100) };
        assert_eq!(slab.capacity(), 100);

        let handle = slab.alloc(42);
        assert_eq!(*handle, 42);
        slab.free(handle);
    }

    #[test]
    fn slab_full() {
        let slab = unsafe { Slab::<u64>::with_capacity(2) };
        let first = slab.alloc(1);
        let second = slab.alloc(2);

        // A third allocation must fail and hand the value back intact.
        let overflow = slab.try_alloc(3);
        assert!(overflow.is_err());
        assert_eq!(overflow.unwrap_err().into_inner(), 3);

        slab.free(first);
        slab.free(second);
    }

    #[test]
    fn slot_deref_mut() {
        let slab = unsafe { Slab::<String>::with_capacity(10) };
        let mut text = slab.alloc("hello".to_string());
        text.push_str(" world");
        assert_eq!(&*text, "hello world");
        slab.free(text);
    }

    #[test]
    fn slot_dealloc_take() {
        let slab = unsafe { Slab::<String>::with_capacity(10) };
        let handle = slab.alloc("hello".to_string());

        // take() moves the value out and recycles the slot.
        assert_eq!(slab.take(handle), "hello");
    }

    #[test]
    fn slot_size() {
        // Slot must stay pointer-sized (this assumes a 64-bit target).
        assert_eq!(std::mem::size_of::<Slot<u64>>(), 8);
    }

    #[test]
    fn slab_debug() {
        let slab = unsafe { Slab::<u64>::with_capacity(10) };
        let handle = slab.alloc(42);
        let rendered = format!("{:?}", slab);
        assert!(rendered.contains("Slab"));
        assert!(rendered.contains("capacity"));
        slab.free(handle);
    }

    #[test]
    fn borrow_traits() {
        let slab = unsafe { Slab::<u64>::with_capacity(10) };
        let mut handle = slab.alloc(42);

        let shared: &u64 = handle.borrow();
        assert_eq!(*shared, 42);

        let exclusive: &mut u64 = handle.borrow_mut();
        *exclusive = 100;
        assert_eq!(*handle, 100);

        slab.free(handle);
    }

    #[cfg(debug_assertions)]
    #[test]
    fn slot_debug_drop_panics() {
        let outcome = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            let slab = unsafe { Slab::<u64>::with_capacity(10) };
            let _leaked = slab.alloc(42u64);
            // The slot drops here without being freed.
        }));
        assert!(outcome.is_err(), "Slot should panic on drop in debug mode");
    }

    #[test]
    fn capacity_one() {
        let slab = unsafe { Slab::<u64>::with_capacity(1) };

        assert_eq!(slab.capacity(), 1);

        let only = slab.alloc(42);
        assert!(slab.try_alloc(100).is_err());

        slab.free(only);

        // The single slot is reusable once freed.
        let reused = slab.alloc(100);
        assert_eq!(*reused, 100);
        slab.free(reused);
    }
}