// nexus_slab/bounded.rs

//! Fixed-capacity slab allocator.
//!
//! This module provides a bounded (fixed-capacity) slab allocator.
//!
//! # Example
//!
//! ```
//! use nexus_slab::bounded::Slab;
//!
//! // SAFETY: caller guarantees slab contract (see struct docs)
//! let slab = unsafe { Slab::with_capacity(1024) };
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! slab.free(slot);
//! ```

use core::cell::Cell;
use core::fmt;
use core::mem;
use core::ptr;

use alloc::vec::Vec;

use crate::shared::{Full, Slot, SlotCell};

// =============================================================================
// Claim
// =============================================================================

/// A claimed slot that has not yet been written to.
///
/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
/// to complete the allocation. If dropped without calling `write()`, the slot
/// is returned to the freelist.
///
/// The `write()` method is `#[inline]`, enabling the compiler to potentially
/// optimize the value write as a placement new (constructing directly into
/// the slot memory).
pub struct Claim<'a, T> {
    /// Raw pointer to the claimed slot cell. The cell is vacant (no value
    /// written yet) for the entire lifetime of the `Claim`.
    slot_ptr: *mut SlotCell<T>,
    /// The slab this slot was claimed from. Used by `Drop` to push the slot
    /// back onto the freelist if the claim is abandoned without `write()`.
    slab: &'a Slab<T>,
}

44impl<T> Claim<'_, T> {
45 /// Writes the value to the claimed slot and returns the [`Slot`] handle.
46 ///
47 /// This consumes the claim. The value is written directly to the slot's
48 /// memory, which may enable placement new optimization.
49 #[inline]
50 pub fn write(self, value: T) -> Slot<T> {
51 let slot_ptr = self.slot_ptr;
52 // SAFETY: We own this slot from claim(), it's valid and vacant
53 unsafe {
54 (*slot_ptr).write_value(value);
55 }
56 // Don't run Drop - we're completing the allocation
57 mem::forget(self);
58 // SAFETY: slot_ptr is valid and now occupied
59 unsafe { Slot::from_ptr(slot_ptr) }
60 }
61}
62
63impl<T> fmt::Debug for Claim<'_, T> {
64 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
65 f.debug_struct("Claim")
66 .field("slot_ptr", &self.slot_ptr)
67 .finish()
68 }
69}
70
71impl<T> Drop for Claim<'_, T> {
72 fn drop(&mut self) {
73 // Abandoned claim - return slot to freelist
74 // SAFETY: slot_ptr is valid and still vacant (never written to)
75 let free_head = self.slab.free_head.get();
76 unsafe {
77 (*self.slot_ptr).set_next_free(free_head);
78 }
79 self.slab.free_head.set(self.slot_ptr);
80 }
81}
82
// =============================================================================
// Slab
// =============================================================================

/// Fixed-capacity slab allocator for manual memory management.
///
/// Construction is `unsafe` — by creating a slab, you accept the contract:
///
/// - **Free everything you allocate.** Dropping the slab does NOT drop
///   values in occupied slots. Unfree'd slots leak silently.
/// - **Free from the same slab.** Passing a [`Slot`] to a different
///   slab's `free()` corrupts the freelist.
/// - **Don't share across threads.** The slab is `!Send` and `!Sync`.
///
/// In practice, most systems have one slab per type in `thread_local!`
/// storage. Cross-slab free is a programming error caught by `debug_assert`.
///
/// Uses pointer-based freelist for O(1) allocation. ~20-24 cycle operations.
///
/// # Const Construction
///
/// Supports const construction via [`new()`](Self::new) followed by
/// runtime initialization via [`init()`](Self::init). This enables use with
/// `thread_local!` using the `const { }` block syntax for zero-overhead TLS access.
///
/// ```no_run
/// use nexus_slab::bounded::Slab;
///
/// struct MyType(u64);
///
/// // SAFETY: single slab per type, freed before thread exit
/// thread_local! {
///     static SLAB: Slab<MyType> = const { unsafe { Slab::new() } };
/// }
///
/// // Later, at runtime:
/// SLAB.with(|s| unsafe { s.init(1024) });
/// ```
///
/// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
pub struct Slab<T> {
    /// Slot storage. Wrapped in UnsafeCell for interior mutability.
    /// Only mutated once, inside `init()`; never reallocated afterwards,
    /// so raw pointers into the Vec remain stable.
    slots: core::cell::UnsafeCell<Vec<SlotCell<T>>>,
    /// Capacity. Wrapped in Cell so it can be set during init.
    /// Zero means "not yet initialized".
    capacity: Cell<usize>,
    /// Head of freelist - raw pointer for fast allocation.
    /// NULL when slab is full or uninitialized.
    pub(crate) free_head: Cell<*mut SlotCell<T>>,
}

impl<T> Slab<T> {
    /// Creates an empty, uninitialized slab.
    ///
    /// This is a const function that performs no allocation. Call [`init()`](Self::init)
    /// to allocate storage before use.
    ///
    /// # Safety
    ///
    /// See [struct-level safety contract](Self).
    ///
    /// # Example
    ///
    /// ```no_run
    /// use nexus_slab::bounded::Slab;
    ///
    /// // SAFETY: single slab per type, freed before thread exit
    /// thread_local! {
    ///     static SLAB: Slab<u64> = const { unsafe { Slab::new() } };
    /// }
    /// ```
    #[inline]
    pub const unsafe fn new() -> Self {
        Self {
            slots: core::cell::UnsafeCell::new(Vec::new()),
            capacity: Cell::new(0),
            // NULL free_head doubles as the "uninitialized" marker; alloc
            // paths treat it the same as "full".
            free_head: Cell::new(ptr::null_mut()),
        }
    }

    /// Creates a new slab with the given capacity.
    ///
    /// Convenience for [`new()`](Self::new) followed by [`init()`](Self::init).
    ///
    /// # Safety
    ///
    /// See [struct-level safety contract](Self).
    ///
    /// # Panics
    ///
    /// Panics if capacity is zero.
    #[inline]
    pub unsafe fn with_capacity(capacity: usize) -> Self {
        // SAFETY: caller upholds the slab contract
        let slab = unsafe { Self::new() };
        // SAFETY: caller upholds the slab contract
        unsafe { slab.init(capacity) };
        slab
    }

    /// Initializes the slab with the given capacity.
    ///
    /// This allocates slot storage and builds the freelist. Must be called
    /// exactly once before any allocations.
    ///
    /// # Safety
    ///
    /// See [struct-level safety contract](Self).
    ///
    /// # Panics
    ///
    /// - Panics if the slab is already initialized (capacity > 0)
    /// - Panics if capacity is zero
    pub unsafe fn init(&self, capacity: usize) {
        assert!(self.capacity.get() == 0, "Slab already initialized");
        assert!(capacity > 0, "capacity must be non-zero");

        // SAFETY: We have &self and verified capacity == 0, so no other code
        // can be accessing slots. This is the only mutation point.
        let slots = unsafe { &mut *self.slots.get() };

        // Allocate slots — all initially vacant. reserve_exact + pushes means
        // the Vec never reallocates after this point, keeping slot pointers
        // stable for the slab's lifetime.
        slots.reserve_exact(capacity);
        for _ in 0..capacity {
            slots.push(SlotCell::vacant(ptr::null_mut()));
        }

        // Wire up the freelist: each slot's next_free points to the next slot
        for i in 0..(capacity - 1) {
            let next_ptr = slots.as_mut_ptr().wrapping_add(i + 1);
            // SAFETY: Slot is vacant, wiring up the freelist during init
            unsafe { slots[i].set_next_free(next_ptr) };
        }
        // Last slot points to NULL (end of freelist) — already null from vacant()

        let free_head = slots.as_mut_ptr();
        self.capacity.set(capacity);
        self.free_head.set(free_head);
    }

    /// Returns true if the slab has been initialized.
    #[inline]
    pub fn is_initialized(&self) -> bool {
        self.capacity.get() > 0
    }

    /// Returns the capacity.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity.get()
    }

    /// Returns the base pointer to the slots array.
    #[inline]
    pub(crate) fn slots_ptr(&self) -> *mut SlotCell<T> {
        // SAFETY: We're returning a pointer for use with raw pointer access
        let slots = unsafe { &*self.slots.get() };
        slots.as_ptr().cast_mut()
    }

    /// Returns `true` if `ptr` falls within this slab's slot array.
    ///
    /// O(1) range check. Used in `debug_assert!` to validate provenance.
    /// Note: for an uninitialized slab (capacity 0) this always returns false.
    #[doc(hidden)]
    #[inline]
    pub fn contains_ptr(&self, ptr: *const ()) -> bool {
        let base = self.slots_ptr() as usize;
        let end = base + self.capacity.get() * core::mem::size_of::<SlotCell<T>>();
        let addr = ptr as usize;
        addr >= base && addr < end
    }

    // =========================================================================
    // Allocation API
    // =========================================================================

    /// Claims a slot from the freelist without writing a value.
    ///
    /// Returns `None` if the slab is full. The returned [`Claim`] must be
    /// consumed via [`Claim::write()`] to complete the allocation.
    ///
    /// This two-phase allocation enables placement new optimization: the
    /// value can be constructed directly into the slot memory.
    ///
    /// # Example
    ///
    /// ```
    /// use nexus_slab::bounded::Slab;
    ///
    /// // SAFETY: caller guarantees slab contract (see struct docs)
    /// let slab = unsafe { Slab::with_capacity(10) };
    /// if let Some(claim) = slab.claim() {
    ///     let slot = claim.write(42u64);
    ///     assert_eq!(*slot, 42);
    ///     slab.free(slot);
    /// }
    /// ```
    #[inline]
    pub fn claim(&self) -> Option<Claim<'_, T>> {
        self.claim_ptr().map(|slot_ptr| Claim {
            slot_ptr,
            slab: self,
        })
    }

    /// Claims a slot from the freelist, returning the raw pointer.
    ///
    /// Returns `None` if the slab is full. This is a low-level API for
    /// macro-generated code that needs to escape TLS closures.
    ///
    /// # Safety Contract
    ///
    /// The caller MUST either:
    /// - Write a value to the slot and use it as an allocated slot, OR
    /// - Return the pointer to the freelist via `free_ptr()` if abandoning
    #[doc(hidden)]
    #[inline]
    pub(crate) fn claim_ptr(&self) -> Option<*mut SlotCell<T>> {
        let slot_ptr = self.free_head.get();

        // NULL head covers both "full" and "uninitialized".
        if slot_ptr.is_null() {
            return None;
        }

        // SAFETY: slot_ptr came from the freelist within this slab.
        // The slot is vacant, so next_free is the active union field.
        let next_free = unsafe { (*slot_ptr).get_next_free() };

        // Update freelist head
        self.free_head.set(next_free);

        Some(slot_ptr)
    }

    /// Allocates a slot and writes the value.
    ///
    /// # Panics
    ///
    /// Panics if the slab is full.
    #[inline]
    pub fn alloc(&self, value: T) -> Slot<T> {
        self.claim().expect("slab full").write(value)
    }

    /// Tries to allocate a slot and write the value.
    ///
    /// Returns `Err(Full(value))` if the slab is at capacity, handing the
    /// value back to the caller unconsumed.
    #[inline]
    pub fn try_alloc(&self, value: T) -> Result<Slot<T>, Full<T>> {
        match self.claim() {
            Some(claim) => Ok(claim.write(value)),
            None => Err(Full(value)),
        }
    }

    /// Frees a slot, dropping the value and returning storage to the freelist.
    ///
    /// Consumes the handle — the slot cannot be used after this call.
    /// The caller's safety obligation (free from the correct slab) was
    /// accepted at construction time; cross-slab frees are caught by
    /// `debug_assert` in debug builds.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn free(&self, slot: Slot<T>) {
        let slot_ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(slot_ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot is valid and occupied
        unsafe {
            (*slot_ptr).drop_value_in_place();
            self.free_ptr(slot_ptr);
        }
    }

    /// Frees a slot and returns the value without dropping it.
    ///
    /// Consumes the handle — the slot cannot be used after this call.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn take(&self, slot: Slot<T>) -> T {
        let slot_ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(slot_ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot is valid and occupied
        unsafe {
            let value = (*slot_ptr).read_value();
            self.free_ptr(slot_ptr);
            value
        }
    }

    /// Returns a slot to the freelist by pointer.
    ///
    /// Does NOT drop the value — caller must drop before calling.
    ///
    /// # Safety
    ///
    /// - `slot_ptr` must point to a slot within this slab
    /// - Value must already be dropped or moved out
    #[doc(hidden)]
    #[inline]
    pub(crate) unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
        debug_assert!(
            self.contains_ptr(slot_ptr as *const ()),
            "slot was not allocated from this slab"
        );
        let free_head = self.free_head.get();
        // SAFETY: Caller guarantees slot_ptr is valid, transitioning to vacant
        unsafe {
            (*slot_ptr).set_next_free(free_head);
        }
        self.free_head.set(slot_ptr);
    }
}

398impl<T> fmt::Debug for Slab<T> {
399 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
400 f.debug_struct("Slab")
401 .field("capacity", &self.capacity.get())
402 .finish()
403 }
404}

// =============================================================================
// Tests
// =============================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

415 #[test]
416 fn slab_basic() {
417 let slab = unsafe { Slab::<u64>::with_capacity(100) };
418 assert_eq!(slab.capacity(), 100);
419
420 let slot = slab.alloc(42);
421 assert_eq!(*slot, 42);
422 slab.free(slot);
423 }
424
425 #[test]
426 fn slab_full() {
427 let slab = unsafe { Slab::<u64>::with_capacity(2) };
428 let s1 = slab.alloc(1);
429 let s2 = slab.alloc(2);
430
431 let result = slab.try_alloc(3);
432 assert!(result.is_err());
433 let recovered = result.unwrap_err().into_inner();
434 assert_eq!(recovered, 3);
435
436 slab.free(s1);
437 slab.free(s2);
438 }
439
440 #[test]
441 fn slot_deref_mut() {
442 let slab = unsafe { Slab::<String>::with_capacity(10) };
443 let mut slot = slab.alloc("hello".to_string());
444 slot.push_str(" world");
445 assert_eq!(&*slot, "hello world");
446 slab.free(slot);
447 }
448
449 #[test]
450 fn slot_dealloc_take() {
451 let slab = unsafe { Slab::<String>::with_capacity(10) };
452 let slot = slab.alloc("hello".to_string());
453
454 let value = slab.take(slot);
455 assert_eq!(value, "hello");
456 }
457
458 #[test]
459 fn slot_size() {
460 assert_eq!(std::mem::size_of::<Slot<u64>>(), 8);
461 }
462
463 #[test]
464 fn slab_debug() {
465 let slab = unsafe { Slab::<u64>::with_capacity(10) };
466 let s = slab.alloc(42);
467 let debug = format!("{:?}", slab);
468 assert!(debug.contains("Slab"));
469 assert!(debug.contains("capacity"));
470 slab.free(s);
471 }
472
473 #[test]
474 fn borrow_traits() {
475 let slab = unsafe { Slab::<u64>::with_capacity(10) };
476 let mut slot = slab.alloc(42);
477
478 let borrowed: &u64 = slot.borrow();
479 assert_eq!(*borrowed, 42);
480
481 let borrowed_mut: &mut u64 = slot.borrow_mut();
482 *borrowed_mut = 100;
483 assert_eq!(*slot, 100);
484
485 slab.free(slot);
486 }
487
488 #[cfg(debug_assertions)]
489 #[test]
490 fn slot_debug_drop_panics() {
491 let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
492 let slab = unsafe { Slab::<u64>::with_capacity(10) };
493 let _slot = slab.alloc(42u64);
494 // slot drops here without being freed
495 }));
496 assert!(result.is_err(), "Slot should panic on drop in debug mode");
497 }
498
499 #[test]
500 fn capacity_one() {
501 let slab = unsafe { Slab::<u64>::with_capacity(1) };
502
503 assert_eq!(slab.capacity(), 1);
504
505 let slot = slab.alloc(42);
506 assert!(slab.try_alloc(100).is_err());
507
508 slab.free(slot);
509
510 let slot2 = slab.alloc(100);
511 assert_eq!(*slot2, 100);
512 slab.free(slot2);
513 }
514}