// nexus_slab/unbounded.rs
//! Growable slab allocator.
//!
//! This module provides an unbounded (growable) slab allocator.
//! Growth happens by adding independent chunks — no copying.
//!
//! # Example
//!
//! ```
//! use nexus_slab::unbounded::Slab;
//!
//! let slab = Slab::with_chunk_capacity(4096);
//! let slot = slab.alloc(42u64);
//! assert_eq!(*slot, 42);
//! // SAFETY: slot was allocated from this slab
//! unsafe { slab.free(slot) };
//! ```

use std::cell::Cell;
use std::fmt;
use std::mem::{self, ManuallyDrop, MaybeUninit};
use std::ptr;

use crate::bounded::Slab as BoundedSlab;
use crate::shared::{RawSlot, SlotCell};

// =============================================================================
// Claim
// =============================================================================

/// A claimed slot that has not yet been written to.
///
/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
/// to complete the allocation. If dropped without calling `write()`, the slot
/// is returned to the freelist.
///
/// The `write()` method is `#[inline]`, enabling the compiler to potentially
/// optimize the value write as a placement new (constructing directly into
/// the slot memory).
pub struct Claim<'a, T> {
    // Vacant slot reserved by `Slab::claim()`; written by `write()`.
    slot_ptr: *mut SlotCell<T>,
    // Owning slab, needed by `Drop` to return the slot on abandonment.
    slab: &'a Slab<T>,
    // Index of the chunk that owns `slot_ptr`, so `Drop` can restore the
    // chunk's freelist without scanning all chunks.
    chunk_idx: usize,
}
impl<T> Claim<'_, T> {
    /// Writes the value to the claimed slot and returns the [`RawSlot`] handle.
    ///
    /// This consumes the claim. The value is written directly to the slot's
    /// memory, which may enable placement new optimization.
    #[inline]
    pub fn write(self, value: T) -> RawSlot<T> {
        let slot_ptr = self.slot_ptr;
        // SAFETY: We own this slot from claim(), it's valid and vacant
        unsafe {
            (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
        }
        // Don't run Drop - we're completing the allocation. Drop would push
        // the (now occupied) slot back onto the freelist, double-handing it.
        mem::forget(self);
        // SAFETY: slot_ptr is valid and now occupied
        unsafe { RawSlot::from_ptr(slot_ptr) }
    }
}
impl<T> Drop for Claim<'_, T> {
    /// Returns the abandoned (never-written) slot to its chunk's freelist.
    fn drop(&mut self) {
        // Abandoned claim - return slot to the correct chunk's freelist
        let chunk = self.slab.chunk(self.chunk_idx);
        let chunk_slab = &*chunk.inner;

        // A null free_head means claim_ptr() removed this chunk from the
        // available-space list when this claim emptied it.
        let free_head = chunk_slab.free_head.get();
        let was_full = free_head.is_null();

        // SAFETY: slot_ptr is valid and still vacant (never written to)
        unsafe {
            (*self.slot_ptr).next_free = free_head;
        }
        chunk_slab.free_head.set(self.slot_ptr);

        // If chunk was full, add it back to the available-space list
        if was_full {
            chunk.next_with_space.set(self.slab.head_with_space.get());
            self.slab.head_with_space.set(self.chunk_idx);
        }
    }
}
// =============================================================================
// Constants
// =============================================================================

/// Sentinel chunk index terminating the available-space list
/// (`head_with_space` / `next_with_space`). `usize::MAX` can never be a
/// valid `Vec` index, so it safely encodes "none".
const CHUNK_NONE: usize = usize::MAX;

// =============================================================================
// ChunkEntry
// =============================================================================

/// Internal wrapper for a chunk in the growable slab.
struct ChunkEntry<T> {
    // Boxed so the chunk's address stays stable when the `chunks` Vec
    // reallocates during growth (outstanding slot pointers stay valid).
    inner: Box<BoundedSlab<T>>,
    // Intrusive singly-linked list of chunks that still have free slots,
    // threaded through chunk indices; `CHUNK_NONE` terminates the list.
    next_with_space: Cell<usize>,
}

// =============================================================================
// Slab
// =============================================================================

/// Growable slab allocator.
///
/// Uses independent chunks for growth — no copying when the slab grows.
///
/// # Const Construction
///
/// Supports const construction via [`new()`](Self::new) followed by
/// runtime initialization via [`init()`](Self::init). This enables use with
/// `thread_local!` using the `const { }` block syntax for zero-overhead TLS access.
///
/// ```ignore
/// thread_local! {
///     static SLAB: Slab<MyType> = const { Slab::new() };
/// }
///
/// // Later, at runtime:
/// SLAB.with(|s| s.init(4096));
/// ```
///
/// For direct usage, prefer [`with_chunk_capacity()`](Self::with_chunk_capacity).
pub struct Slab<T> {
    // Chunk table. UnsafeCell allows growth through `&self`; soundness
    // relies on single-threaded, non-reentrant access (see chunks()/chunks_mut()).
    chunks: std::cell::UnsafeCell<Vec<ChunkEntry<T>>>,
    // Slots per chunk; 0 doubles as the "not yet initialized" marker.
    chunk_capacity: Cell<usize>,
    // Head of the intrusive list of chunks with free slots (CHUNK_NONE = empty).
    head_with_space: Cell<usize>,
}
134impl<T> Slab<T> {
135 /// Creates an empty, uninitialized slab.
136 ///
137 /// This is a const function that performs no allocation. Call [`init()`](Self::init)
138 /// to configure chunk capacity before use.
139 ///
140 /// For direct usage, prefer [`with_chunk_capacity()`](Self::with_chunk_capacity).
141 ///
142 /// # Example
143 ///
144 /// ```ignore
145 /// // For use with thread_local! const initialization
146 /// thread_local! {
147 /// static SLAB: Slab<u64> = const { Slab::new() };
148 /// }
149 /// ```
150 #[inline]
151 pub const fn new() -> Self {
152 Self {
153 chunks: std::cell::UnsafeCell::new(Vec::new()),
154 chunk_capacity: Cell::new(0),
155 head_with_space: Cell::new(CHUNK_NONE),
156 }
157 }
158
159 /// Creates a new slab with the given chunk capacity.
160 ///
161 /// Chunks are allocated on-demand when slots are requested.
162 ///
163 /// # Panics
164 ///
165 /// Panics if chunk_capacity is zero.
166 #[inline]
167 pub fn with_chunk_capacity(chunk_capacity: usize) -> Self {
168 let slab = Self::new();
169 slab.init(chunk_capacity);
170 slab
171 }
172
173 /// Initializes the slab with the given chunk capacity.
174 ///
175 /// This configures the chunk parameters. Chunks are allocated on-demand
176 /// when slots are requested. Must be called exactly once before any allocations.
177 ///
178 /// # Panics
179 ///
180 /// - Panics if the slab is already initialized (chunk_capacity > 0)
181 /// - Panics if chunk_capacity is zero
182 pub fn init(&self, chunk_capacity: usize) {
183 assert!(self.chunk_capacity.get() == 0, "Slab already initialized");
184 assert!(chunk_capacity > 0, "chunk_capacity must be non-zero");
185
186 self.chunk_capacity.set(chunk_capacity);
187 }
188
189 /// Returns true if the slab has been initialized.
190 #[inline]
191 pub fn is_initialized(&self) -> bool {
192 self.chunk_capacity.get() > 0
193 }
194
195 /// Returns the total capacity across all chunks.
196 #[inline]
197 pub fn capacity(&self) -> usize {
198 self.chunks().len() * self.chunk_capacity.get()
199 }
200
201 /// Returns the chunk capacity.
202 #[inline]
203 pub fn chunk_capacity(&self) -> usize {
204 self.chunk_capacity.get()
205 }
206
207 #[inline]
208 fn chunks(&self) -> &Vec<ChunkEntry<T>> {
209 // SAFETY: Single-threaded access guaranteed by !Send
210 unsafe { &*self.chunks.get() }
211 }
212
213 #[inline]
214 #[allow(clippy::mut_from_ref)]
215 fn chunks_mut(&self) -> &mut Vec<ChunkEntry<T>> {
216 // SAFETY: Single-threaded access guaranteed by !Send
217 unsafe { &mut *self.chunks.get() }
218 }
219
220 fn chunk(&self, chunk_idx: usize) -> &ChunkEntry<T> {
221 let chunks = self.chunks();
222 debug_assert!(chunk_idx < chunks.len());
223 unsafe { chunks.get_unchecked(chunk_idx) }
224 }
225
226 /// Returns the number of allocated chunks.
227 #[inline]
228 pub fn chunk_count(&self) -> usize {
229 self.chunks().len()
230 }
231
232 /// Returns `true` if `ptr` falls within any chunk's slot array.
233 ///
234 /// O(chunks) scan. Typically 1–5 chunks. Used in `debug_assert!`
235 /// to validate provenance.
236 #[doc(hidden)]
237 pub fn contains_ptr(&self, ptr: *const ()) -> bool {
238 let chunks = self.chunks();
239 for chunk in chunks {
240 let chunk_slab = &*chunk.inner;
241 if chunk_slab.contains_ptr(ptr) {
242 return true;
243 }
244 }
245 false
246 }
247
248 /// Ensures at least `count` chunks are allocated.
249 ///
250 /// No-op if the slab already has `count` or more chunks. Only allocates
251 /// the difference.
252 pub fn reserve_chunks(&self, count: usize) {
253 let current = self.chunks().len();
254 for _ in current..count {
255 self.grow();
256 }
257 }
258
259 /// Grows the slab by adding a single new chunk.
260 fn grow(&self) {
261 let chunks = self.chunks_mut();
262 let chunk_idx = chunks.len();
263 let inner = Box::new(BoundedSlab::with_capacity(self.chunk_capacity.get()));
264
265 let entry = ChunkEntry {
266 inner,
267 next_with_space: Cell::new(self.head_with_space.get()),
268 };
269
270 chunks.push(entry);
271 self.head_with_space.set(chunk_idx);
272 }
273
274 // =========================================================================
275 // Allocation API
276 // =========================================================================
277
278 /// Claims a slot from the freelist without writing a value.
279 ///
280 /// Always succeeds — grows the slab if needed. The returned [`Claim`]
281 /// must be consumed via [`Claim::write()`] to complete the allocation.
282 ///
283 /// This two-phase allocation enables placement new optimization: the
284 /// value can be constructed directly into the slot memory.
285 ///
286 /// # Example
287 ///
288 /// ```
289 /// use nexus_slab::unbounded::Slab;
290 ///
291 /// let slab = Slab::with_chunk_capacity(16);
292 /// let claim = slab.claim();
293 /// let slot = claim.write(42u64);
294 /// assert_eq!(*slot, 42);
295 /// // SAFETY: slot was allocated from this slab
296 /// unsafe { slab.free(slot) };
297 /// ```
298 #[inline]
299 pub fn claim(&self) -> Claim<'_, T> {
300 let (slot_ptr, chunk_idx) = self.claim_ptr();
301 Claim {
302 slot_ptr,
303 slab: self,
304 chunk_idx,
305 }
306 }
307
308 /// Claims a slot from the freelist, returning the raw pointer and chunk index.
309 ///
310 /// Always succeeds — grows the slab if needed. This is a low-level API for
311 /// macro-generated code that needs to escape TLS closures.
312 ///
313 /// # Safety Contract
314 ///
315 /// The caller MUST either:
316 /// - Write a value to the slot and use it as an allocated slot, OR
317 /// - Return the pointer to the freelist via `free_ptr()` if abandoning
318 #[doc(hidden)]
319 #[inline]
320 pub fn claim_ptr(&self) -> (*mut SlotCell<T>, usize) {
321 // Ensure we have space (grow if needed)
322 if self.head_with_space.get() == CHUNK_NONE {
323 self.grow();
324 }
325
326 // Get the chunk with space
327 let chunk_idx = self.head_with_space.get();
328 let chunk = self.chunk(chunk_idx);
329 let chunk_slab = &*chunk.inner;
330
331 // Load freelist head pointer from chunk
332 let slot_ptr = chunk_slab.free_head.get();
333 debug_assert!(!slot_ptr.is_null(), "chunk on freelist has no free slots");
334
335 // SAFETY: slot_ptr came from the freelist. Slot is vacant, so next_free is active.
336 let next_free = unsafe { (*slot_ptr).next_free };
337
338 // Update chunk's freelist head
339 chunk_slab.free_head.set(next_free);
340
341 // If chunk is now full, remove from slab's available-chunk list
342 if next_free.is_null() {
343 self.head_with_space.set(chunk.next_with_space.get());
344 }
345
346 (slot_ptr, chunk_idx)
347 }
348
349 /// Allocates a slot and writes the value.
350 ///
351 /// Always succeeds — grows the slab if needed.
352 #[inline]
353 pub fn alloc(&self, value: T) -> RawSlot<T> {
354 // Ensure we have space (grow if needed)
355 if self.head_with_space.get() == CHUNK_NONE {
356 self.grow();
357 }
358
359 // Get the chunk with space
360 let chunk_idx = self.head_with_space.get();
361 let chunk = self.chunk(chunk_idx);
362 let chunk_slab = &*chunk.inner;
363
364 // Load freelist head pointer from chunk
365 let slot_ptr = chunk_slab.free_head.get();
366 debug_assert!(!slot_ptr.is_null(), "chunk on freelist has no free slots");
367
368 // SAFETY: slot_ptr came from the freelist. Slot is vacant, so next_free is active.
369 let next_free = unsafe { (*slot_ptr).next_free };
370
371 // Write the value — overwrites next_free (union semantics)
372 // SAFETY: Slot is claimed from freelist, we have exclusive access
373 unsafe {
374 (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
375 }
376
377 // Update chunk's freelist head
378 chunk_slab.free_head.set(next_free);
379
380 // If chunk is now full, remove from slab's available-chunk list
381 if next_free.is_null() {
382 self.head_with_space.set(chunk.next_with_space.get());
383 }
384
385 // SAFETY: slot_ptr is valid and occupied
386 unsafe { RawSlot::from_ptr(slot_ptr) }
387 }
388
389 /// Frees a slot, dropping the value and returning storage to the freelist.
390 ///
391 /// # Performance
392 ///
393 /// O(n) where n = chunk count, due to chunk lookup. Typically 1-5 chunks.
394 ///
395 /// # Safety
396 ///
397 /// - `slot` must have been allocated from **this** slab
398 /// - No references to the slot's value may exist
399 #[inline]
400 #[allow(clippy::needless_pass_by_value)]
401 pub unsafe fn free(&self, slot: RawSlot<T>) {
402 let slot_ptr = slot.into_ptr();
403 debug_assert!(
404 self.contains_ptr(slot_ptr as *const ()),
405 "slot was not allocated from this slab"
406 );
407 // SAFETY: Caller guarantees slot is valid and occupied
408 unsafe {
409 ptr::drop_in_place((*(*slot_ptr).value).as_mut_ptr());
410 self.free_ptr(slot_ptr);
411 }
412 }
413
414 /// Frees a slot and returns the value without dropping it.
415 ///
416 /// # Performance
417 ///
418 /// O(n) where n = chunk count, due to chunk lookup. Typically 1-5 chunks.
419 ///
420 /// # Safety
421 ///
422 /// - `slot` must have been allocated from **this** slab
423 /// - No references to the slot's value may exist
424 #[inline]
425 #[allow(clippy::needless_pass_by_value)]
426 pub unsafe fn take(&self, slot: RawSlot<T>) -> T {
427 let slot_ptr = slot.into_ptr();
428 debug_assert!(
429 self.contains_ptr(slot_ptr as *const ()),
430 "slot was not allocated from this slab"
431 );
432 // SAFETY: Caller guarantees slot is valid and occupied
433 unsafe {
434 let value = ptr::read((*slot_ptr).value.as_ptr());
435 self.free_ptr(slot_ptr);
436 value
437 }
438 }
439
440 /// Returns a slot to the freelist by pointer.
441 ///
442 /// Does NOT drop the value — caller must drop before calling.
443 /// Finds the owning chunk via linear scan (typically 1-5 chunks).
444 ///
445 /// # Safety
446 ///
447 /// - `slot_ptr` must point to a slot within this slab
448 /// - Value must already be dropped or moved out
449 #[doc(hidden)]
450 pub unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
451 let chunks = self.chunks();
452 let cap = self.chunk_capacity.get();
453
454 // Find which chunk owns this pointer
455 for (chunk_idx, chunk) in chunks.iter().enumerate() {
456 let chunk_slab = &*chunk.inner;
457 let base = chunk_slab.slots_ptr();
458 let end = base.wrapping_add(cap);
459
460 if slot_ptr >= base && slot_ptr < end {
461 let free_head = chunk_slab.free_head.get();
462 let was_full = free_head.is_null();
463
464 // SAFETY: slot_ptr is within this chunk's range
465 unsafe {
466 (*slot_ptr).next_free = free_head;
467 }
468 chunk_slab.free_head.set(slot_ptr);
469
470 if was_full {
471 chunk.next_with_space.set(self.head_with_space.get());
472 self.head_with_space.set(chunk_idx);
473 }
474 return;
475 }
476 }
477
478 debug_assert!(false, "free_ptr: slot_ptr not found in any chunk");
479 }
480}
impl<T> Default for Slab<T> {
    /// Equivalent to [`Slab::new()`]: uninitialized — call
    /// [`init()`](Slab::init) before allocating.
    fn default() -> Self {
        Self::new()
    }
}
488impl<T> fmt::Debug for Slab<T> {
489 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
490 f.debug_struct("Slab")
491 .field("capacity", &self.capacity())
492 .finish()
493 }
494}

// =============================================================================
// Tests
// =============================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::{Borrow, BorrowMut};

    /// Single allocation round-trip: write, read back, free.
    #[test]
    fn slab_basic() {
        let slab = Slab::<u64>::with_chunk_capacity(16);

        let slot = slab.alloc(42);
        assert_eq!(*slot, 42);
        // SAFETY: slot was allocated from this slab
        unsafe { slab.free(slot) };
    }

    /// Allocating past one chunk's capacity must grow, not fail.
    #[test]
    fn slab_grows() {
        let slab = Slab::<u64>::with_chunk_capacity(4);

        let mut slots = Vec::new();
        for i in 0..10 {
            slots.push(slab.alloc(i));
        }

        assert!(slab.capacity() >= 10);
        // 10 allocations at 4 slots/chunk require exactly 3 chunks.
        assert_eq!(slab.chunk_count(), 3);

        // Free all slots
        for slot in slots {
            // SAFETY: each slot was allocated from this slab
            unsafe { slab.free(slot) };
        }
    }

    /// RawSlot must allow in-place mutation through DerefMut.
    #[test]
    fn slot_deref_mut() {
        let slab = Slab::<String>::with_chunk_capacity(16);
        let mut slot = slab.alloc("hello".to_string());
        slot.push_str(" world");
        assert_eq!(&*slot, "hello world");
        // SAFETY: slot was allocated from this slab
        unsafe { slab.free(slot) };
    }

    /// take() must move the value out instead of dropping it.
    #[test]
    fn slot_dealloc_take() {
        let slab = Slab::<String>::with_chunk_capacity(16);
        let slot = slab.alloc("hello".to_string());

        // SAFETY: slot was allocated from this slab
        let value = unsafe { slab.take(slot) };
        assert_eq!(value, "hello");
    }

    /// Freeing into a full chunk must put it back on the available-space
    /// list, and subsequent allocations must reuse it instead of growing.
    #[test]
    fn chunk_freelist_maintenance() {
        let slab = Slab::<u64>::with_chunk_capacity(2);

        // Fill first chunk
        let s1 = slab.alloc(1);
        let s2 = slab.alloc(2);
        // Triggers growth
        let s3 = slab.alloc(3);
        assert_eq!(slab.chunk_count(), 2);

        // Free from first chunk — should add it back to available list
        // SAFETY: s1 was allocated from this slab
        unsafe { slab.free(s1) };

        // Should reuse the freed slot rather than allocating a third chunk
        let s4 = slab.alloc(4);
        assert_eq!(slab.chunk_count(), 2);

        // SAFETY: all slots were allocated from this slab
        unsafe {
            slab.free(s2);
            slab.free(s3);
            slab.free(s4);
        }
    }

    /// RawSlot should stay a thin pointer-sized handle. Compare against the
    /// platform pointer size instead of hard-coding 8 so the test also
    /// passes on 32-bit targets.
    #[test]
    fn slot_size() {
        assert_eq!(
            std::mem::size_of::<RawSlot<u64>>(),
            std::mem::size_of::<*mut u8>()
        );
    }

    /// Borrow/BorrowMut must delegate to the slot's value.
    #[test]
    fn borrow_traits() {
        let slab = Slab::<u64>::with_chunk_capacity(16);
        let mut slot = slab.alloc(42);

        let borrowed: &u64 = slot.borrow();
        assert_eq!(*borrowed, 42);

        let borrowed_mut: &mut u64 = slot.borrow_mut();
        *borrowed_mut = 100;
        assert_eq!(*slot, 100);

        // SAFETY: slot was allocated from slab
        unsafe { slab.free(slot) };
    }
}
598}