nexus_slab/bounded.rs
1//! Fixed-capacity slab allocator.
2//!
3//! This module provides a bounded (fixed-capacity) slab allocator.
4//!
5//! # Example
6//!
7//! ```
8//! use nexus_slab::bounded::Slab;
9//!
10//! let slab = Slab::with_capacity(1024);
11//! let slot = slab.alloc(42u64);
12//! assert_eq!(*slot, 42);
13//! // SAFETY: slot was allocated from this slab
14//! unsafe { slab.free(slot) };
15//! ```
16
17use std::cell::Cell;
18use std::fmt;
19use std::mem::{self, ManuallyDrop, MaybeUninit};
20use std::ptr;
21
22use crate::alloc::Full;
23use crate::shared::{Slot, SlotCell};
24
25// =============================================================================
26// Claim
27// =============================================================================
28
29/// A claimed slot that has not yet been written to.
30///
31/// Created by [`Slab::claim()`]. Must be consumed via [`write()`](Self::write)
32/// to complete the allocation. If dropped without calling `write()`, the slot
33/// is returned to the freelist.
34///
35/// The `write()` method is `#[inline]`, enabling the compiler to potentially
36/// optimize the value write as a placement new (constructing directly into
37/// the slot memory).
pub struct Claim<'a, T> {
    /// Pointer to the vacant slot popped from the freelist by `Slab::claim()`.
    slot_ptr: *mut SlotCell<T>,
    /// The slab the slot came from; `Drop` uses it to return the slot to the
    /// freelist if the claim is abandoned without `write()`.
    slab: &'a Slab<T>,
}
42
43impl<T> Claim<'_, T> {
44 /// Writes the value to the claimed slot and returns the [`Slot`] handle.
45 ///
46 /// This consumes the claim. The value is written directly to the slot's
47 /// memory, which may enable placement new optimization.
48 #[inline]
49 pub fn write(self, value: T) -> Slot<T> {
50 let slot_ptr = self.slot_ptr;
51 // SAFETY: We own this slot from claim(), it's valid and vacant
52 unsafe {
53 (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
54 }
55 // Don't run Drop - we're completing the allocation
56 mem::forget(self);
57 // SAFETY: slot_ptr is valid and now occupied
58 unsafe { Slot::from_ptr(slot_ptr) }
59 }
60}
61
62impl<T> Drop for Claim<'_, T> {
63 fn drop(&mut self) {
64 // Abandoned claim - return slot to freelist
65 // SAFETY: slot_ptr is valid and still vacant (never written to)
66 let free_head = self.slab.free_head.get();
67 unsafe {
68 (*self.slot_ptr).next_free = free_head;
69 }
70 self.slab.free_head.set(self.slot_ptr);
71 }
72}
73
74// =============================================================================
75// Slab
76// =============================================================================
77
78/// Fixed-capacity slab allocator.
79///
80/// Uses pointer-based freelist for O(1) allocation.
81///
82/// # Const Construction
83///
84/// Supports const construction via [`new()`](Self::new) followed by
85/// runtime initialization via [`init()`](Self::init). This enables use with
86/// `thread_local!` using the `const { }` block syntax for zero-overhead TLS access.
87///
88/// ```ignore
89/// thread_local! {
90/// static SLAB: Slab<MyType> = const { Slab::new() };
91/// }
92///
93/// // Later, at runtime:
94/// SLAB.with(|s| s.init(1024));
95/// ```
96///
97/// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
pub struct Slab<T> {
    /// Slot storage. Wrapped in UnsafeCell for interior mutability.
    /// Mutated through a `&mut` only inside `init()`; all later access goes
    /// through raw pointers obtained from the freelist.
    slots: std::cell::UnsafeCell<Vec<SlotCell<T>>>,
    /// Capacity. Wrapped in Cell so it can be set during init.
    /// Zero doubles as the "not yet initialized" marker (see `is_initialized()`).
    capacity: Cell<usize>,
    /// Head of freelist - raw pointer for fast allocation.
    /// NULL when slab is full or uninitialized.
    /// `pub` but doc-hidden: exposed for macro-generated code (and `Claim`'s
    /// Drop), not part of the stable public API.
    #[doc(hidden)]
    pub free_head: Cell<*mut SlotCell<T>>,
}
108
109impl<T> Slab<T> {
110 /// Creates an empty, uninitialized slab.
111 ///
112 /// This is a const function that performs no allocation. Call [`init()`](Self::init)
113 /// to allocate storage before use.
114 ///
115 /// For direct usage, prefer [`with_capacity()`](Self::with_capacity).
116 ///
117 /// # Example
118 ///
119 /// ```ignore
120 /// // For use with thread_local! const initialization
121 /// thread_local! {
122 /// static SLAB: Slab<u64> = const { Slab::new() };
123 /// }
124 /// ```
125 #[inline]
126 pub const fn new() -> Self {
127 Self {
128 slots: std::cell::UnsafeCell::new(Vec::new()),
129 capacity: Cell::new(0),
130 free_head: Cell::new(ptr::null_mut()),
131 }
132 }
133
134 /// Creates a new slab with the given capacity.
135 ///
136 /// # Panics
137 ///
138 /// Panics if capacity is zero.
139 #[inline]
140 pub fn with_capacity(capacity: usize) -> Self {
141 let slab = Self::new();
142 slab.init(capacity);
143 slab
144 }
145
146 /// Initializes the slab with the given capacity.
147 ///
148 /// This allocates slot storage and builds the freelist. Must be called
149 /// exactly once before any allocations.
150 ///
151 /// # Panics
152 ///
153 /// - Panics if the slab is already initialized (capacity > 0)
154 /// - Panics if capacity is zero
155 pub fn init(&self, capacity: usize) {
156 assert!(self.capacity.get() == 0, "Slab already initialized");
157 assert!(capacity > 0, "capacity must be non-zero");
158
159 // SAFETY: We have &self and verified capacity == 0, so no other code
160 // can be accessing slots. This is the only mutation point.
161 let slots = unsafe { &mut *self.slots.get() };
162
163 // Allocate slots — all initially vacant
164 slots.reserve_exact(capacity);
165 for _ in 0..capacity {
166 slots.push(SlotCell::vacant(ptr::null_mut()));
167 }
168
169 // Wire up the freelist: each slot's next_free points to the next slot
170 for i in 0..(capacity - 1) {
171 let next_ptr = slots.as_mut_ptr().wrapping_add(i + 1);
172 slots[i].next_free = next_ptr;
173 }
174 // Last slot points to NULL (end of freelist) — already null from vacant()
175
176 let free_head = slots.as_mut_ptr();
177 self.capacity.set(capacity);
178 self.free_head.set(free_head);
179 }
180
181 /// Returns true if the slab has been initialized.
182 #[inline]
183 pub fn is_initialized(&self) -> bool {
184 self.capacity.get() > 0
185 }
186
187 /// Returns the capacity.
188 #[inline]
189 pub fn capacity(&self) -> usize {
190 self.capacity.get()
191 }
192
193 /// Returns the base pointer to the slots array.
194 #[inline]
195 pub(crate) fn slots_ptr(&self) -> *mut SlotCell<T> {
196 // SAFETY: We're returning a pointer for use with raw pointer access
197 let slots = unsafe { &*self.slots.get() };
198 slots.as_ptr().cast_mut()
199 }
200
201 // =========================================================================
202 // Allocation API
203 // =========================================================================
204
205 /// Claims a slot from the freelist without writing a value.
206 ///
207 /// Returns `None` if the slab is full. The returned [`Claim`] must be
208 /// consumed via [`Claim::write()`] to complete the allocation.
209 ///
210 /// This two-phase allocation enables placement new optimization: the
211 /// value can be constructed directly into the slot memory.
212 ///
213 /// # Example
214 ///
215 /// ```
216 /// use nexus_slab::bounded::Slab;
217 ///
218 /// let slab = Slab::with_capacity(10);
219 /// if let Some(claim) = slab.claim() {
220 /// let slot = claim.write(42u64);
221 /// assert_eq!(*slot, 42);
222 /// // SAFETY: slot was allocated from this slab
223 /// unsafe { slab.free(slot) };
224 /// }
225 /// ```
226 #[inline]
227 pub fn claim(&self) -> Option<Claim<'_, T>> {
228 self.claim_ptr().map(|slot_ptr| Claim {
229 slot_ptr,
230 slab: self,
231 })
232 }
233
234 /// Claims a slot from the freelist, returning the raw pointer.
235 ///
236 /// Returns `None` if the slab is full. This is a low-level API for
237 /// macro-generated code that needs to escape TLS closures.
238 ///
239 /// # Safety Contract
240 ///
241 /// The caller MUST either:
242 /// - Write a value to the slot and use it as an allocated slot, OR
243 /// - Return the pointer to the freelist via `free_ptr()` if abandoning
244 #[doc(hidden)]
245 #[inline]
246 pub fn claim_ptr(&self) -> Option<*mut SlotCell<T>> {
247 let slot_ptr = self.free_head.get();
248
249 if slot_ptr.is_null() {
250 return None;
251 }
252
253 // SAFETY: slot_ptr came from the freelist within this slab.
254 // The slot is vacant, so next_free is the active union field.
255 let next_free = unsafe { (*slot_ptr).next_free };
256
257 // Update freelist head
258 self.free_head.set(next_free);
259
260 Some(slot_ptr)
261 }
262
263 /// Allocates a slot and writes the value.
264 ///
265 /// # Panics
266 ///
267 /// Panics if the slab is full.
268 #[inline]
269 pub fn alloc(&self, value: T) -> Slot<T> {
270 self.try_alloc(value)
271 .unwrap_or_else(|_| panic!("slab full"))
272 }
273
274 /// Tries to allocate a slot and write the value.
275 ///
276 /// Returns `Err(Full(value))` if the slab is at capacity.
277 #[inline]
278 pub fn try_alloc(&self, value: T) -> Result<Slot<T>, Full<T>> {
279 let slot_ptr = self.free_head.get();
280
281 if slot_ptr.is_null() {
282 return Err(Full(value));
283 }
284
285 // SAFETY: slot_ptr came from the freelist within this slab.
286 // The slot is vacant, so next_free is the active union field.
287 let next_free = unsafe { (*slot_ptr).next_free };
288
289 // Write the value — this overwrites next_free (union semantics)
290 // SAFETY: Slot is claimed from freelist, we have exclusive access
291 unsafe {
292 (*slot_ptr).value = ManuallyDrop::new(MaybeUninit::new(value));
293 }
294
295 // Update freelist head
296 self.free_head.set(next_free);
297
298 // SAFETY: slot_ptr is valid and occupied
299 Ok(unsafe { Slot::from_ptr(slot_ptr) })
300 }
301
302 /// Frees a slot, dropping the value and returning storage to the freelist.
303 ///
304 /// # Safety
305 ///
306 /// - `slot` must have been allocated from **this** slab
307 /// - No references to the slot's value may exist
308 #[inline]
309 #[allow(clippy::needless_pass_by_value)]
310 pub unsafe fn free(&self, slot: Slot<T>) {
311 let slot_ptr = slot.as_ptr();
312 // SAFETY: Caller guarantees slot is valid and occupied
313 unsafe {
314 ptr::drop_in_place((*(*slot_ptr).value).as_mut_ptr());
315 self.free_ptr(slot_ptr);
316 }
317 }
318
319 /// Frees a slot and returns the value without dropping it.
320 ///
321 /// # Safety
322 ///
323 /// - `slot` must have been allocated from **this** slab
324 /// - No references to the slot's value may exist
325 #[inline]
326 #[allow(clippy::needless_pass_by_value)]
327 pub unsafe fn take(&self, slot: Slot<T>) -> T {
328 let slot_ptr = slot.as_ptr();
329 // SAFETY: Caller guarantees slot is valid and occupied
330 unsafe {
331 let value = ptr::read((*slot_ptr).value.as_ptr());
332 self.free_ptr(slot_ptr);
333 value
334 }
335 }
336
337 /// Returns a slot to the freelist by pointer.
338 ///
339 /// Does NOT drop the value — caller must drop before calling.
340 ///
341 /// # Safety
342 ///
343 /// - `slot_ptr` must point to a slot within this slab
344 /// - Value must already be dropped or moved out
345 #[doc(hidden)]
346 #[inline]
347 pub unsafe fn free_ptr(&self, slot_ptr: *mut SlotCell<T>) {
348 let free_head = self.free_head.get();
349 // SAFETY: Caller guarantees slot_ptr is valid
350 unsafe {
351 (*slot_ptr).next_free = free_head;
352 }
353 self.free_head.set(slot_ptr);
354 }
355}
356
357impl<T> Default for Slab<T> {
358 fn default() -> Self {
359 Self::new()
360 }
361}
362
363impl<T> fmt::Debug for Slab<T> {
364 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
365 f.debug_struct("Slab")
366 .field("capacity", &self.capacity.get())
367 .finish()
368 }
369}
370
371// =============================================================================
372// Tests
373// =============================================================================
374
375#[cfg(test)]
376mod tests {
377 use super::*;
378 use std::borrow::{Borrow, BorrowMut};
379
380 #[test]
381 fn slab_basic() {
382 let slab = Slab::<u64>::with_capacity(100);
383 assert_eq!(slab.capacity(), 100);
384
385 let slot = slab.alloc(42);
386 assert_eq!(*slot, 42);
387 // SAFETY: slot was allocated from this slab
388 unsafe { slab.free(slot) };
389 }
390
391 #[test]
392 fn slab_full() {
393 let slab = Slab::<u64>::with_capacity(2);
394 let s1 = slab.alloc(1);
395 let s2 = slab.alloc(2);
396
397 let result = slab.try_alloc(3);
398 assert!(result.is_err());
399 let recovered = result.unwrap_err().into_inner();
400 assert_eq!(recovered, 3);
401
402 // SAFETY: slots were allocated from this slab
403 unsafe {
404 slab.free(s1);
405 slab.free(s2);
406 }
407 }
408
409 #[test]
410 fn slot_deref_mut() {
411 let slab = Slab::<String>::with_capacity(10);
412 let mut slot = slab.alloc("hello".to_string());
413 slot.push_str(" world");
414 assert_eq!(&*slot, "hello world");
415 // SAFETY: slot was allocated from this slab
416 unsafe { slab.free(slot) };
417 }
418
419 #[test]
420 fn slot_dealloc_take() {
421 let slab = Slab::<String>::with_capacity(10);
422 let slot = slab.alloc("hello".to_string());
423
424 // SAFETY: slot was allocated from this slab
425 let value = unsafe { slab.take(slot) };
426 assert_eq!(value, "hello");
427 }
428
429 #[test]
430 fn slot_size() {
431 assert_eq!(std::mem::size_of::<Slot<u64>>(), 8);
432 }
433
434 #[test]
435 fn slab_debug() {
436 let slab = Slab::<u64>::with_capacity(10);
437 let s = slab.alloc(42);
438 let debug = format!("{:?}", slab);
439 assert!(debug.contains("Slab"));
440 assert!(debug.contains("capacity"));
441 // SAFETY: slot was allocated from slab
442 unsafe { slab.free(s) };
443 }
444
445 #[test]
446 fn borrow_traits() {
447 let slab = Slab::<u64>::with_capacity(10);
448 let mut slot = slab.alloc(42);
449
450 let borrowed: &u64 = slot.borrow();
451 assert_eq!(*borrowed, 42);
452
453 let borrowed_mut: &mut u64 = slot.borrow_mut();
454 *borrowed_mut = 100;
455 assert_eq!(*slot, 100);
456
457 // SAFETY: slot was allocated from slab
458 unsafe { slab.free(slot) };
459 }
460
461 #[test]
462 fn capacity_one() {
463 let slab = Slab::<u64>::with_capacity(1);
464
465 assert_eq!(slab.capacity(), 1);
466
467 let slot = slab.alloc(42);
468 assert!(slab.try_alloc(100).is_err());
469
470 // SAFETY: slot was allocated from this slab
471 unsafe { slab.free(slot) };
472
473 let slot2 = slab.alloc(100);
474 assert_eq!(*slot2, 100);
475 // SAFETY: slot2 was allocated from this slab
476 unsafe { slab.free(slot2) };
477 }
478}