nexus-slab 2.3.4

A high-performance slab allocator optimized for predictable tail latency.

Documentation
//! Reference-counted slab allocation with guarded access.
//!
//! Wraps the raw [`bounded`](crate::bounded) and [`unbounded`](crate::unbounded)
//! slabs with reference counting and borrow guards. The inner slab stores
//! `RcCell<T>` (refcount header + value) while the user works with `RcSlot<T>`.
//!
//! Only one borrow at a time — shared or exclusive. Panics if violated.
//! More conservative than `RefCell`.
//!
//! # Example
//!
//! ```
//! use nexus_slab::rc::bounded::Slab;
//!
//! // SAFETY: caller accepts manual memory management contract
//! let slab = unsafe { Slab::<u64>::with_capacity(1024) };
//!
//! let h1 = slab.alloc(42);
//! let h2 = h1.clone();  // refcount 1 → 2
//!
//! {
//!     let val = h1.borrow();
//!     assert_eq!(*val, 42);
//! }
//!
//! {
//!     let mut val = h2.borrow_mut();
//!     *val = 99;
//! }
//!
//! slab.free(h2);  // refcount 2 → 1
//! slab.free(h1);  // refcount 1 → 0, deallocated
//! ```

pub mod bounded;
pub mod unbounded;

use core::cell::Cell;
use core::fmt;
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::{Deref, DerefMut};

use crate::shared::SlotCell;

// =============================================================================
// RcCell — storage layout for refcounted slots
// =============================================================================

/// Per-slot storage for a reference-counted value.
///
/// A vacant slot reuses these bytes as a freelist node (same as `SlotCell`);
/// an occupied slot keeps refcount + borrow bookkeeping in `state` and the
/// payload in `value`.
///
/// # Layout
///
/// ```text
/// ┌─────────────────────────┐
/// │ state: Cell<usize>      │  pointer-sized (refcount + borrow bit)
/// ├─────────────────────────┤
/// │ value: ManuallyDrop<T>  │  size_of::<T>() bytes
/// └─────────────────────────┘
/// ```
///
/// `state` sits at offset 0 of the value region of the `SlotCell` union,
/// so a vacant slot's `next_free` pointer aliases the same bytes. Both are
/// pointer-sized, which keeps this overlap well-formed.
#[repr(C)]
pub struct RcCell<T> {
    /// Top bit (`usize::BITS - 1`): a borrow guard is live.
    /// Remaining low bits: the reference count.
    state: Cell<usize>,
    /// Payload. `UnsafeCell` allows mutation through shared references
    /// (the same pattern `RefCell` uses); `ManuallyDrop` because the drop
    /// is issued manually once the refcount reaches 0.
    value: core::cell::UnsafeCell<ManuallyDrop<T>>,
}

/// Borrow flag — the most significant bit of `usize`.
const BORROW_BIT: usize = 1 << (usize::BITS - 1);
/// Mask selecting the reference-count bits (all bits below the flag).
const REFCOUNT_MASK: usize = !BORROW_BIT;

impl<T> RcCell<T> {
    /// Builds an occupied cell: refcount 1, borrow flag clear.
    #[inline]
    pub(crate) fn new(value: T) -> Self {
        RcCell {
            state: Cell::new(1),
            value: core::cell::UnsafeCell::new(ManuallyDrop::new(value)),
        }
    }

    /// Consumes the cell and hands back the payload.
    #[inline]
    pub(crate) fn into_inner(self) -> T {
        ManuallyDrop::into_inner(self.value.into_inner())
    }

    /// Current reference count (borrow flag stripped).
    #[inline]
    fn refcount(&self) -> usize {
        self.state.get() & REFCOUNT_MASK
    }

    /// Bumps the reference count by one.
    #[inline]
    fn inc_ref(&self) {
        let bits = self.state.get();
        debug_assert!(
            (bits & REFCOUNT_MASK) < REFCOUNT_MASK,
            "RcSlot refcount overflow"
        );
        // The count occupies the low bits, so a plain add cannot disturb
        // the borrow flag while the assertion above holds.
        self.state.set(bits + 1);
    }

    /// Drops the reference count by one, leaving the borrow flag as-is.
    /// Returns the count that remains.
    #[inline]
    fn dec_ref(&self) -> usize {
        let bits = self.state.get();
        let count = bits & REFCOUNT_MASK;
        debug_assert!(count > 0, "RcSlot refcount underflow");
        let remaining = count - 1;
        self.state.set((bits & BORROW_BIT) | remaining);
        remaining
    }

    /// Raises the borrow flag; panics if it is already raised.
    #[inline]
    fn acquire_borrow(&self) {
        let bits = self.state.get();
        assert!(
            bits & BORROW_BIT == 0,
            "RcSlot<{}> already borrowed",
            core::any::type_name::<T>()
        );
        self.state.set(bits | BORROW_BIT);
    }

    /// Lowers the borrow flag.
    #[inline]
    fn release_borrow(&self) {
        let bits = self.state.get();
        debug_assert!(bits & BORROW_BIT != 0, "release_borrow without borrow");
        self.state.set(bits & REFCOUNT_MASK);
    }

    /// Shared reference to the payload.
    ///
    /// # Safety
    ///
    /// The caller must hold the borrow flag.
    #[inline]
    unsafe fn value_ref(&self) -> &T {
        // SAFETY: `UnsafeCell::get()` yields `*mut ManuallyDrop<T>`, which
        // is layout-compatible with `*mut T` (repr(transparent)); the
        // borrow flag rules out concurrent mutation.
        unsafe { &*(self.value.get().cast::<T>()) }
    }

    /// Exclusive reference to the payload.
    ///
    /// # Safety
    ///
    /// The caller must hold the borrow flag exclusively.
    #[inline]
    #[allow(clippy::mut_from_ref)]
    unsafe fn value_mut(&self) -> &mut T {
        // SAFETY: `UnsafeCell::get()` carries write provenance; the borrow
        // flag rules out any other access.
        unsafe { &mut *(self.value.get().cast::<T>()) }
    }

    /// Raw pointer to the payload, bypassing the borrow guard entirely.
    ///
    /// The pointer stays valid while the refcount is non-zero (i.e. some
    /// `RcSlot` still exists). It is `*mut T` rather than `*const T`
    /// because `UnsafeCell` grants interior mutability; the caller is
    /// responsible for avoiding aliasing violations when dereferencing.
    #[inline]
    pub fn value_ptr(&self) -> *mut T {
        self.value.get().cast::<T>()
    }

    /// Runs the payload's destructor in place.
    ///
    /// # Safety
    ///
    /// Call at most once, and only when the refcount has reached 0.
    #[inline]
    unsafe fn drop_value(&self) {
        // SAFETY: `UnsafeCell::get()` carries write provenance.
        unsafe {
            core::ptr::drop_in_place(self.value.get().cast::<T>());
        }
    }
}

// =============================================================================
// RcSlot<T> — Reference-counted handle
// =============================================================================

/// Reference-counted handle to a slab-allocated value.
///
/// `RcSlot<T>` implements `Clone`; each clone bumps the refcount and must
/// be handed back to the slab individually. The slot is recycled only when
/// the final handle is returned.
/// (NOTE(review): docs in this module name the free method both `free()`
/// and `free_rc()` — confirm the actual name against the slab API.)
///
/// The value is reached through guards: [`borrow()`](Self::borrow) yields
/// [`Ref<T>`], [`borrow_mut()`](Self::borrow_mut) yields [`RefMut<T>`].
/// At most one guard may be live at a time; a second borrow panics.
///
/// # Size
///
/// A single pointer.
pub struct RcSlot<T> {
    /// Address of the `RcCell<T>` embedded in a `SlotCell<RcCell<T>>`.
    ptr: *mut RcCell<T>,
    _marker: PhantomData<T>,
}

impl<T> RcSlot<T> {
    /// Wraps a raw cell pointer in a handle.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid, occupied `RcCell<T>` within a slab.
    #[inline]
    pub(crate) unsafe fn from_ptr(ptr: *mut RcCell<T>) -> Self {
        Self {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Raw pointer to the underlying `RcCell`.
    #[inline]
    pub fn as_ptr(&self) -> *mut RcCell<T> {
        self.ptr
    }

    /// Consumes the handle and returns the raw pointer. Neither `Drop`
    /// nor the refcount is touched, which also disarms the debug leak
    /// detector. Rebuild the handle with [`from_raw()`](Self::from_raw).
    #[inline]
    pub fn into_raw(self) -> *mut RcCell<T> {
        let raw = self.ptr;
        core::mem::forget(self);
        raw
    }

    /// Rebuilds an `RcSlot` from a pointer produced by
    /// [`into_raw()`](Self::into_raw).
    ///
    /// # Safety
    ///
    /// `ptr` must be a valid pointer to an occupied `RcCell<T>` whose
    /// refcount is non-zero, originally obtained from `into_raw()`.
    #[inline]
    pub unsafe fn from_raw(ptr: *mut RcCell<T>) -> Self {
        Self {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Read-only raw pointer to the value, skipping the borrow guard.
    ///
    /// Use [`value_ptr_mut`](Self::value_ptr_mut) when mutation is needed.
    /// Intended for intrusive-collection traversal via `Cell`-based link
    /// fields. The pointer stays valid while any `RcSlot` for this slot
    /// exists (refcount > 0); slab memory never moves.
    ///
    /// # Safety
    ///
    /// The caller must avoid aliasing violations: never create a `&mut T`
    /// through this pointer while a `Ref` or `RefMut` guard is active,
    /// and vice versa.
    #[inline]
    pub unsafe fn value_ptr(&self) -> *const T {
        // SAFETY: the cell stays alive while any handle exists.
        unsafe { (*self.ptr).value_ptr().cast_const() }
    }

    /// Mutable raw pointer to the value, skipping the borrow guard.
    ///
    /// # Safety
    ///
    /// Everything [`Self::value_ptr`] requires, plus exclusive access: no
    /// other pointer or guard may be reading or writing the value.
    #[inline]
    pub unsafe fn value_ptr_mut(&self) -> *mut T {
        // SAFETY: the cell stays alive while any handle exists.
        unsafe { (*self.ptr).value_ptr() }
    }

    /// Current reference count.
    #[inline]
    pub fn refcount(&self) -> usize {
        // SAFETY: the cell stays alive while any handle exists.
        unsafe { (*self.ptr).refcount() }
    }

    /// Takes a shared borrow, returning a guard that yields `&T`.
    ///
    /// # Panics
    ///
    /// Panics if any handle already holds a borrow, shared or exclusive.
    #[inline]
    pub fn borrow(&self) -> Ref<'_, T> {
        // SAFETY: ptr is valid.
        unsafe { (*self.ptr).acquire_borrow() };
        Ref {
            cell: self.ptr,
            _marker: PhantomData,
        }
    }

    /// Takes an exclusive borrow, returning a guard that yields `&mut T`.
    ///
    /// # Panics
    ///
    /// Panics if any handle already holds a borrow, shared or exclusive.
    #[inline]
    pub fn borrow_mut(&self) -> RefMut<'_, T> {
        // SAFETY: ptr is valid.
        unsafe { (*self.ptr).acquire_borrow() };
        RefMut {
            cell: self.ptr,
            _marker: PhantomData,
        }
    }

    /// Shared borrow wrapped in `Pin`.
    ///
    /// Slab memory never moves, so pinning is sound without `T: Unpin`.
    #[inline]
    pub fn pin(&self) -> core::pin::Pin<Ref<'_, T>> {
        // SAFETY: slab memory is stable.
        unsafe { core::pin::Pin::new_unchecked(self.borrow()) }
    }

    /// Exclusive borrow wrapped in `Pin`.
    #[inline]
    pub fn pin_mut(&self) -> core::pin::Pin<RefMut<'_, T>> {
        // SAFETY: slab memory is stable, as in `pin`.
        unsafe { core::pin::Pin::new_unchecked(self.borrow_mut()) }
    }

    /// Bumps the refcount (backs `Clone`).
    #[inline]
    fn inc_ref(&self) {
        // SAFETY: the cell stays alive while any handle exists.
        unsafe { (*self.ptr).inc_ref() };
    }

    /// Drops the refcount by one and returns what remains. When it hits
    /// 0, the caller is responsible for freeing the slot.
    #[inline]
    pub(crate) fn dec_ref(&self) -> usize {
        // SAFETY: the cell stays alive while any handle exists.
        unsafe { (*self.ptr).dec_ref() }
    }

    /// Runs the value's destructor inside the cell. Invoked when the
    /// refcount reaches zero.
    ///
    /// # Safety
    ///
    /// Must be called exactly once, and only when the refcount is 0.
    #[inline]
    pub(crate) unsafe fn drop_value(&self) {
        // SAFETY: forwarded to RcCell::drop_value under the same contract.
        unsafe { (*self.ptr).drop_value() };
    }

    /// Pointer to the enclosing `SlotCell` (for returning to the freelist).
    ///
    /// The `RcCell<T>` lives inside a `SlotCell<RcCell<T>>`; because
    /// `SlotCell` is `repr(C)` with its value field at offset 0 (a union),
    /// the two pointers are numerically identical — a plain cast suffices.
    #[inline]
    pub(crate) fn slot_cell_ptr(&self) -> *mut SlotCell<RcCell<T>> {
        self.ptr.cast()
    }
}

impl<T> Clone for RcSlot<T> {
    /// Clones the handle, incrementing the reference count.
    ///
    /// The clone must also be freed via `slab.free_rc()`.
    #[inline]
    fn clone(&self) -> Self {
        self.inc_ref();
        RcSlot {
            ptr: self.ptr,
            _marker: PhantomData,
        }
    }
}

impl<T: fmt::Debug> fmt::Debug for RcSlot<T> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Borrowing here could panic if a guard is already live, so only
        // metadata is printed — never the value itself.
        fmt.debug_struct("RcSlot")
            .field("refcount", &self.refcount())
            .finish()
    }
}

// Debug-only leak detector: in debug builds, dropping an `RcSlot` instead
// of returning it to the slab panics loudly. Release builds have no `Drop`
// impl at all, so the handle degenerates to a plain pointer there.
#[cfg(debug_assertions)]
impl<T> Drop for RcSlot<T> {
    fn drop(&mut self) {
        // Suppress the panic while already unwinding — a panic inside a
        // panic would abort the process. `thread::panicking` needs `std`.
        // NOTE(review): in a no_std debug build, a drop during unwinding
        // will still panic here and abort — confirm that is acceptable.
        #[cfg(feature = "std")]
        if std::thread::panicking() {
            return;
        }
        panic!(
            "RcSlot<{}> dropped without being freed — call slab.free(handle)",
            core::any::type_name::<T>()
        );
    }
}

// =============================================================================
// Ref<T> — Shared borrow guard
// =============================================================================

/// Guard that dereferences to `&T` for an `RcSlot`-managed value.
///
/// Obtained from [`RcSlot::borrow()`]; clears the borrow flag when dropped.
pub struct Ref<'a, T> {
    cell: *mut RcCell<T>,
    _marker: PhantomData<&'a T>,
}

impl<T> Deref for Ref<'_, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: the borrow flag was raised when this guard was created,
        // so no other guard can be mutating the value.
        unsafe { (*self.cell).value_ref() }
    }
}

impl<T> Drop for Ref<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: this guard owns the borrow flag it is clearing.
        unsafe { (*self.cell).release_borrow() };
    }
}

impl<T: fmt::Debug> fmt::Debug for Ref<'_, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_struct("Ref").field("value", &**self).finish()
    }
}

// =============================================================================
// RefMut<T> — Exclusive borrow guard
// =============================================================================

/// Guard that dereferences to `&mut T` for an `RcSlot`-managed value.
///
/// Obtained from [`RcSlot::borrow_mut()`]; clears the borrow flag on drop.
pub struct RefMut<'a, T> {
    cell: *mut RcCell<T>,
    _marker: PhantomData<&'a mut T>,
}

impl<T> Deref for RefMut<'_, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: the borrow flag is held exclusively by this guard.
        unsafe { (*self.cell).value_ref() }
    }
}

impl<T> DerefMut for RefMut<'_, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: the borrow flag guarantees exclusive access.
        unsafe { (*self.cell).value_mut() }
    }
}

impl<T> Drop for RefMut<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: this guard owns the borrow flag it is clearing.
        unsafe { (*self.cell).release_borrow() };
    }
}

impl<T: fmt::Debug> fmt::Debug for RefMut<'_, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_struct("RefMut").field("value", &**self).finish()
    }
}