// nexus_slab/alloc.rs

//! Generic slab allocator trait and slot types.
//!
//! This module provides:
//! - [`Alloc`] - base trait for slot deallocation
//! - [`BoundedAlloc`] - trait for fixed-capacity allocators (can fail)
//! - [`UnboundedAlloc`] - trait for growable allocators (always succeeds)
//! - [`BoxSlot`] - 8-byte RAII handle generic over allocator
//! - [`RcSlot`] / [`WeakSlot`] - reference-counted handles

10use std::borrow::{Borrow, BorrowMut};
11use std::fmt;
12use std::marker::PhantomData;
13use std::mem::ManuallyDrop;
14use std::ops::{Deref, DerefMut};
15use std::pin::Pin;
16use std::ptr;
17
18use crate::shared::{RcInner, Slot, SlotCell};
19
20// =============================================================================
21// Full<T>
22// =============================================================================
23
/// Error returned when a bounded allocator is full.
///
/// Contains the value that could not be allocated, allowing recovery.
/// The payload field is public, so callers may also destructure `Full(v)`
/// directly instead of calling [`Full::into_inner`].
pub struct Full<T>(pub T);
28
29impl<T> Full<T> {
30    /// Consumes the error, returning the value that could not be allocated.
31    #[inline]
32    pub fn into_inner(self) -> T {
33        self.0
34    }
35}
36
37impl<T> fmt::Debug for Full<T> {
38    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
39        f.write_str("Full(..)")
40    }
41}
42
43impl<T> fmt::Display for Full<T> {
44    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
45        f.write_str("allocator full")
46    }
47}
48
49// =============================================================================
50// Traits
51// =============================================================================
52
/// Base trait for slab allocators - handles slot deallocation.
///
/// Each macro-generated allocator is a ZST that implements this trait.
/// All operations go through associated functions (no `&self`) since
/// the backing storage lives in a `thread_local!`.
///
/// # Safety
///
/// Implementors must guarantee:
/// - `free` correctly drops the value and returns the slot to the freelist
/// - `take` correctly moves the value out and returns the slot to the freelist
/// - All operations are single-threaded (TLS-backed)
pub unsafe trait Alloc: Sized + 'static {
    /// The type stored in each slot.
    type Item;

    /// Returns `true` if the allocator has been initialized.
    fn is_initialized() -> bool;

    /// Returns the total slot capacity.
    ///
    /// For bounded allocators this is fixed at init. For unbounded allocators
    /// this is the sum across all allocated chunks.
    fn capacity() -> usize;

    /// Drops the value and returns the slot to the freelist.
    ///
    /// This is for manual memory management after calling `BoxSlot::into_slot()`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this allocator
    /// - No references to the slot's value may exist
    ///
    /// Note: Double-free is prevented at compile time (`Slot` is move-only).
    #[allow(clippy::needless_pass_by_value)] // Intentional: consumes slot to prevent reuse
    unsafe fn free(slot: Slot<Self::Item>);

    /// Takes the value from a slot, returning it and deallocating the slot.
    ///
    /// This is for manual memory management after calling `BoxSlot::into_slot()`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this allocator
    /// - No references to the slot's value may exist
    ///
    /// Note: Double-free is prevented at compile time (`Slot` is move-only).
    #[allow(clippy::needless_pass_by_value)] // Intentional: consumes slot to prevent reuse
    unsafe fn take(slot: Slot<Self::Item>) -> Self::Item;
}
104
/// Trait for bounded (fixed-capacity) allocators.
///
/// Bounded allocators can fail when at capacity. Use [`try_alloc`](Self::try_alloc)
/// to handle capacity exhaustion.
pub trait BoundedAlloc: Alloc {
    /// Tries to allocate a slot and write the value.
    ///
    /// Returns `Err(Full(value))` if the allocator is full, giving the
    /// value back to the caller so it can be recovered or retried.
    fn try_alloc(value: Self::Item) -> Result<Slot<Self::Item>, Full<Self::Item>>;
}
116
/// Trait for unbounded (growable) allocators.
///
/// Unbounded allocators always succeed (grow as needed).
pub trait UnboundedAlloc: Alloc {
    /// Allocates a slot and writes the value.
    ///
    /// Always succeeds - grows the allocator if needed.
    fn alloc(value: Self::Item) -> Slot<Self::Item>;

    /// Ensures at least `count` chunks are allocated.
    ///
    /// No-op if the allocator already has `count` or more chunks.
    /// Useful for pre-warming before a latency-sensitive section.
    fn reserve_chunks(count: usize);

    /// Returns the number of allocated chunks.
    fn chunk_count() -> usize;
}
134
135// =============================================================================
136// BoxSlot<T, A>
137// =============================================================================
138
/// RAII handle to a slab-allocated value, generic over allocator.
///
/// `BoxSlot<T, A>` is 8 bytes (one pointer).
///
/// This is the slot type generated by `bounded_allocator!` and
/// `unbounded_allocator!` macros via `type BoxSlot = alloc::BoxSlot<T, Allocator>`.
///
/// # Borrow Traits
///
/// `BoxSlot` implements [`Borrow<T>`] and [`BorrowMut<T>`], enabling use as
/// HashMap keys that borrow `T` for lookups.
///
/// # Thread Safety
///
/// `BoxSlot` is `!Send` and `!Sync` (via `PhantomData<*const ()>` inside the
/// marker). It must only be used from the thread that created it.
#[must_use = "dropping BoxSlot returns it to the allocator"]
pub struct BoxSlot<T, A: Alloc<Item = T>> {
    // Owning pointer into the slab; dropping the BoxSlot frees it via A::free.
    slot: Slot<T>,
    // PhantomData carries the allocator type AND makes BoxSlot !Send + !Sync
    // (*mut is !Send + !Sync, and PhantomData<A> ties the type)
    _marker: PhantomData<(A, *const ())>,
}
162
impl<T, A: Alloc<Item = T>> BoxSlot<T, A> {
    /// Leaks the slot permanently, returning an immutable reference.
    ///
    /// The value will never be dropped or deallocated. Use this for data
    /// that must live for the lifetime of the program.
    ///
    /// Returns a `LocalStatic<T>` which is `!Send + !Sync` and only supports
    /// immutable access via `Deref`.
    #[inline]
    pub fn leak(self) -> LocalStatic<T> {
        let slot_ptr = self.slot.as_ptr();
        std::mem::forget(self);
        // SAFETY: Destructor won't run (forgot self).
        // The pointer is valid for 'static because slab storage is leaked.
        // Union field `value` is active because the slot is occupied.
        let value_ptr = unsafe { (*slot_ptr).value.as_ptr() };
        unsafe { LocalStatic::new(value_ptr) }
    }

    /// Converts to a raw slot for manual memory management.
    ///
    /// The slot is NOT deallocated. Caller must eventually:
    /// - Call `Allocator::free()` to drop and deallocate
    /// - Call `Allocator::take()` to extract value and deallocate
    /// - Wrap in another `BoxSlot` via `from_slot()`
    #[inline]
    pub fn into_slot(self) -> Slot<T> {
        // SAFETY: Reading slot is safe since we own the BoxSlot.
        // Forgot self prevents destructor from running.
        let slot = unsafe { ptr::read(&raw const self.slot) };
        std::mem::forget(self);
        slot
    }

    /// Extracts the value from the slot, deallocating the slot.
    ///
    /// This is analogous to `Box::into_inner`.
    #[inline]
    pub fn into_inner(self) -> T {
        // Extract slot before forget
        // SAFETY: We're about to forget self, so reading slot is safe
        let slot = unsafe { ptr::read(&raw const self.slot) };
        std::mem::forget(self);

        // SAFETY: We owned the slot, no other references exist
        unsafe { A::take(slot) }
    }

    /// Replaces the value in the slot, returning the old value.
    ///
    /// The slot stays occupied; only the payload is swapped.
    #[inline]
    pub fn replace(&mut self, value: T) -> T {
        // SAFETY: We own the slot exclusively (&mut self), union field `value` is active
        unsafe {
            let val_ptr = (*(*self.slot.as_ptr()).value).as_mut_ptr();
            // read + write instead of mem::replace to avoid creating a &mut T
            let old = ptr::read(val_ptr);
            ptr::write(val_ptr, value);
            old
        }
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    ///
    /// # Example
    ///
    /// ```ignore
    /// let slot = order_alloc::BoxSlot::try_new(MyValue::new())?;
    /// let pinned: Pin<&MyValue> = slot.pin();
    /// ```
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses — they don't move until
        // the slot is explicitly freed. The BoxSlot owns the slot, so the
        // value cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    ///
    /// Useful for async code that requires `Pin<&mut Self>` for polling futures.
    ///
    /// # Example
    ///
    /// ```ignore
    /// let mut slot = order_alloc::BoxSlot::try_new(MyFuture::new())?;
    /// let pinned: Pin<&mut MyFuture> = slot.pin_mut();
    /// pinned.poll(cx);
    /// ```
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses — they don't move until
        // the slot is explicitly freed. The BoxSlot owns the slot exclusively
        // (&mut self), so the value cannot be freed or moved while this
        // mutable reference exists.
        unsafe { Pin::new_unchecked(&mut **self) }
    }

    /// Wraps a raw slot in an RAII handle.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from an allocator of type `A`
    /// - `slot` must not be wrapped in another `BoxSlot` or otherwise managed
    #[inline]
    pub unsafe fn from_slot(slot: Slot<T>) -> Self {
        BoxSlot {
            slot,
            _marker: PhantomData,
        }
    }

    /// Returns a raw pointer to the underlying slot cell.
    ///
    /// The pointer is valid as long as the `BoxSlot` (or any handle derived
    /// from the same slab slot) is alive.
    #[inline]
    pub fn as_ptr(&self) -> *mut SlotCell<T> {
        self.slot.as_ptr()
    }

    /// Consumes the `BoxSlot` and returns a raw pointer to the slot cell.
    ///
    /// The slot is NOT deallocated. The caller takes ownership and must
    /// eventually:
    /// - Call [`from_raw`](Self::from_raw) to reconstruct the `BoxSlot`
    /// - Or call [`Alloc::free`] / [`Alloc::take`] on the underlying [`Slot`]
    #[inline]
    pub fn into_raw(self) -> *mut SlotCell<T> {
        let ptr = self.slot.as_ptr();
        // Skip Drop so the slot is not returned to the freelist.
        std::mem::forget(self);
        ptr
    }

    /// Reconstructs a `BoxSlot` from a raw pointer.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid, occupied slot cell within an allocator
    ///   of type `A`
    /// - The caller must own the slot (no other `BoxSlot` wrapping it)
    #[inline]
    pub unsafe fn from_raw(ptr: *mut SlotCell<T>) -> Self {
        BoxSlot {
            slot: unsafe { Slot::from_ptr(ptr) },
            _marker: PhantomData,
        }
    }
}
311
312impl<T, A: UnboundedAlloc<Item = T>> BoxSlot<T, A> {
313    /// Creates a new slot containing the given value.
314    ///
315    /// Always succeeds - grows the allocator if needed.
316    ///
317    /// Only available for unbounded allocators. For bounded allocators,
318    /// use [`try_new`](Self::try_new).
319    #[inline]
320    pub fn new(value: T) -> Self {
321        BoxSlot {
322            slot: A::alloc(value),
323            _marker: PhantomData,
324        }
325    }
326}
327
328impl<T, A: BoundedAlloc<Item = T>> BoxSlot<T, A> {
329    /// Tries to create a new slot containing the given value.
330    ///
331    /// Returns `Err(Full(value))` if the allocator is at capacity,
332    /// giving the value back to the caller.
333    ///
334    /// Only available for bounded allocators. For unbounded allocators,
335    /// use [`new`](Self::new) directly - it never fails.
336    #[inline]
337    pub fn try_new(value: T) -> Result<Self, Full<T>> {
338        Ok(BoxSlot {
339            slot: A::try_alloc(value)?,
340            _marker: PhantomData,
341        })
342    }
343}
344
345// =============================================================================
346// Trait Implementations for BoxSlot
347// =============================================================================
348
impl<T, A: Alloc<Item = T>> Deref for BoxSlot<T, A> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // NOTE(review): `&self.slot` is `&Slot<T>`; this compiles only via
        // deref coercion, so it assumes `Slot<T>: Deref<Target = T>` —
        // confirm in crate::shared. The slot is occupied for the whole
        // lifetime of the BoxSlot, so the value is always readable here.
        &self.slot
    }
}
357
impl<T, A: Alloc<Item = T>> DerefMut for BoxSlot<T, A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // NOTE(review): relies on `Slot<T>: DerefMut<Target = T>` coercion —
        // confirm in crate::shared. &mut self guarantees exclusive access.
        &mut self.slot
    }
}
364
365impl<T, A: Alloc<Item = T>> AsRef<T> for BoxSlot<T, A> {
366    #[inline]
367    fn as_ref(&self) -> &T {
368        self
369    }
370}
371
372impl<T, A: Alloc<Item = T>> AsMut<T> for BoxSlot<T, A> {
373    #[inline]
374    fn as_mut(&mut self) -> &mut T {
375        self
376    }
377}
378
379impl<T, A: Alloc<Item = T>> Borrow<T> for BoxSlot<T, A> {
380    #[inline]
381    fn borrow(&self) -> &T {
382        self
383    }
384}
385
386impl<T, A: Alloc<Item = T>> BorrowMut<T> for BoxSlot<T, A> {
387    #[inline]
388    fn borrow_mut(&mut self) -> &mut T {
389        self
390    }
391}
392
impl<T, A: Alloc<Item = T>> Drop for BoxSlot<T, A> {
    #[inline]
    fn drop(&mut self) {
        // Read slot via raw ptr because we're in drop and can't move out of &mut self
        // SAFETY: We own the slot, this is the destructor
        let slot = unsafe { ptr::read(&raw const self.slot) };
        // SAFETY: We own the slot, no other references exist.
        // A::free drops the value and returns the slot to the freelist.
        unsafe { A::free(slot) };
    }
}
403
404impl<T: fmt::Debug, A: Alloc<Item = T>> fmt::Debug for BoxSlot<T, A> {
405    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
406        f.debug_struct("BoxSlot").field("value", &**self).finish()
407    }
408}
409
410// =============================================================================
411// LocalStatic
412// =============================================================================
413
/// A `'static` reference to a thread-local slab-allocated value.
///
/// Returned by [`BoxSlot::leak()`]. The reference is valid for the lifetime of
/// the program, but cannot be sent to other threads because the backing slab
/// is thread-local.
///
/// Once leaked, the slot is permanently occupied — there is no way to reclaim it.
#[repr(transparent)]
pub struct LocalStatic<T> {
    // Points into leaked slab storage; never dangles after leak().
    ptr: *const T,
    _marker: PhantomData<*const ()>, // !Send + !Sync
}
426
impl<T> LocalStatic<T> {
    /// Creates a new `LocalStatic` from a raw pointer.
    ///
    /// Crate-internal: only [`BoxSlot::leak`] is expected to call this.
    ///
    /// # Safety
    ///
    /// The pointer must point to a valid, permanently-leaked value in a
    /// thread-local slab.
    #[inline]
    pub(crate) unsafe fn new(ptr: *const T) -> Self {
        LocalStatic {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Returns a raw pointer to the value.
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.ptr
    }

    /// Returns a pinned reference to the value.
    ///
    /// Leaked slab values have stable addresses — they never move for the
    /// lifetime of the program. This makes `Pin` safe without any `Unpin` bound.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Leaked values have stable addresses forever.
        unsafe { Pin::new_unchecked(&**self) }
    }
}
458
impl<T> Deref for LocalStatic<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: ptr came from a leaked BoxSlot, value is alive forever,
        // and we're on the same thread (enforced by !Send)
        unsafe { &*self.ptr }
    }
}
469
470impl<T> AsRef<T> for LocalStatic<T> {
471    #[inline]
472    fn as_ref(&self) -> &T {
473        self
474    }
475}
476
impl<T> Clone for LocalStatic<T> {
    #[inline]
    fn clone(&self) -> Self {
        // LocalStatic is just a pointer + marker, so clone is a bitwise copy.
        *self
    }
}

// Copy is sound: the handle never owns the value (it was leaked forever).
impl<T> Copy for LocalStatic<T> {}
485
486impl<T: fmt::Debug> fmt::Debug for LocalStatic<T> {
487    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
488        f.debug_tuple("LocalStatic").field(&self.as_ref()).finish()
489    }
490}
491
492// =============================================================================
493// RcSlot<T, A>
494// =============================================================================
495
/// Reference-counted handle to a slab-allocated value.
///
/// `RcSlot` is a cloneable, RAII handle backed by the existing slab allocator.
/// Cloning bumps the strong count. Dropping decrements it; when the last strong
/// reference drops, the value is dropped. The slab slot is freed when both
/// strong and weak counts reach zero.
///
/// 8 bytes — same as `BoxSlot`.
///
/// # Thread Safety
///
/// `RcSlot` is `!Send` and `!Sync` (same as `BoxSlot`). All access must be from
/// the thread that created the allocator.
#[must_use = "dropping RcSlot decrements the strong count"]
pub struct RcSlot<T, A: Alloc<Item = RcInner<T>>> {
    // ManuallyDrop: RcSlot's own Drop decides whether to free the slot
    // (counts at zero) or merely decrement — BoxSlot's Drop must not run
    // unconditionally.
    inner: ManuallyDrop<BoxSlot<RcInner<T>, A>>,
    _phantom: PhantomData<T>,
}
514
515impl<T, A: UnboundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
516    /// Creates a new `RcSlot` containing the given value.
517    ///
518    /// Always succeeds - grows the allocator if needed.
519    ///
520    /// Only available for unbounded allocators. For bounded allocators,
521    /// use [`try_new`](Self::try_new).
522    #[inline]
523    pub fn new(value: T) -> Self {
524        RcSlot {
525            inner: ManuallyDrop::new(BoxSlot::new(RcInner::new(value))),
526            _phantom: PhantomData,
527        }
528    }
529}
530
531impl<T, A: Alloc<Item = RcInner<T>>> RcSlot<T, A> {
532    /// Creates a weak reference to the same slab slot.
533    #[inline]
534    pub fn downgrade(&self) -> WeakSlot<T, A> {
535        let rc_inner: &RcInner<T> = &self.inner;
536        let new_weak = rc_inner.weak().checked_add(1).expect("weak count overflow");
537        rc_inner.set_weak(new_weak);
538        // SAFETY: We hold a strong ref, slot is alive. Duplicate the pointer.
539        let weak_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
540        WeakSlot {
541            inner: ManuallyDrop::new(weak_slot),
542            _phantom: PhantomData,
543        }
544    }
545
546    /// Returns the strong reference count.
547    #[inline]
548    pub fn strong_count(&self) -> u32 {
549        let rc_inner: &RcInner<T> = &self.inner;
550        rc_inner.strong()
551    }
552
553    /// Returns the weak reference count (excludes the implicit weak).
554    #[inline]
555    pub fn weak_count(&self) -> u32 {
556        let rc_inner: &RcInner<T> = &self.inner;
557        rc_inner.weak().saturating_sub(1)
558    }
559
560    /// Returns a pinned reference to the value.
561    ///
562    /// Slab-allocated values have stable addresses — they never move while
563    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
564    #[inline]
565    pub fn pin(&self) -> Pin<&T> {
566        // SAFETY: Slab values have stable addresses. The RcSlot keeps the
567        // value alive, so the reference is valid.
568        unsafe { Pin::new_unchecked(&**self) }
569    }
570
571    /// Returns a pinned mutable reference if this is the only reference.
572    ///
573    /// Returns `None` if there are other strong or weak references.
574    ///
575    /// Slab-allocated values have stable addresses — they never move while
576    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
577    #[inline]
578    pub fn pin_get_mut(&mut self) -> Option<Pin<&mut T>> {
579        self.get_mut().map(|r| {
580            // SAFETY: Slab values have stable addresses. We verified exclusive
581            // access via get_mut().
582            unsafe { Pin::new_unchecked(r) }
583        })
584    }
585
586    /// Returns a mutable reference if this is the only reference.
587    ///
588    /// Returns `None` if there are other strong or weak references.
589    #[inline]
590    pub fn get_mut(&mut self) -> Option<&mut T> {
591        // Need strong == 1 AND weak == 0 (no outstanding weaks that could upgrade)
592        if self.strong_count() == 1 && self.weak_count() == 0 {
593            // SAFETY: We verified exclusive access
594            Some(unsafe { self.get_mut_unchecked() })
595        } else {
596            None
597        }
598    }
599
600    /// Returns a mutable reference to the value without checking the strong count.
601    ///
602    /// # Safety
603    ///
604    /// Caller must ensure this is the only `RcSlot` (strong_count == 1, weak_count == 0)
605    /// and no `WeakSlot::upgrade` calls are concurrent.
606    #[inline]
607    #[allow(clippy::mut_from_ref)]
608    pub unsafe fn get_mut_unchecked(&self) -> &mut T {
609        // SAFETY: Caller guarantees exclusive access.
610        // Navigate through SlotCell union → RcInner → ManuallyDrop<T> → T
611        let cell_ptr = self.inner.as_ptr();
612        let rc_inner = unsafe { (*(*cell_ptr).value).assume_init_mut() };
613        // SAFETY: value is live, caller guarantees exclusive access.
614        // Dereference through ManuallyDrop to get &mut T.
615        let md = unsafe { rc_inner.value_manual_drop_mut() };
616        &mut *md
617    }
618
619    /// Converts to a raw slot for manual memory management.
620    ///
621    /// Returns `Some(Slot)` if this is the only reference (strong == 1, no weak refs).
622    /// Returns `None` if other strong or weak references exist.
623    ///
624    /// The strong count is decremented but the value is NOT dropped.
625    /// Caller takes ownership and must eventually free via the allocator.
626    #[inline]
627    pub fn into_slot(self) -> Option<Slot<RcInner<T>>> {
628        let rc_inner: &RcInner<T> = &self.inner;
629
630        // Must be only reference - strong == 1 and no external weaks (just implicit)
631        if rc_inner.strong() != 1 || rc_inner.weak() != 1 {
632            return None;
633        }
634
635        // Set counts to 0 - we're taking full ownership via raw Slot
636        rc_inner.set_strong(0);
637        rc_inner.set_weak(0);
638
639        // Extract the raw slot pointer
640        let slot_ptr = self.inner.as_ptr();
641
642        // Don't run Drop (which would try to free)
643        std::mem::forget(self);
644
645        // SAFETY: We verified we're the only reference, slot_ptr is valid
646        Some(unsafe { Slot::from_ptr(slot_ptr) })
647    }
648
649    /// Converts to a raw slot without checking refcounts.
650    ///
651    /// Caller takes full ownership of the slot. Refcounts are NOT modified —
652    /// the caller is responsible for ensuring no other references exist or
653    /// for handling the consequences.
654    ///
655    /// # Safety
656    ///
657    /// - Caller takes ownership of the slot and the value within
658    /// - If other strong references exist, they will see stale refcounts
659    ///   and may double-free or access dropped memory
660    /// - If weak references exist, they will fail to upgrade (this is safe)
661    ///   but may attempt deallocation based on stale counts
662    #[inline]
663    pub unsafe fn into_slot_unchecked(self) -> Slot<RcInner<T>> {
664        // DON'T touch refcounts - caller takes full ownership
665        // Any other refs will see stale counts, but that's caller's problem
666
667        // Extract the raw slot pointer
668        let slot_ptr = self.inner.as_ptr();
669
670        // Don't run Drop
671        std::mem::forget(self);
672
673        unsafe { Slot::from_ptr(slot_ptr) }
674    }
675
676    // =========================================================================
677    // Raw pointer API (mirrors std::rc::Rc)
678    // =========================================================================
679
680    /// Returns a raw pointer to the underlying slot cell.
681    ///
682    /// The pointer is valid as long as any strong reference exists.
683    #[inline]
684    pub fn as_ptr(&self) -> *mut SlotCell<RcInner<T>> {
685        self.inner.as_ptr()
686    }
687
688    /// Consumes the `RcSlot` without decrementing the strong count.
689    ///
690    /// The caller takes responsibility for the strong count and must
691    /// eventually call [`from_raw`](Self::from_raw) (to reconstruct and
692    /// drop) or [`decrement_strong_count`](Self::decrement_strong_count).
693    #[inline]
694    pub fn into_raw(self) -> *mut SlotCell<RcInner<T>> {
695        let ptr = self.inner.as_ptr();
696        std::mem::forget(self);
697        ptr
698    }
699
700    /// Reconstructs an `RcSlot` from a raw pointer without incrementing
701    /// the strong count.
702    ///
703    /// # Safety
704    ///
705    /// - `ptr` must point to a valid, occupied `SlotCell<RcInner<T>>` within
706    ///   an allocator of type `A`
707    /// - The caller must own a strong count for this handle (e.g., obtained
708    ///   via [`into_raw`](Self::into_raw) or
709    ///   [`increment_strong_count`](Self::increment_strong_count))
710    #[inline]
711    pub unsafe fn from_raw(ptr: *mut SlotCell<RcInner<T>>) -> Self {
712        RcSlot {
713            inner: ManuallyDrop::new(unsafe { BoxSlot::from_raw(ptr) }),
714            _phantom: PhantomData,
715        }
716    }
717
718    /// Increments the strong count via a raw pointer.
719    ///
720    /// Use this when a data structure needs to acquire an additional strong
721    /// reference from a raw pointer without holding an `RcSlot`.
722    ///
723    /// # Safety
724    ///
725    /// - `ptr` must point to a live `RcInner<T>` (strong > 0)
726    #[inline]
727    pub unsafe fn increment_strong_count(ptr: *mut SlotCell<RcInner<T>>) {
728        // SAFETY: Caller guarantees ptr points to a live RcInner
729        let rc_inner = unsafe { (*ptr).value.assume_init_ref() };
730        let strong = rc_inner.strong();
731        rc_inner.set_strong(strong + 1);
732    }
733
734    /// Decrements the strong count via a raw pointer.
735    ///
736    /// If the strong count reaches zero, the value is dropped. If both
737    /// strong and weak counts reach zero, the slab slot is freed.
738    ///
739    /// # Safety
740    ///
741    /// - `ptr` must point to a valid `RcInner<T>`
742    /// - The caller must own a strong count to decrement
743    /// - After this call, `ptr` may be invalid if the slot was freed
744    #[inline]
745    pub unsafe fn decrement_strong_count(ptr: *mut SlotCell<RcInner<T>>) {
746        // Reconstruct and drop — reuses existing Drop logic
747        drop(unsafe { Self::from_raw(ptr) });
748    }
749}
750
751impl<T, A: BoundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
752    /// Tries to create a new `RcSlot` containing the given value.
753    ///
754    /// Returns `Err(Full(value))` if the allocator is at capacity.
755    ///
756    /// Only available for bounded allocators. For unbounded allocators,
757    /// use [`new`](Self::new) directly - it never fails.
758    #[inline]
759    pub fn try_new(value: T) -> Result<Self, Full<T>> {
760        match BoxSlot::try_new(RcInner::new(value)) {
761            Ok(slot) => Ok(RcSlot {
762                inner: ManuallyDrop::new(slot),
763                _phantom: PhantomData,
764            }),
765            Err(full) => Err(Full(full.into_inner().into_value())),
766        }
767    }
768}
769
770impl<T: Clone, A: UnboundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
771    /// Makes a mutable reference to the value, cloning if necessary.
772    ///
773    /// If this is the only reference (strong == 1, weak == 0), returns a
774    /// mutable reference directly. Otherwise, clones the value into a new
775    /// slot and returns a mutable reference to the clone.
776    ///
777    /// Always succeeds - grows the allocator if needed.
778    ///
779    /// Only available for unbounded allocators. For bounded allocators,
780    /// use [`try_make_mut`](Self::try_make_mut).
781    #[inline]
782    pub fn make_mut(&mut self) -> &mut T {
783        if self.strong_count() != 1 || self.weak_count() != 0 {
784            // Clone into new slot, replace self
785            *self = Self::new((**self).clone());
786        }
787        // SAFETY: Now we're the only reference
788        unsafe { self.get_mut_unchecked() }
789    }
790}
791
792impl<T: Clone, A: BoundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
793    /// Tries to make a mutable reference to the value, cloning if necessary.
794    ///
795    /// If this is the only reference (strong == 1, weak == 0), returns a
796    /// mutable reference directly. Otherwise, attempts to clone the value
797    /// into a new slot.
798    ///
799    /// Returns `Err(Full)` if allocation fails.
800    ///
801    /// Only available for bounded allocators. For unbounded allocators,
802    /// use [`make_mut`](Self::make_mut) directly - it never fails.
803    #[inline]
804    pub fn try_make_mut(&mut self) -> Result<&mut T, Full<()>> {
805        if self.strong_count() != 1 || self.weak_count() != 0 {
806            // Clone into new slot, replace self
807            match Self::try_new((**self).clone()) {
808                Ok(new_slot) => *self = new_slot,
809                Err(_) => return Err(Full(())),
810            }
811        }
812        // SAFETY: Now we're the only reference
813        Ok(unsafe { self.get_mut_unchecked() })
814    }
815}
816
impl<T, A: Alloc<Item = RcInner<T>>> Clone for RcSlot<T, A> {
    // Bumps the strong count and duplicates the slot pointer.
    // Panics on strong-count overflow (u32).
    #[inline]
    fn clone(&self) -> Self {
        let rc_inner: &RcInner<T> = &self.inner;
        let new_strong = rc_inner
            .strong()
            .checked_add(1)
            .expect("RcSlot strong count overflow");
        rc_inner.set_strong(new_strong);
        // SAFETY: We hold a strong ref, slot is alive
        let cloned_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
        RcSlot {
            inner: ManuallyDrop::new(cloned_slot),
            _phantom: PhantomData,
        }
    }
}
834
impl<T, A: Alloc<Item = RcInner<T>>> Drop for RcSlot<T, A> {
    #[inline]
    fn drop(&mut self) {
        // All refcount access goes through raw pointers to avoid Stacked
        // Borrows invalidation when we take &mut to drop the value.
        let cell_ptr = self.inner.as_ptr();

        // SAFETY: Slot is alive, union field `value` is active
        let strong = unsafe { (*cell_ptr).value.assume_init_ref().strong() };
        if strong > 1 {
            // Not the last strong ref — just decrement and bail.
            // SAFETY: same as above
            unsafe { (*cell_ptr).value.assume_init_ref().set_strong(strong - 1) };
            return;
        }

        // Last strong reference — drop the value
        // SAFETY: same as above
        unsafe { (*cell_ptr).value.assume_init_ref().set_strong(0) };

        // SAFETY: We are the last strong ref, value is live. We need &mut
        // to drop the ManuallyDrop<T> inside RcInner.
        unsafe {
            let rc_inner_mut = (*(*cell_ptr).value).assume_init_mut();
            ManuallyDrop::drop(rc_inner_mut.value_manual_drop_mut());
        }

        // Re-derive shared ref after the mutable drop above
        // SAFETY: RcInner is still valid memory (Cell<u32> fields are Copy,
        // ManuallyDrop<T> is dropped but the storage is still there)
        let weak = unsafe { (*cell_ptr).value.assume_init_ref().weak() };
        if weak == 1 {
            // No outstanding weaks — free the slot.
            // SAFETY: Value is dropped. Slot's drop_in_place on RcInner is
            // a no-op (ManuallyDrop<T> already dropped, Cell<u32> is Copy).
            // BoxSlot's Drop will call A::free() to return slot to freelist.
            unsafe { ManuallyDrop::drop(&mut self.inner) };
        } else {
            // Release the implicit weak held by the strong refs.
            // SAFETY: same as weak read above
            unsafe { (*cell_ptr).value.assume_init_ref().set_weak(weak - 1) };
            // Zombie: T dropped, weak refs still hold the slot alive
        }
    }
}
877}
878
879impl<T, A: Alloc<Item = RcInner<T>>> Deref for RcSlot<T, A> {
880    type Target = T;
881
882    #[inline]
883    fn deref(&self) -> &T {
884        let rc_inner: &RcInner<T> = &self.inner;
885        rc_inner.value()
886    }
887}
888
889impl<T, A: Alloc<Item = RcInner<T>>> AsRef<T> for RcSlot<T, A> {
890    #[inline]
891    fn as_ref(&self) -> &T {
892        self
893    }
894}
895
896impl<T: fmt::Debug, A: Alloc<Item = RcInner<T>>> fmt::Debug for RcSlot<T, A> {
897    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
898        f.debug_struct("RcSlot")
899            .field("strong", &self.strong_count())
900            .field("weak", &self.weak_count())
901            .field("value", &**self)
902            .finish()
903    }
904}
905
906// =============================================================================
907// WeakSlot<T, A>
908// =============================================================================
909
/// Weak reference to a slab-allocated value.
///
/// Does not keep the value alive. Must [`upgrade`](Self::upgrade) to access
/// the value. Keeps the slab slot alive (for upgrade checks) until all weak
/// and strong references are dropped.
///
/// 8 bytes — same as `BoxSlot`.
pub struct WeakSlot<T, A: Alloc<Item = RcInner<T>>> {
    // Wrapped in ManuallyDrop: the Drop impl decides whether to release the
    // slot (only when this is the last weak ref and strong == 0).
    inner: ManuallyDrop<BoxSlot<RcInner<T>, A>>,
    // WeakSlot stores no T directly; marks the logical relation to T for
    // variance/drop-check purposes.
    _phantom: PhantomData<T>,
}
921
922impl<T, A: Alloc<Item = RcInner<T>>> WeakSlot<T, A> {
923    /// Attempts to upgrade to a strong reference.
924    ///
925    /// Returns `Some(RcSlot)` if the value is still alive (strong > 0),
926    /// or `None` if the last strong reference has been dropped.
927    #[inline]
928    pub fn upgrade(&self) -> Option<RcSlot<T, A>> {
929        let rc_inner: &RcInner<T> = &self.inner;
930        let strong = rc_inner.strong();
931        if strong == 0 {
932            return None;
933        }
934        let new_strong = strong.checked_add(1).expect("RcSlot strong count overflow");
935        rc_inner.set_strong(new_strong);
936        // SAFETY: strong > 0 means slot is alive and value is valid
937        let slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
938        Some(RcSlot {
939            inner: ManuallyDrop::new(slot),
940            _phantom: PhantomData,
941        })
942    }
943
944    /// Returns the strong reference count.
945    #[inline]
946    pub fn strong_count(&self) -> u32 {
947        let rc_inner: &RcInner<T> = &self.inner;
948        rc_inner.strong()
949    }
950
951    /// Returns the weak reference count (excludes the implicit weak).
952    #[inline]
953    pub fn weak_count(&self) -> u32 {
954        let rc_inner: &RcInner<T> = &self.inner;
955        let weak = rc_inner.weak();
956        // If strong > 0, subtract the implicit weak. If strong == 0,
957        // the implicit weak was already decremented.
958        if rc_inner.strong() > 0 {
959            weak.saturating_sub(1)
960        } else {
961            weak
962        }
963    }
964}
965
966impl<T, A: Alloc<Item = RcInner<T>>> Clone for WeakSlot<T, A> {
967    #[inline]
968    fn clone(&self) -> Self {
969        let rc_inner: &RcInner<T> = &self.inner;
970        let new_weak = rc_inner
971            .weak()
972            .checked_add(1)
973            .expect("WeakSlot weak count overflow");
974        rc_inner.set_weak(new_weak);
975        // SAFETY: We hold a weak ref, slot memory is alive
976        let cloned_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
977        WeakSlot {
978            inner: ManuallyDrop::new(cloned_slot),
979            _phantom: PhantomData,
980        }
981    }
982}
983
984impl<T, A: Alloc<Item = RcInner<T>>> Drop for WeakSlot<T, A> {
985    #[inline]
986    fn drop(&mut self) {
987        let rc_inner: &RcInner<T> = &self.inner;
988        let weak = rc_inner.weak();
989
990        // Always decrement weak count
991        rc_inner.set_weak(weak.saturating_sub(1));
992
993        // Dealloc only if this was the last weak AND value already dropped (strong==0)
994        if weak == 1 && rc_inner.strong() == 0 {
995            // Zombie slot — value already dropped, dealloc the slot.
996            // SAFETY: RcInner's ManuallyDrop<T> is already dropped.
997            // BoxSlot's drop_in_place on RcInner is a no-op. Dealloc returns
998            // the slot to the freelist.
999            unsafe { ManuallyDrop::drop(&mut self.inner) };
1000        }
1001        // If strong > 0, strong holder's drop will handle dealloc.
1002        // If weak > 1, other weak refs still hold the slot alive.
1003    }
1004}
1005
1006impl<T, A: Alloc<Item = RcInner<T>>> fmt::Debug for WeakSlot<T, A> {
1007    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1008        f.debug_struct("WeakSlot")
1009            .field("strong", &self.strong_count())
1010            .field("weak", &self.weak_count())
1011            .finish()
1012    }
1013}