Skip to main content

nexus_slab/
alloc.rs

//! Generic slab allocator trait and slot types.
//!
//! This module provides:
//! - [`Alloc`] - base trait for slot deallocation
//! - [`BoundedAlloc`] - trait for fixed-capacity allocators (can fail)
//! - [`UnboundedAlloc`] - trait for growable allocators (always succeeds)
//! - [`BoxSlot`] - 8-byte RAII handle generic over allocator
//! - [`RcSlot`] / [`WeakSlot`] - reference-counted handles

10use std::borrow::{Borrow, BorrowMut};
11use std::fmt;
12use std::marker::PhantomData;
13use std::mem::ManuallyDrop;
14use std::ops::{Deref, DerefMut};
15use std::pin::Pin;
16use std::ptr;
17
18use crate::shared::{RawSlot, RcInner, SlotCell};
19
20// =============================================================================
21// Full<T>
22// =============================================================================
23
/// Error returned when a bounded allocator cannot accept a value.
///
/// The rejected value is carried inside so the caller can recover it
/// via [`into_inner`](Self::into_inner) or the public tuple field.
pub struct Full<T>(pub T);

impl<T> Full<T> {
    /// Unwraps the error, handing back the value that failed to allocate.
    #[inline]
    pub fn into_inner(self) -> T {
        let Full(value) = self;
        value
    }
}

impl<T> fmt::Debug for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The payload is not required to be Debug, so elide it.
        write!(f, "Full(..)")
    }
}

impl<T> fmt::Display for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "allocator full")
    }
}
48
49// =============================================================================
50// Traits
51// =============================================================================
52
/// Base trait for slab allocators - handles slot deallocation.
///
/// Each macro-generated allocator is a ZST that implements this trait.
/// All operations go through associated functions (no `&self`) since
/// the backing storage lives in a `thread_local!`.
///
/// # Safety
///
/// Implementors must guarantee:
/// - `free` correctly drops the stored item and returns the slot to the freelist.
///   For byte allocators (`AlignedBytes<N>` is `Copy`), `free` only does a
///   freelist return — the actual `T` value must be dropped by the caller
///   (e.g., `ByteBoxSlot::drop` calls `drop_in_place::<T>()` before `A::free`).
/// - `take` correctly moves the value out and returns the slot to the freelist
/// - All operations are single-threaded (TLS-backed)
pub unsafe trait Alloc: Sized + 'static {
    /// The type stored in each slot.
    type Item;

    /// Returns `true` if the allocator has been initialized.
    fn is_initialized() -> bool;

    /// Returns the total slot capacity.
    ///
    /// For bounded allocators this is fixed at init. For unbounded allocators
    /// this is the sum across all allocated chunks.
    fn capacity() -> usize;

    /// Drops the stored item and returns the slot to the freelist.
    ///
    /// For typed allocators, this drops `T` via `drop_in_place` then frees.
    /// For byte allocators (`AlignedBytes<N>` is `Copy`), this only does a
    /// freelist return — the caller must drop `T` before calling `free`.
    ///
    /// This is for manual memory management after calling `BoxSlot::into_slot()`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this allocator
    /// - No references to the slot's value may exist
    /// - For byte allocators: the value must already have been dropped or moved out
    ///
    /// Note: Double-free is prevented at compile time (`RawSlot` is move-only).
    #[allow(clippy::needless_pass_by_value)] // consumes slot to prevent reuse
    unsafe fn free(slot: RawSlot<Self::Item>);

    /// Takes the value from a slot, returning it and deallocating the slot.
    ///
    /// This is for manual memory management after calling `BoxSlot::into_slot()`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this allocator
    /// - No references to the slot's value may exist
    ///
    /// Note: Double-free is prevented at compile time (`RawSlot` is move-only).
    #[allow(clippy::needless_pass_by_value)] // consumes slot to prevent reuse
    unsafe fn take(slot: RawSlot<Self::Item>) -> Self::Item;
}
112
/// Trait for bounded (fixed-capacity) allocators.
///
/// Bounded allocators can fail when at capacity. Use [`try_alloc`](Self::try_alloc)
/// to handle capacity exhaustion.
pub trait BoundedAlloc: Alloc {
    /// Tries to allocate a slot and write the value.
    ///
    /// Returns `Err(Full(value))` if the allocator is full, giving the
    /// value back to the caller.
    fn try_alloc(value: Self::Item) -> Result<RawSlot<Self::Item>, Full<Self::Item>>;
}
124
/// Trait for unbounded (growable) allocators.
///
/// Unbounded allocators always succeed (grow as needed).
pub trait UnboundedAlloc: Alloc {
    /// Allocates a slot and writes the value.
    ///
    /// Always succeeds - grows the allocator if needed.
    fn alloc(value: Self::Item) -> RawSlot<Self::Item>;

    /// Ensures at least `count` chunks are allocated.
    ///
    /// No-op if the allocator already has `count` or more chunks.
    fn reserve_chunks(count: usize);

    /// Returns the number of allocated chunks.
    fn chunk_count() -> usize;
}
142
143// =============================================================================
144// BoxSlot<T, A>
145// =============================================================================
146
/// RAII handle to a slab-allocated value, generic over allocator.
///
/// `BoxSlot<T, A>` is 8 bytes (one pointer).
///
/// This is the slot type generated by `bounded_allocator!` and
/// `unbounded_allocator!` macros via `type BoxSlot = alloc::BoxSlot<T, Allocator>`.
///
/// # Borrow Traits
///
/// `BoxSlot` implements [`Borrow<T>`] and [`BorrowMut<T>`], enabling use as
/// HashMap keys that borrow `T` for lookups.
///
/// # Thread Safety
///
/// `BoxSlot` is `!Send` and `!Sync` (via `PhantomData<*const ()>` inside the
/// marker). It must only be used from the thread that created it.
#[must_use = "dropping BoxSlot returns it to the allocator"]
pub struct BoxSlot<T, A: Alloc<Item = T>> {
    // Pointer into the thread-local slab; always refers to an occupied cell.
    ptr: *mut SlotCell<T>,
    // PhantomData carries the allocator type AND makes BoxSlot !Send + !Sync
    // (*mut is !Send + !Sync, and PhantomData<A> ties the type)
    _marker: PhantomData<(A, *const ())>,
}
170
impl<T, A: Alloc<Item = T>> BoxSlot<T, A> {
    /// Leaks the slot permanently, returning an immutable reference.
    ///
    /// The value will never be dropped or deallocated. Use this for data
    /// that must live for the lifetime of the program.
    ///
    /// Returns a `LocalStatic<T>` which is `!Send + !Sync` and only supports
    /// immutable access via `Deref`.
    #[inline]
    pub fn leak(self) -> LocalStatic<T> {
        let slot_ptr = self.ptr;
        std::mem::forget(self);
        // SAFETY: Destructor won't run (forgot self).
        // The pointer is valid for 'static because slab storage is leaked.
        // Union field `value` is active because the slot is occupied.
        let value_ptr = unsafe { (*slot_ptr).value.as_ptr() };
        unsafe { LocalStatic::new(value_ptr) }
    }

    /// Converts to a raw slot for manual memory management.
    ///
    /// The slot is NOT deallocated. Caller must eventually:
    /// - Call `Allocator::free()` to drop and deallocate
    /// - Call `Allocator::take()` to extract value and deallocate
    /// - Wrap in another `BoxSlot` via `from_slot()`
    #[inline]
    pub fn into_slot(self) -> RawSlot<T> {
        let ptr = self.ptr;
        std::mem::forget(self);
        // SAFETY: ptr came from a valid allocation
        unsafe { RawSlot::from_ptr(ptr) }
    }

    /// Extracts the value from the slot, deallocating the slot.
    ///
    /// This is analogous to `Box::into_inner`.
    #[inline]
    pub fn into_inner(self) -> T {
        let ptr = self.ptr;
        std::mem::forget(self);

        // SAFETY: ptr came from a valid allocation, construct RawSlot for take
        let slot = unsafe { RawSlot::from_ptr(ptr) };
        // SAFETY: We owned the slot, no other references exist
        unsafe { A::take(slot) }
    }

    /// Replaces the value in the slot, returning the old value.
    #[inline]
    pub fn replace(&mut self, value: T) -> T {
        // SAFETY: We own the slot exclusively (&mut self), union field `value` is active
        unsafe {
            let val_ptr = (*(*self.ptr).value).as_mut_ptr();
            // read-then-write is the manual equivalent of ptr::replace;
            // the old value is moved out before the new one is written.
            let old = ptr::read(val_ptr);
            ptr::write(val_ptr, value);
            old
        }
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    ///
    /// For polling futures (which requires `Pin<&mut Self>`), use
    /// [`pin_mut`](Self::pin_mut) instead.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses — they don't move until
        // the slot is explicitly freed. The BoxSlot owns the slot, so the
        // value cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    ///
    /// Useful for async code that requires `Pin<&mut Self>` for polling futures.
    ///
    /// # Example
    ///
    /// ```ignore
    /// let mut slot = order_alloc::BoxSlot::try_new(MyFuture::new())?;
    /// let pinned: Pin<&mut MyFuture> = slot.pin_mut();
    /// pinned.poll(cx);
    /// ```
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses — they don't move until
        // the slot is explicitly freed. The BoxSlot owns the slot exclusively
        // (&mut self), so the value cannot be freed or moved while this
        // mutable reference exists.
        unsafe { Pin::new_unchecked(&mut **self) }
    }

    /// Wraps a raw slot in an RAII handle.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from an allocator of type `A`
    /// - `slot` must not be wrapped in another `BoxSlot` or otherwise managed
    #[inline]
    pub unsafe fn from_slot(slot: RawSlot<T>) -> Self {
        BoxSlot {
            ptr: slot.into_ptr(),
            _marker: PhantomData,
        }
    }

    /// Returns a raw pointer to the underlying slot cell.
    ///
    /// The pointer is valid as long as the `BoxSlot` (or any handle derived
    /// from the same slab slot) is alive.
    #[inline]
    pub fn as_ptr(&self) -> *mut SlotCell<T> {
        self.ptr
    }

    /// Consumes the `BoxSlot` and returns a raw pointer to the slot cell.
    ///
    /// The slot is NOT deallocated. The caller takes ownership and must
    /// eventually:
    /// - Call [`from_raw`](Self::from_raw) to reconstruct the `BoxSlot`
    /// - Or call [`Alloc::free`] / [`Alloc::take`] on the underlying [`RawSlot`]
    #[inline]
    pub fn into_raw(self) -> *mut SlotCell<T> {
        let ptr = self.ptr;
        std::mem::forget(self);
        ptr
    }

    /// Reconstructs a `BoxSlot` from a raw pointer.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid, occupied slot cell within an allocator
    ///   of type `A`
    /// - The caller must own the slot (no other `BoxSlot` wrapping it)
    #[inline]
    pub unsafe fn from_raw(ptr: *mut SlotCell<T>) -> Self {
        BoxSlot {
            ptr,
            _marker: PhantomData,
        }
    }
}
318
319impl<T, A: UnboundedAlloc<Item = T>> BoxSlot<T, A> {
320    /// Creates a new slot containing the given value.
321    ///
322    /// Always succeeds - grows the allocator if needed.
323    ///
324    /// Only available for unbounded allocators. For bounded allocators,
325    /// use [`try_new`](Self::try_new).
326    #[inline]
327    pub fn new(value: T) -> Self {
328        BoxSlot {
329            ptr: A::alloc(value).into_ptr(),
330            _marker: PhantomData,
331        }
332    }
333}
334
335impl<T, A: BoundedAlloc<Item = T>> BoxSlot<T, A> {
336    /// Tries to create a new slot containing the given value.
337    ///
338    /// Returns `Err(Full(value))` if the allocator is at capacity,
339    /// giving the value back to the caller.
340    ///
341    /// Only available for bounded allocators. For unbounded allocators,
342    /// use [`new`](Self::new) directly - it never fails.
343    #[inline]
344    pub fn try_new(value: T) -> Result<Self, Full<T>> {
345        Ok(BoxSlot {
346            ptr: A::try_alloc(value)?.into_ptr(),
347            _marker: PhantomData,
348        })
349    }
350}
351
352// =============================================================================
353// Trait Implementations for BoxSlot
354// =============================================================================
355
impl<T, A: Alloc<Item = T>> Deref for BoxSlot<T, A> {
    type Target = T;

    /// Shared access to the stored value.
    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: BoxSlot was created from a valid, occupied SlotCell.
        // Union field `value` is active because the slot is occupied.
        unsafe { (*self.ptr).value.assume_init_ref() }
    }
}
366
impl<T, A: Alloc<Item = T>> DerefMut for BoxSlot<T, A> {
    /// Exclusive access to the stored value.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: We have &mut self, guaranteeing exclusive access.
        // Union field `value` is active because the slot is occupied.
        unsafe { (*(*self.ptr).value).assume_init_mut() }
    }
}
375
376impl<T, A: Alloc<Item = T>> AsRef<T> for BoxSlot<T, A> {
377    #[inline]
378    fn as_ref(&self) -> &T {
379        self
380    }
381}
382
383impl<T, A: Alloc<Item = T>> AsMut<T> for BoxSlot<T, A> {
384    #[inline]
385    fn as_mut(&mut self) -> &mut T {
386        self
387    }
388}
389
390impl<T, A: Alloc<Item = T>> Borrow<T> for BoxSlot<T, A> {
391    #[inline]
392    fn borrow(&self) -> &T {
393        self
394    }
395}
396
397impl<T, A: Alloc<Item = T>> BorrowMut<T> for BoxSlot<T, A> {
398    #[inline]
399    fn borrow_mut(&mut self) -> &mut T {
400        self
401    }
402}
403
impl<T, A: Alloc<Item = T>> Drop for BoxSlot<T, A> {
    /// Drops the stored value and returns the slot to the allocator's freelist
    /// by delegating to `A::free`.
    #[inline]
    fn drop(&mut self) {
        // SAFETY: We own the slot, construct RawSlot for A::free
        let slot = unsafe { RawSlot::from_ptr(self.ptr) };
        // SAFETY: We own the slot, no other references exist
        unsafe { A::free(slot) };
    }
}
413
414impl<T: fmt::Debug, A: Alloc<Item = T>> fmt::Debug for BoxSlot<T, A> {
415    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
416        f.debug_struct("BoxSlot").field("value", &**self).finish()
417    }
418}
419
420// =============================================================================
421// LocalStatic
422// =============================================================================
423
/// A `'static` reference to a thread-local slab-allocated value.
///
/// Returned by [`BoxSlot::leak()`]. The reference is valid for the lifetime of
/// the program, but cannot be sent to other threads because the backing slab
/// is thread-local.
///
/// Once leaked, the slot is permanently occupied — there is no way to reclaim it.
#[repr(transparent)]
pub struct LocalStatic<T: ?Sized> {
    // Points at the leaked value; never freed, so valid for the program's lifetime.
    ptr: *const T,
    _marker: PhantomData<*const ()>, // !Send + !Sync
}
436
impl<T: ?Sized> LocalStatic<T> {
    /// Creates a new `LocalStatic` from a raw pointer.
    ///
    /// # Safety
    ///
    /// The pointer must point to a valid, permanently-leaked value in a
    /// thread-local slab.
    #[inline]
    pub(crate) unsafe fn new(ptr: *const T) -> Self {
        LocalStatic {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Returns a raw pointer to the value.
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.ptr
    }

    /// Returns a pinned reference to the value.
    ///
    /// Leaked slab values have stable addresses — they never move for the
    /// lifetime of the program. This makes `Pin` safe without any `Unpin` bound.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Leaked values have stable addresses forever.
        unsafe { Pin::new_unchecked(&**self) }
    }
}
468
impl<T: ?Sized> Deref for LocalStatic<T> {
    type Target = T;

    /// Shared access to the leaked value.
    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: ptr came from a leaked BoxSlot, value is alive forever,
        // and we're on the same thread (enforced by !Send)
        unsafe { &*self.ptr }
    }
}
479
480impl<T: ?Sized> AsRef<T> for LocalStatic<T> {
481    #[inline]
482    fn as_ref(&self) -> &T {
483        self
484    }
485}
486
487impl<T: ?Sized> Clone for LocalStatic<T> {
488    #[inline]
489    fn clone(&self) -> Self {
490        *self
491    }
492}
493
494impl<T: ?Sized> Copy for LocalStatic<T> {}
495
496impl<T: fmt::Debug + ?Sized> fmt::Debug for LocalStatic<T> {
497    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
498        f.debug_tuple("LocalStatic").field(&self.as_ref()).finish()
499    }
500}
501
502// =============================================================================
503// RcSlot<T, A>
504// =============================================================================
505
/// Reference-counted handle to a slab-allocated value.
///
/// `RcSlot` is a cloneable, RAII handle backed by the existing slab allocator.
/// Cloning bumps the strong count. Dropping decrements it; when the last strong
/// reference drops, the value is dropped. The slab slot is freed when both
/// strong and weak counts reach zero.
///
/// 8 bytes — same as `BoxSlot`.
///
/// # Thread Safety
///
/// `RcSlot` is `!Send` and `!Sync` (same as `BoxSlot`). All access must be from
/// the thread that created the allocator.
#[must_use = "dropping RcSlot decrements the strong count"]
pub struct RcSlot<T, A: Alloc<Item = RcInner<T>>> {
    // ManuallyDrop so Drop can decide whether the slot is actually freed
    // (only when both counts hit zero) instead of unconditionally on drop.
    inner: ManuallyDrop<BoxSlot<RcInner<T>, A>>,
    _phantom: PhantomData<T>,
}
524
525impl<T, A: UnboundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
526    /// Creates a new `RcSlot` containing the given value.
527    ///
528    /// Always succeeds - grows the allocator if needed.
529    ///
530    /// Only available for unbounded allocators. For bounded allocators,
531    /// use [`try_new`](Self::try_new).
532    #[inline]
533    pub fn new(value: T) -> Self {
534        RcSlot {
535            inner: ManuallyDrop::new(BoxSlot::new(RcInner::new(value))),
536            _phantom: PhantomData,
537        }
538    }
539}
540
541impl<T, A: Alloc<Item = RcInner<T>>> RcSlot<T, A> {
542    /// Creates a weak reference to the same slab slot.
543    #[inline]
544    pub fn downgrade(&self) -> WeakSlot<T, A> {
545        let rc_inner: &RcInner<T> = &self.inner;
546        let new_weak = rc_inner.weak().checked_add(1).expect("weak count overflow");
547        rc_inner.set_weak(new_weak);
548        // SAFETY: We hold a strong ref, slot is alive. Duplicate the pointer.
549        let weak_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
550        WeakSlot {
551            inner: ManuallyDrop::new(weak_slot),
552            _phantom: PhantomData,
553        }
554    }
555
556    /// Returns the strong reference count.
557    #[inline]
558    pub fn strong_count(&self) -> u32 {
559        let rc_inner: &RcInner<T> = &self.inner;
560        rc_inner.strong()
561    }
562
563    /// Returns the weak reference count (excludes the implicit weak).
564    #[inline]
565    pub fn weak_count(&self) -> u32 {
566        let rc_inner: &RcInner<T> = &self.inner;
567        rc_inner.weak().saturating_sub(1)
568    }
569
570    /// Returns a pinned reference to the value.
571    ///
572    /// Slab-allocated values have stable addresses — they never move while
573    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
574    #[inline]
575    pub fn pin(&self) -> Pin<&T> {
576        // SAFETY: Slab values have stable addresses. The RcSlot keeps the
577        // value alive, so the reference is valid.
578        unsafe { Pin::new_unchecked(&**self) }
579    }
580
581    /// Returns a pinned mutable reference if this is the only reference.
582    ///
583    /// Returns `None` if there are other strong or weak references.
584    ///
585    /// Slab-allocated values have stable addresses — they never move while
586    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
587    #[inline]
588    pub fn pin_get_mut(&mut self) -> Option<Pin<&mut T>> {
589        self.get_mut().map(|r| {
590            // SAFETY: Slab values have stable addresses. We verified exclusive
591            // access via get_mut().
592            unsafe { Pin::new_unchecked(r) }
593        })
594    }
595
596    /// Returns a mutable reference if this is the only reference.
597    ///
598    /// Returns `None` if there are other strong or weak references.
599    #[inline]
600    pub fn get_mut(&mut self) -> Option<&mut T> {
601        // Need strong == 1 AND weak == 0 (no outstanding weaks that could upgrade)
602        if self.strong_count() == 1 && self.weak_count() == 0 {
603            // SAFETY: We verified exclusive access
604            Some(unsafe { self.get_mut_unchecked() })
605        } else {
606            None
607        }
608    }
609
610    /// Returns a mutable reference to the value without checking the strong count.
611    ///
612    /// # Safety
613    ///
614    /// Caller must ensure this is the only `RcSlot` (strong_count == 1, weak_count == 0)
615    /// and no `WeakSlot::upgrade` calls are concurrent.
616    #[inline]
617    #[allow(clippy::mut_from_ref)]
618    pub unsafe fn get_mut_unchecked(&self) -> &mut T {
619        // SAFETY: Caller guarantees exclusive access.
620        // Navigate through SlotCell union → RcInner → ManuallyDrop<T> → T
621        let cell_ptr = self.inner.as_ptr();
622        let rc_inner = unsafe { (*(*cell_ptr).value).assume_init_mut() };
623        // SAFETY: value is live, caller guarantees exclusive access.
624        // Dereference through ManuallyDrop to get &mut T.
625        let md = unsafe { rc_inner.value_manual_drop_mut() };
626        &mut *md
627    }
628
629    /// Converts to a raw slot for manual memory management.
630    ///
631    /// Returns `Some(Slot)` if this is the only reference (strong == 1, no weak refs).
632    /// Returns `None` if other strong or weak references exist.
633    ///
634    /// The strong count is decremented but the value is NOT dropped.
635    /// Caller takes ownership and must eventually free via the allocator.
636    #[inline]
637    pub fn into_slot(self) -> Option<RawSlot<RcInner<T>>> {
638        let rc_inner: &RcInner<T> = &self.inner;
639
640        // Must be only reference - strong == 1 and no external weaks (just implicit)
641        if rc_inner.strong() != 1 || rc_inner.weak() != 1 {
642            return None;
643        }
644
645        // Set counts to 0 - we're taking full ownership via raw Slot
646        rc_inner.set_strong(0);
647        rc_inner.set_weak(0);
648
649        // Extract the raw slot pointer
650        let slot_ptr = self.inner.as_ptr();
651
652        // Don't run Drop (which would try to free)
653        std::mem::forget(self);
654
655        // SAFETY: We verified we're the only reference, slot_ptr is valid
656        Some(unsafe { RawSlot::from_ptr(slot_ptr) })
657    }
658
659    /// Converts to a raw slot without checking refcounts.
660    ///
661    /// Caller takes full ownership of the slot. Refcounts are NOT modified —
662    /// the caller is responsible for ensuring no other references exist or
663    /// for handling the consequences.
664    ///
665    /// # Safety
666    ///
667    /// - Caller takes ownership of the slot and the value within
668    /// - If other strong references exist, they will see stale refcounts
669    ///   and may double-free or access dropped memory
670    /// - If weak references exist, they will fail to upgrade (this is safe)
671    ///   but may attempt deallocation based on stale counts
672    #[inline]
673    pub unsafe fn into_slot_unchecked(self) -> RawSlot<RcInner<T>> {
674        // DON'T touch refcounts - caller takes full ownership
675        // Any other refs will see stale counts, but that's caller's problem
676
677        // Extract the raw slot pointer
678        let slot_ptr = self.inner.as_ptr();
679
680        // Don't run Drop
681        std::mem::forget(self);
682
683        unsafe { RawSlot::from_ptr(slot_ptr) }
684    }
685
686    // =========================================================================
687    // Raw pointer API (mirrors std::rc::Rc)
688    // =========================================================================
689
690    /// Returns a raw pointer to the underlying slot cell.
691    ///
692    /// The pointer is valid as long as any strong reference exists.
693    #[inline]
694    pub fn as_ptr(&self) -> *mut SlotCell<RcInner<T>> {
695        self.inner.as_ptr()
696    }
697
698    /// Consumes the `RcSlot` without decrementing the strong count.
699    ///
700    /// The caller takes responsibility for the strong count and must
701    /// eventually call [`from_raw`](Self::from_raw) (to reconstruct and
702    /// drop) or [`decrement_strong_count`](Self::decrement_strong_count).
703    #[inline]
704    pub fn into_raw(self) -> *mut SlotCell<RcInner<T>> {
705        let ptr = self.inner.as_ptr();
706        std::mem::forget(self);
707        ptr
708    }
709
710    /// Reconstructs an `RcSlot` from a raw pointer without incrementing
711    /// the strong count.
712    ///
713    /// # Safety
714    ///
715    /// - `ptr` must point to a valid, occupied `SlotCell<RcInner<T>>` within
716    ///   an allocator of type `A`
717    /// - The caller must own a strong count for this handle (e.g., obtained
718    ///   via [`into_raw`](Self::into_raw) or
719    ///   [`increment_strong_count`](Self::increment_strong_count))
720    #[inline]
721    pub unsafe fn from_raw(ptr: *mut SlotCell<RcInner<T>>) -> Self {
722        RcSlot {
723            inner: ManuallyDrop::new(unsafe { BoxSlot::from_raw(ptr) }),
724            _phantom: PhantomData,
725        }
726    }
727
728    /// Increments the strong count via a raw pointer.
729    ///
730    /// Use this when a data structure needs to acquire an additional strong
731    /// reference from a raw pointer without holding an `RcSlot`.
732    ///
733    /// # Safety
734    ///
735    /// - `ptr` must point to a live `RcInner<T>` (strong > 0)
736    #[inline]
737    pub unsafe fn increment_strong_count(ptr: *mut SlotCell<RcInner<T>>) {
738        // SAFETY: Caller guarantees ptr points to a live RcInner
739        let rc_inner = unsafe { (*ptr).value.assume_init_ref() };
740        let strong = rc_inner.strong();
741        rc_inner.set_strong(strong + 1);
742    }
743
744    /// Decrements the strong count via a raw pointer.
745    ///
746    /// If the strong count reaches zero, the value is dropped. If both
747    /// strong and weak counts reach zero, the slab slot is freed.
748    ///
749    /// # Safety
750    ///
751    /// - `ptr` must point to a valid `RcInner<T>`
752    /// - The caller must own a strong count to decrement
753    /// - After this call, `ptr` may be invalid if the slot was freed
754    #[inline]
755    pub unsafe fn decrement_strong_count(ptr: *mut SlotCell<RcInner<T>>) {
756        // Reconstruct and drop — reuses existing Drop logic
757        drop(unsafe { Self::from_raw(ptr) });
758    }
759}
760
761impl<T, A: BoundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
762    /// Tries to create a new `RcSlot` containing the given value.
763    ///
764    /// Returns `Err(Full(value))` if the allocator is at capacity.
765    ///
766    /// Only available for bounded allocators. For unbounded allocators,
767    /// use [`new`](Self::new) directly - it never fails.
768    #[inline]
769    pub fn try_new(value: T) -> Result<Self, Full<T>> {
770        match BoxSlot::try_new(RcInner::new(value)) {
771            Ok(slot) => Ok(RcSlot {
772                inner: ManuallyDrop::new(slot),
773                _phantom: PhantomData,
774            }),
775            Err(full) => Err(Full(full.into_inner().into_value())),
776        }
777    }
778}
779
780impl<T: Clone, A: UnboundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
781    /// Makes a mutable reference to the value, cloning if necessary.
782    ///
783    /// If this is the only reference (strong == 1, weak == 0), returns a
784    /// mutable reference directly. Otherwise, clones the value into a new
785    /// slot and returns a mutable reference to the clone.
786    ///
787    /// Always succeeds - grows the allocator if needed.
788    ///
789    /// Only available for unbounded allocators. For bounded allocators,
790    /// use [`try_make_mut`](Self::try_make_mut).
791    #[inline]
792    pub fn make_mut(&mut self) -> &mut T {
793        if self.strong_count() != 1 || self.weak_count() != 0 {
794            // Clone into new slot, replace self
795            *self = Self::new((**self).clone());
796        }
797        // SAFETY: Now we're the only reference
798        unsafe { self.get_mut_unchecked() }
799    }
800}
801
802impl<T: Clone, A: BoundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
803    /// Tries to make a mutable reference to the value, cloning if necessary.
804    ///
805    /// If this is the only reference (strong == 1, weak == 0), returns a
806    /// mutable reference directly. Otherwise, attempts to clone the value
807    /// into a new slot.
808    ///
809    /// Returns `Err(Full)` if allocation fails.
810    ///
811    /// Only available for bounded allocators. For unbounded allocators,
812    /// use [`make_mut`](Self::make_mut) directly - it never fails.
813    #[inline]
814    pub fn try_make_mut(&mut self) -> Result<&mut T, Full<()>> {
815        if self.strong_count() != 1 || self.weak_count() != 0 {
816            // Clone into new slot, replace self
817            match Self::try_new((**self).clone()) {
818                Ok(new_slot) => *self = new_slot,
819                Err(_) => return Err(Full(())),
820            }
821        }
822        // SAFETY: Now we're the only reference
823        Ok(unsafe { self.get_mut_unchecked() })
824    }
825}
826
impl<T, A: Alloc<Item = RcInner<T>>> Clone for RcSlot<T, A> {
    /// Bumps the strong count and duplicates the slot pointer; the stored
    /// `T` is never cloned.
    #[inline]
    fn clone(&self) -> Self {
        let rc_inner: &RcInner<T> = &self.inner;
        // Overflow is a program bug (2^32 live handles); fail loudly rather
        // than let the count wrap in release builds.
        let new_strong = rc_inner
            .strong()
            .checked_add(1)
            .expect("RcSlot strong count overflow");
        rc_inner.set_strong(new_strong);
        // SAFETY: We hold a strong ref, slot is alive
        let cloned_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
        RcSlot {
            inner: ManuallyDrop::new(cloned_slot),
            _phantom: PhantomData,
        }
    }
}
844
impl<T, A: Alloc<Item = RcInner<T>>> Drop for RcSlot<T, A> {
    /// Releases one strong reference; on the last one, drops the value and
    /// frees the slot unless weak references still need it for upgrade checks.
    #[inline]
    fn drop(&mut self) {
        // All refcount access goes through raw pointers to avoid Stacked
        // Borrows invalidation when we take &mut to drop the value.
        let cell_ptr = self.inner.as_ptr();

        // SAFETY: Slot is alive, union field `value` is active
        let strong = unsafe { (*cell_ptr).value.assume_init_ref().strong() };
        if strong > 1 {
            // Other strong refs remain — just decrement and bail.
            // SAFETY: same as above
            unsafe { (*cell_ptr).value.assume_init_ref().set_strong(strong - 1) };
            return;
        }

        // Last strong reference — drop the value
        // SAFETY: same as above
        unsafe { (*cell_ptr).value.assume_init_ref().set_strong(0) };

        // SAFETY: We are the last strong ref, value is live. We need &mut
        // to drop the ManuallyDrop<T> inside RcInner.
        unsafe {
            let rc_inner_mut = (*(*cell_ptr).value).assume_init_mut();
            ManuallyDrop::drop(rc_inner_mut.value_manual_drop_mut());
        }

        // Re-derive shared ref after the mutable drop above
        // SAFETY: RcInner is still valid memory (Cell<u32> fields are Copy,
        // ManuallyDrop<T> is dropped but the storage is still there)
        let weak = unsafe { (*cell_ptr).value.assume_init_ref().weak() };
        if weak == 1 {
            // No outstanding weaks — free the slot. (weak == 1 appears to be
            // the implicit weak held on behalf of strong refs — matches the
            // `weak_count()` accounting; confirm against RcInner's init.)
            // SAFETY: Value is dropped. Slot's drop_in_place on RcInner is
            // a no-op (ManuallyDrop<T> already dropped, Cell<u32> is Copy).
            // BoxSlot's Drop will call A::free() to return slot to freelist.
            unsafe { ManuallyDrop::drop(&mut self.inner) };
        } else {
            // SAFETY: same as weak read above
            unsafe { (*cell_ptr).value.assume_init_ref().set_weak(weak - 1) };
            // Zombie: T dropped, weak refs still hold the slot alive
        }
    }
}
888
889impl<T, A: Alloc<Item = RcInner<T>>> Deref for RcSlot<T, A> {
890    type Target = T;
891
892    #[inline]
893    fn deref(&self) -> &T {
894        let rc_inner: &RcInner<T> = &self.inner;
895        rc_inner.value()
896    }
897}
898
899impl<T, A: Alloc<Item = RcInner<T>>> AsRef<T> for RcSlot<T, A> {
900    #[inline]
901    fn as_ref(&self) -> &T {
902        self
903    }
904}
905
906impl<T: fmt::Debug, A: Alloc<Item = RcInner<T>>> fmt::Debug for RcSlot<T, A> {
907    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
908        f.debug_struct("RcSlot")
909            .field("strong", &self.strong_count())
910            .field("weak", &self.weak_count())
911            .field("value", &**self)
912            .finish()
913    }
914}
915
916// =============================================================================
917// WeakSlot<T, A>
918// =============================================================================
919
/// Weak reference to a slab-allocated value.
///
/// Does not keep the value alive. Must [`upgrade`](Self::upgrade) to access
/// the value. Keeps the slab slot alive (for upgrade checks) until all weak
/// and strong references are dropped.
///
/// 8 bytes — same as `BoxSlot`.
pub struct WeakSlot<T, A: Alloc<Item = RcInner<T>>> {
    // Slot handle; wrapped in ManuallyDrop because WeakSlot's Drop decides
    // at runtime whether the slot may actually be freed.
    inner: ManuallyDrop<BoxSlot<RcInner<T>, A>>,
    // No `T` is stored directly; this ties `T` into the type for the
    // compiler (variance / auto traits).
    _phantom: PhantomData<T>,
}
931
932impl<T, A: Alloc<Item = RcInner<T>>> WeakSlot<T, A> {
933    /// Attempts to upgrade to a strong reference.
934    ///
935    /// Returns `Some(RcSlot)` if the value is still alive (strong > 0),
936    /// or `None` if the last strong reference has been dropped.
937    #[inline]
938    pub fn upgrade(&self) -> Option<RcSlot<T, A>> {
939        let rc_inner: &RcInner<T> = &self.inner;
940        let strong = rc_inner.strong();
941        if strong == 0 {
942            return None;
943        }
944        let new_strong = strong.checked_add(1).expect("RcSlot strong count overflow");
945        rc_inner.set_strong(new_strong);
946        // SAFETY: strong > 0 means slot is alive and value is valid
947        let slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
948        Some(RcSlot {
949            inner: ManuallyDrop::new(slot),
950            _phantom: PhantomData,
951        })
952    }
953
954    /// Returns the strong reference count.
955    #[inline]
956    pub fn strong_count(&self) -> u32 {
957        let rc_inner: &RcInner<T> = &self.inner;
958        rc_inner.strong()
959    }
960
961    /// Returns the weak reference count (excludes the implicit weak).
962    #[inline]
963    pub fn weak_count(&self) -> u32 {
964        let rc_inner: &RcInner<T> = &self.inner;
965        let weak = rc_inner.weak();
966        // If strong > 0, subtract the implicit weak. If strong == 0,
967        // the implicit weak was already decremented.
968        if rc_inner.strong() > 0 {
969            weak.saturating_sub(1)
970        } else {
971            weak
972        }
973    }
974}
975
976impl<T, A: Alloc<Item = RcInner<T>>> Clone for WeakSlot<T, A> {
977    #[inline]
978    fn clone(&self) -> Self {
979        let rc_inner: &RcInner<T> = &self.inner;
980        let new_weak = rc_inner
981            .weak()
982            .checked_add(1)
983            .expect("WeakSlot weak count overflow");
984        rc_inner.set_weak(new_weak);
985        // SAFETY: We hold a weak ref, slot memory is alive
986        let cloned_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
987        WeakSlot {
988            inner: ManuallyDrop::new(cloned_slot),
989            _phantom: PhantomData,
990        }
991    }
992}
993
994impl<T, A: Alloc<Item = RcInner<T>>> Drop for WeakSlot<T, A> {
995    #[inline]
996    fn drop(&mut self) {
997        let rc_inner: &RcInner<T> = &self.inner;
998        let weak = rc_inner.weak();
999
1000        // Always decrement weak count
1001        rc_inner.set_weak(weak.saturating_sub(1));
1002
1003        // Dealloc only if this was the last weak AND value already dropped (strong==0)
1004        if weak == 1 && rc_inner.strong() == 0 {
1005            // Zombie slot — value already dropped, dealloc the slot.
1006            // SAFETY: RcInner's ManuallyDrop<T> is already dropped.
1007            // BoxSlot's drop_in_place on RcInner is a no-op. Dealloc returns
1008            // the slot to the freelist.
1009            unsafe { ManuallyDrop::drop(&mut self.inner) };
1010        }
1011        // If strong > 0, strong holder's drop will handle dealloc.
1012        // If weak > 1, other weak refs still hold the slot alive.
1013    }
1014}
1015
1016impl<T, A: Alloc<Item = RcInner<T>>> fmt::Debug for WeakSlot<T, A> {
1017    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1018        f.debug_struct("WeakSlot")
1019            .field("strong", &self.strong_count())
1020            .field("weak", &self.weak_count())
1021            .finish()
1022    }
1023}