// nexus_slab/alloc.rs
1//! Generic slab allocator trait and slot types.
2//!
3//! This module provides:
4//! - [`Alloc`] - base trait for slot deallocation
5//! - [`BoundedAlloc`] - trait for fixed-capacity allocators (can fail)
6//! - [`UnboundedAlloc`] - trait for growable allocators (always succeeds)
7//! - [`BoxSlot`] - 8-byte RAII handle generic over allocator
8//! - [`RcSlot`] / [`WeakSlot`] - reference-counted handles
9
10use std::borrow::{Borrow, BorrowMut};
11use std::fmt;
12use std::marker::PhantomData;
13use std::mem::ManuallyDrop;
14use std::ops::{Deref, DerefMut};
15use std::pin::Pin;
16
17use crate::shared::{RawSlot, RcInner, SlotCell};
18
19// =============================================================================
20// Full<T>
21// =============================================================================
22
/// Error returned when a bounded allocator is full.
///
/// Contains the value that could not be allocated, allowing recovery.
pub struct Full<T>(pub T);

impl<T> Full<T> {
    /// Consumes the error, returning the value that could not be allocated.
    #[inline]
    pub fn into_inner(self) -> T {
        let Full(value) = self;
        value
    }
}

impl<T> fmt::Debug for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Deliberately opaque: T may not implement Debug.
        write!(f, "Full(..)")
    }
}

impl<T> fmt::Display for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "allocator full")
    }
}

impl<T> std::error::Error for Full<T> {}
49
50// =============================================================================
51// Traits
52// =============================================================================
53
/// Base trait for slab allocators - handles slot deallocation.
///
/// Each macro-generated allocator is a ZST that implements this trait.
/// All operations go through associated functions (no `&self`) since
/// the backing storage lives in a `thread_local!`.
///
/// # Safety
///
/// Implementors must guarantee:
/// - `free` correctly drops the stored item and returns the slot to the freelist.
///   For byte allocators (`AlignedBytes<N>` is `Copy`), `free` only does a
///   freelist return — the actual `T` value must be dropped by the caller
///   (e.g., `ByteBoxSlot::drop` calls `drop_in_place::<T>()` before `A::free`).
/// - `take` correctly moves the value out and returns the slot to the freelist
/// - All operations are single-threaded (TLS-backed)
pub unsafe trait Alloc: Sized + 'static {
    /// The type stored in each slot.
    type Item;

    /// Returns `true` if the allocator has been initialized.
    fn is_initialized() -> bool;

    /// Returns the total slot capacity.
    ///
    /// For bounded allocators this is fixed at init. For unbounded allocators
    /// this is the sum across all allocated chunks.
    fn capacity() -> usize;

    /// Drops the stored item and returns the slot to the freelist.
    ///
    /// For typed allocators, this drops `T` via `drop_in_place` then frees.
    /// For byte allocators (`AlignedBytes<N>` is `Copy`), this only does a
    /// freelist return — the caller must drop `T` before calling `free`.
    ///
    /// This is for manual memory management after calling `BoxSlot::into_slot()`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this allocator
    /// - No references to the slot's value may exist
    /// - For byte allocators: the value must already have been dropped or moved out
    ///
    /// Note: Double-free is prevented at compile time (`RawSlot` is move-only).
    #[allow(clippy::needless_pass_by_value)] // consumes slot to prevent reuse
    unsafe fn free(slot: RawSlot<Self::Item>);

    /// Takes the value from a slot, returning it and deallocating the slot.
    ///
    /// This is for manual memory management after calling `BoxSlot::into_slot()`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this allocator
    /// - No references to the slot's value may exist
    ///
    /// Note: Double-free is prevented at compile time (`RawSlot` is move-only).
    #[allow(clippy::needless_pass_by_value)] // consumes slot to prevent reuse
    unsafe fn take(slot: RawSlot<Self::Item>) -> Self::Item;
}
113
/// Trait for bounded (fixed-capacity) allocators.
///
/// Bounded allocators can fail when at capacity. Use [`try_alloc`](Self::try_alloc)
/// to handle capacity exhaustion.
pub trait BoundedAlloc: Alloc {
    /// Tries to allocate a slot and write the value.
    ///
    /// # Errors
    ///
    /// Returns `Err(Full(value))` if the allocator is full, giving the
    /// value back to the caller for recovery or fallback handling.
    fn try_alloc(value: Self::Item) -> Result<RawSlot<Self::Item>, Full<Self::Item>>;
}
125
/// Trait for unbounded (growable) allocators.
///
/// Unbounded allocators always succeed (grow as needed).
pub trait UnboundedAlloc: Alloc {
    /// Allocates a slot and writes the value.
    ///
    /// Always succeeds - grows the allocator if needed.
    fn alloc(value: Self::Item) -> RawSlot<Self::Item>;

    /// Ensures at least `count` chunks are allocated.
    ///
    /// No-op if the allocator already has `count` or more chunks.
    /// Useful to pre-pay growth cost before a latency-sensitive section.
    fn reserve_chunks(count: usize);

    /// Returns the number of allocated chunks.
    fn chunk_count() -> usize;
}
143
144// =============================================================================
145// BoxSlot<T, A>
146// =============================================================================
147
/// RAII handle to a slab-allocated value, generic over allocator.
///
/// `BoxSlot<T, A>` is 8 bytes (one pointer).
///
/// This is the slot type generated by `bounded_allocator!` and
/// `unbounded_allocator!` macros via `type BoxSlot = alloc::BoxSlot<T, Allocator>`.
///
/// # Borrow Traits
///
/// `BoxSlot` implements [`Borrow<T>`] and [`BorrowMut<T>`], enabling use as
/// HashMap keys that borrow `T` for lookups.
///
/// # Thread Safety
///
/// `BoxSlot` is `!Send` and `!Sync` (via `PhantomData<*const ()>` inside the
/// marker). It must only be used from the thread that created it.
#[must_use = "dropping BoxSlot returns it to the allocator"]
pub struct BoxSlot<T, A: Alloc<Item = T>> {
    // Points into the slab's backing storage; always an occupied cell while
    // this handle is alive.
    ptr: *mut SlotCell<T>,
    // PhantomData carries the allocator type AND makes BoxSlot !Send + !Sync
    // (*const () is !Send + !Sync, and PhantomData<A> ties the type)
    _marker: PhantomData<(A, *const ())>,
}
171
impl<T, A: Alloc<Item = T>> BoxSlot<T, A> {
    /// Leaks the slot permanently, returning an immutable reference.
    ///
    /// The value will never be dropped or deallocated. Use this for data
    /// that must live for the lifetime of the program.
    ///
    /// Returns a `LocalStatic<T>` which is `!Send + !Sync` and only supports
    /// immutable access via `Deref`.
    #[inline]
    pub fn leak(self) -> LocalStatic<T> {
        let slot_ptr = self.ptr;
        std::mem::forget(self);
        // SAFETY: Destructor won't run (forgot self).
        // The pointer is valid for 'static because slab storage is leaked.
        // Union field `value` is active because the slot is occupied.
        let value_ptr = unsafe { (*slot_ptr).value_ptr() };
        unsafe { LocalStatic::new(value_ptr) }
    }

    /// Converts to a raw slot for manual memory management.
    ///
    /// The slot is NOT deallocated. Caller must eventually:
    /// - Call `Allocator::free()` to drop and deallocate
    /// - Call `Allocator::take()` to extract value and deallocate
    /// - Wrap in another `BoxSlot` via `from_slot()`
    #[inline]
    pub fn into_slot(self) -> RawSlot<T> {
        let ptr = self.ptr;
        // forget() so Drop doesn't free the slot we're handing out.
        std::mem::forget(self);
        // SAFETY: ptr came from a valid allocation
        unsafe { RawSlot::from_ptr(ptr) }
    }

    /// Extracts the value from the slot, deallocating the slot.
    ///
    /// This is analogous to `Box::into_inner`.
    #[inline]
    pub fn into_inner(self) -> T {
        let ptr = self.ptr;
        std::mem::forget(self);

        // SAFETY: ptr came from a valid allocation, construct RawSlot for take
        let slot = unsafe { RawSlot::from_ptr(ptr) };
        // SAFETY: We owned the slot, no other references exist
        unsafe { A::take(slot) }
    }

    /// Replaces the value in the slot, returning the old value.
    #[inline]
    pub fn replace(&mut self, value: T) -> T {
        // SAFETY: We own the slot exclusively (&mut self), slot is occupied.
        // read_value moves the old value out; write_value immediately
        // re-occupies the slot, so the slot is never observed empty.
        unsafe {
            let old = (*self.ptr).read_value();
            (*self.ptr).write_value(value);
            old
        }
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    ///
    /// For polling futures (which needs `Pin<&mut Self>`), see
    /// [`pin_mut`](Self::pin_mut).
    ///
    /// # Example
    ///
    /// ```ignore
    /// let slot = order_alloc::BoxSlot::try_new(MyFuture::new())?;
    /// let pinned: Pin<&MyFuture> = slot.pin();
    /// ```
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses — they don't move until
        // the slot is explicitly freed. The BoxSlot owns the slot, so the
        // value cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    ///
    /// Useful for async code that requires `Pin<&mut Self>` for polling futures.
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses — they don't move until
        // the slot is explicitly freed. The BoxSlot owns the slot exclusively
        // (&mut self), so the value cannot be freed or moved while this
        // mutable reference exists.
        unsafe { Pin::new_unchecked(&mut **self) }
    }

    /// Wraps a raw slot in an RAII handle.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from an allocator of type `A`
    /// - `slot` must not be wrapped in another `BoxSlot` or otherwise managed
    #[inline]
    pub unsafe fn from_slot(slot: RawSlot<T>) -> Self {
        BoxSlot {
            ptr: slot.into_ptr(),
            _marker: PhantomData,
        }
    }

    /// Returns a raw pointer to the underlying slot cell.
    ///
    /// The pointer is valid as long as the `BoxSlot` (or any handle derived
    /// from the same slab slot) is alive.
    #[inline]
    pub fn as_ptr(&self) -> *mut SlotCell<T> {
        self.ptr
    }

    /// Consumes the `BoxSlot` and returns a raw pointer to the slot cell.
    ///
    /// The slot is NOT deallocated. The caller takes ownership and must
    /// eventually:
    /// - Call [`from_raw`](Self::from_raw) to reconstruct the `BoxSlot`
    /// - Or call [`Alloc::free`] / [`Alloc::take`] on the underlying [`RawSlot`]
    #[inline]
    pub fn into_raw(self) -> *mut SlotCell<T> {
        let ptr = self.ptr;
        std::mem::forget(self);
        ptr
    }

    /// Reconstructs a `BoxSlot` from a raw pointer.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid, occupied slot cell within an allocator
    ///   of type `A`
    /// - The caller must own the slot (no other `BoxSlot` wrapping it)
    #[inline]
    pub unsafe fn from_raw(ptr: *mut SlotCell<T>) -> Self {
        BoxSlot {
            ptr,
            _marker: PhantomData,
        }
    }
}
318
319impl<T, A: UnboundedAlloc<Item = T>> BoxSlot<T, A> {
320 /// Creates a new slot containing the given value.
321 ///
322 /// Always succeeds - grows the allocator if needed.
323 ///
324 /// Only available for unbounded allocators. For bounded allocators,
325 /// use [`try_new`](Self::try_new).
326 #[inline]
327 pub fn new(value: T) -> Self {
328 BoxSlot {
329 ptr: A::alloc(value).into_ptr(),
330 _marker: PhantomData,
331 }
332 }
333}
334
335impl<T, A: BoundedAlloc<Item = T>> BoxSlot<T, A> {
336 /// Tries to create a new slot containing the given value.
337 ///
338 /// Returns `Err(Full(value))` if the allocator is at capacity,
339 /// giving the value back to the caller.
340 ///
341 /// Only available for bounded allocators. For unbounded allocators,
342 /// use [`new`](Self::new) directly - it never fails.
343 #[inline]
344 pub fn try_new(value: T) -> Result<Self, Full<T>> {
345 Ok(BoxSlot {
346 ptr: A::try_alloc(value)?.into_ptr(),
347 _marker: PhantomData,
348 })
349 }
350}
351
352// =============================================================================
353// Trait Implementations for BoxSlot
354// =============================================================================
355
356impl<T, A: Alloc<Item = T>> Deref for BoxSlot<T, A> {
357 type Target = T;
358
359 #[inline]
360 fn deref(&self) -> &Self::Target {
361 // SAFETY: BoxSlot was created from a valid, occupied SlotCell.
362 unsafe { (*self.ptr).value_ref() }
363 }
364}
365
366impl<T, A: Alloc<Item = T>> DerefMut for BoxSlot<T, A> {
367 #[inline]
368 fn deref_mut(&mut self) -> &mut Self::Target {
369 // SAFETY: We have &mut self, guaranteeing exclusive access.
370 unsafe { (*self.ptr).value_mut() }
371 }
372}
373
374impl<T, A: Alloc<Item = T>> AsRef<T> for BoxSlot<T, A> {
375 #[inline]
376 fn as_ref(&self) -> &T {
377 self
378 }
379}
380
381impl<T, A: Alloc<Item = T>> AsMut<T> for BoxSlot<T, A> {
382 #[inline]
383 fn as_mut(&mut self) -> &mut T {
384 self
385 }
386}
387
388impl<T, A: Alloc<Item = T>> Borrow<T> for BoxSlot<T, A> {
389 #[inline]
390 fn borrow(&self) -> &T {
391 self
392 }
393}
394
395impl<T, A: Alloc<Item = T>> BorrowMut<T> for BoxSlot<T, A> {
396 #[inline]
397 fn borrow_mut(&mut self) -> &mut T {
398 self
399 }
400}
401
402impl<T, A: Alloc<Item = T>> Drop for BoxSlot<T, A> {
403 #[inline]
404 fn drop(&mut self) {
405 // SAFETY: We own the slot, construct RawSlot for A::free
406 let slot = unsafe { RawSlot::from_ptr(self.ptr) };
407 // SAFETY: We own the slot, no other references exist
408 unsafe { A::free(slot) };
409 }
410}
411
412impl<T: fmt::Debug, A: Alloc<Item = T>> fmt::Debug for BoxSlot<T, A> {
413 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
414 f.debug_struct("BoxSlot").field("value", &**self).finish()
415 }
416}
417
418// =============================================================================
419// LocalStatic
420// =============================================================================
421
/// A `'static` reference to a thread-local slab-allocated value.
///
/// Returned by [`BoxSlot::leak()`]. The reference is valid for the lifetime of
/// the program, but cannot be sent to other threads because the backing slab
/// is thread-local.
///
/// Once leaked, the slot is permanently occupied — there is no way to reclaim it.
#[repr(transparent)]
pub struct LocalStatic<T: ?Sized> {
    // Points at the leaked value inside the slab; never dangles.
    ptr: *const T,
    _marker: PhantomData<*const ()>, // !Send + !Sync
}
434
435impl<T: ?Sized> LocalStatic<T> {
436 /// Creates a new `LocalStatic` from a raw pointer.
437 ///
438 /// # Safety
439 ///
440 /// The pointer must point to a valid, permanently-leaked value in a
441 /// thread-local slab.
442 #[inline]
443 pub(crate) unsafe fn new(ptr: *const T) -> Self {
444 LocalStatic {
445 ptr,
446 _marker: PhantomData,
447 }
448 }
449
450 /// Returns a raw pointer to the value.
451 #[inline]
452 pub fn as_ptr(&self) -> *const T {
453 self.ptr
454 }
455
456 /// Returns a pinned reference to the value.
457 ///
458 /// Leaked slab values have stable addresses — they never move for the
459 /// lifetime of the program. This makes `Pin` safe without any `Unpin` bound.
460 #[inline]
461 pub fn pin(&self) -> Pin<&T> {
462 // SAFETY: Leaked values have stable addresses forever.
463 unsafe { Pin::new_unchecked(&**self) }
464 }
465}
466
467impl<T: ?Sized> Deref for LocalStatic<T> {
468 type Target = T;
469
470 #[inline]
471 fn deref(&self) -> &T {
472 // SAFETY: ptr came from a leaked BoxSlot, value is alive forever,
473 // and we're on the same thread (enforced by !Send)
474 unsafe { &*self.ptr }
475 }
476}
477
478impl<T: ?Sized> AsRef<T> for LocalStatic<T> {
479 #[inline]
480 fn as_ref(&self) -> &T {
481 self
482 }
483}
484
485impl<T: ?Sized> Clone for LocalStatic<T> {
486 #[inline]
487 fn clone(&self) -> Self {
488 *self
489 }
490}
491
492impl<T: ?Sized> Copy for LocalStatic<T> {}
493
494impl<T: fmt::Debug + ?Sized> fmt::Debug for LocalStatic<T> {
495 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
496 f.debug_tuple("LocalStatic").field(&self.as_ref()).finish()
497 }
498}
499
500// =============================================================================
501// RcSlot<T, A>
502// =============================================================================
503
/// Reference-counted handle to a slab-allocated value.
///
/// `RcSlot` is a cloneable, RAII handle backed by the existing slab allocator.
/// Cloning bumps the strong count. Dropping decrements it; when the last strong
/// reference drops, the value is dropped. The slab slot is freed when both
/// strong and weak counts reach zero.
///
/// 8 bytes — same as `BoxSlot`.
///
/// # Thread Safety
///
/// `RcSlot` is `!Send` and `!Sync` (same as `BoxSlot`). All access must be from
/// the thread that created the allocator.
#[must_use = "dropping RcSlot decrements the strong count"]
pub struct RcSlot<T, A: Alloc<Item = RcInner<T>>> {
    // ManuallyDrop: RcSlot::drop decides whether the BoxSlot's destructor
    // (which frees the slot) may run, based on the refcounts.
    inner: ManuallyDrop<BoxSlot<RcInner<T>, A>>,
    _phantom: PhantomData<T>,
}
522
523impl<T, A: UnboundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
524 /// Creates a new `RcSlot` containing the given value.
525 ///
526 /// Always succeeds - grows the allocator if needed.
527 ///
528 /// Only available for unbounded allocators. For bounded allocators,
529 /// use [`try_new`](Self::try_new).
530 #[inline]
531 pub fn new(value: T) -> Self {
532 RcSlot {
533 inner: ManuallyDrop::new(BoxSlot::new(RcInner::new(value))),
534 _phantom: PhantomData,
535 }
536 }
537}
538
impl<T, A: Alloc<Item = RcInner<T>>> RcSlot<T, A> {
    /// Creates a weak reference to the same slab slot.
    #[inline]
    pub fn downgrade(&self) -> WeakSlot<T, A> {
        let rc_inner: &RcInner<T> = &self.inner;
        // Bump the weak count; overflow aborts rather than wrapping.
        let new_weak = rc_inner.weak().checked_add(1).expect("weak count overflow");
        rc_inner.set_weak(new_weak);
        // SAFETY: We hold a strong ref, slot is alive. Duplicate the pointer.
        let weak_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
        WeakSlot {
            inner: ManuallyDrop::new(weak_slot),
            _phantom: PhantomData,
        }
    }

    /// Returns the strong reference count.
    #[inline]
    pub fn strong_count(&self) -> u32 {
        let rc_inner: &RcInner<T> = &self.inner;
        rc_inner.strong()
    }

    /// Returns the weak reference count (excludes the implicit weak).
    ///
    /// While strong > 0, the raw weak count includes one implicit weak held
    /// collectively by the strong references; subtract it here.
    #[inline]
    pub fn weak_count(&self) -> u32 {
        let rc_inner: &RcInner<T> = &self.inner;
        rc_inner.weak().saturating_sub(1)
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses. The RcSlot keeps the
        // value alive, so the reference is valid.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference if this is the only reference.
    ///
    /// Returns `None` if there are other strong or weak references.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied. This makes `Pin` safe without any `Unpin` bound.
    #[inline]
    pub fn pin_get_mut(&mut self) -> Option<Pin<&mut T>> {
        self.get_mut().map(|r| {
            // SAFETY: Slab values have stable addresses. We verified exclusive
            // access via get_mut().
            unsafe { Pin::new_unchecked(r) }
        })
    }

    /// Returns a mutable reference if this is the only reference.
    ///
    /// Returns `None` if there are other strong or weak references.
    #[inline]
    pub fn get_mut(&mut self) -> Option<&mut T> {
        // Need strong == 1 AND weak == 0 (no outstanding weaks that could upgrade)
        if self.strong_count() == 1 && self.weak_count() == 0 {
            // SAFETY: We verified exclusive access
            Some(unsafe { self.get_mut_unchecked() })
        } else {
            None
        }
    }

    /// Returns a mutable reference to the value without checking the strong count.
    ///
    /// # Safety
    ///
    /// Caller must ensure this is the only `RcSlot` (strong_count == 1, weak_count == 0)
    /// and no `WeakSlot::upgrade` calls are concurrent.
    #[inline]
    #[allow(clippy::mut_from_ref)]
    pub unsafe fn get_mut_unchecked(&self) -> &mut T {
        // SAFETY: Caller guarantees exclusive access.
        // Navigate through SlotCell union → RcInner → ManuallyDrop<T> → T
        let cell_ptr = self.inner.as_ptr();
        let rc_inner = unsafe { (*cell_ptr).value_mut() };
        // SAFETY: value is live, caller guarantees exclusive access.
        // Dereference through ManuallyDrop to get &mut T.
        let md = unsafe { rc_inner.value_manual_drop_mut() };
        &mut *md
    }

    /// Converts to a raw slot for manual memory management.
    ///
    /// Returns `Some(RawSlot)` if this is the only reference (strong == 1, no weak refs).
    /// Returns `None` if other strong or weak references exist.
    ///
    /// Both refcounts are zeroed but the value is NOT dropped.
    /// Caller takes ownership and must eventually free via the allocator.
    ///
    /// # Important
    ///
    /// The `T` inside `RcInner<T>` is wrapped in `ManuallyDrop`. Calling
    /// `A::free()` on the returned slot does **not** drop `T`. The caller must
    /// extract the value (e.g., via `ptr::read`) before freeing, or the inner
    /// `T` will be leaked.
    #[inline]
    pub fn into_slot(self) -> Option<RawSlot<RcInner<T>>> {
        let rc_inner: &RcInner<T> = &self.inner;

        // Must be only reference - strong == 1 and no external weaks (just implicit)
        if rc_inner.strong() != 1 || rc_inner.weak() != 1 {
            return None;
        }

        // Set counts to 0 - we're taking full ownership via raw Slot
        rc_inner.set_strong(0);
        rc_inner.set_weak(0);

        // Extract the raw slot pointer
        let slot_ptr = self.inner.as_ptr();

        // Don't run Drop (which would try to free)
        std::mem::forget(self);

        // SAFETY: We verified we're the only reference, slot_ptr is valid
        Some(unsafe { RawSlot::from_ptr(slot_ptr) })
    }

    /// Converts to a raw slot without checking refcounts.
    ///
    /// Caller takes full ownership of the slot. Refcounts are NOT modified —
    /// the caller is responsible for ensuring no other references exist or
    /// for handling the consequences.
    ///
    /// # Safety
    ///
    /// - Caller takes ownership of the slot and the value within
    /// - If other strong references exist, they will see stale refcounts
    ///   and may double-free or access dropped memory
    /// - If weak references exist, they will fail to upgrade (this is safe)
    ///   but may attempt deallocation based on stale counts
    #[inline]
    pub unsafe fn into_slot_unchecked(self) -> RawSlot<RcInner<T>> {
        // DON'T touch refcounts - caller takes full ownership
        // Any other refs will see stale counts, but that's caller's problem

        // Extract the raw slot pointer
        let slot_ptr = self.inner.as_ptr();

        // Don't run Drop
        std::mem::forget(self);

        unsafe { RawSlot::from_ptr(slot_ptr) }
    }

    // =========================================================================
    // Raw pointer API (mirrors std::rc::Rc)
    // =========================================================================

    /// Returns a raw pointer to the underlying slot cell.
    ///
    /// The pointer is valid as long as any strong reference exists.
    #[inline]
    pub fn as_ptr(&self) -> *mut SlotCell<RcInner<T>> {
        self.inner.as_ptr()
    }

    /// Returns `true` if two `RcSlot`s point to the same allocation.
    #[inline]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.as_ptr() == other.as_ptr()
    }

    /// Consumes the `RcSlot` without decrementing the strong count.
    ///
    /// The caller takes responsibility for the strong count and must
    /// eventually call [`from_raw`](Self::from_raw) (to reconstruct and
    /// drop) or [`decrement_strong_count`](Self::decrement_strong_count).
    #[inline]
    pub fn into_raw(self) -> *mut SlotCell<RcInner<T>> {
        let ptr = self.inner.as_ptr();
        std::mem::forget(self);
        ptr
    }

    /// Reconstructs an `RcSlot` from a raw pointer without incrementing
    /// the strong count.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid, occupied `SlotCell<RcInner<T>>` within
    ///   an allocator of type `A`
    /// - The caller must own a strong count for this handle (e.g., obtained
    ///   via [`into_raw`](Self::into_raw) or
    ///   [`increment_strong_count`](Self::increment_strong_count))
    #[inline]
    pub unsafe fn from_raw(ptr: *mut SlotCell<RcInner<T>>) -> Self {
        RcSlot {
            inner: ManuallyDrop::new(unsafe { BoxSlot::from_raw(ptr) }),
            _phantom: PhantomData,
        }
    }

    /// Increments the strong count via a raw pointer.
    ///
    /// Use this when a data structure needs to acquire an additional strong
    /// reference from a raw pointer without holding an `RcSlot`.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a live `RcInner<T>` (strong > 0)
    #[inline]
    pub unsafe fn increment_strong_count(ptr: *mut SlotCell<RcInner<T>>) {
        // SAFETY: Caller guarantees ptr points to a live RcInner
        let rc_inner = unsafe { (*ptr).value_ref() };
        let strong = rc_inner.strong();
        let new_strong = strong
            .checked_add(1)
            .expect("RcSlot strong count overflow");
        rc_inner.set_strong(new_strong);
    }

    /// Decrements the strong count via a raw pointer.
    ///
    /// If the strong count reaches zero, the value is dropped. If both
    /// strong and weak counts reach zero, the slab slot is freed.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid `RcInner<T>`
    /// - The caller must own a strong count to decrement
    /// - After this call, `ptr` may be invalid if the slot was freed
    #[inline]
    pub unsafe fn decrement_strong_count(ptr: *mut SlotCell<RcInner<T>>) {
        // Reconstruct and drop — reuses existing Drop logic
        drop(unsafe { Self::from_raw(ptr) });
    }
}
774
775impl<T, A: BoundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
776 /// Tries to create a new `RcSlot` containing the given value.
777 ///
778 /// Returns `Err(Full(value))` if the allocator is at capacity.
779 ///
780 /// Only available for bounded allocators. For unbounded allocators,
781 /// use [`new`](Self::new) directly - it never fails.
782 #[inline]
783 pub fn try_new(value: T) -> Result<Self, Full<T>> {
784 match BoxSlot::try_new(RcInner::new(value)) {
785 Ok(slot) => Ok(RcSlot {
786 inner: ManuallyDrop::new(slot),
787 _phantom: PhantomData,
788 }),
789 Err(full) => Err(Full(full.into_inner().into_value())),
790 }
791 }
792}
793
794impl<T: Clone, A: UnboundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
795 /// Makes a mutable reference to the value, cloning if necessary.
796 ///
797 /// If this is the only reference (strong == 1, weak == 0), returns a
798 /// mutable reference directly. Otherwise, clones the value into a new
799 /// slot and returns a mutable reference to the clone.
800 ///
801 /// Always succeeds - grows the allocator if needed.
802 ///
803 /// Only available for unbounded allocators. For bounded allocators,
804 /// use [`try_make_mut`](Self::try_make_mut).
805 #[inline]
806 pub fn make_mut(&mut self) -> &mut T {
807 if self.strong_count() != 1 || self.weak_count() != 0 {
808 // Clone into new slot, replace self
809 *self = Self::new((**self).clone());
810 }
811 // SAFETY: Now we're the only reference
812 unsafe { self.get_mut_unchecked() }
813 }
814}
815
816impl<T: Clone, A: BoundedAlloc<Item = RcInner<T>>> RcSlot<T, A> {
817 /// Tries to make a mutable reference to the value, cloning if necessary.
818 ///
819 /// If this is the only reference (strong == 1, weak == 0), returns a
820 /// mutable reference directly. Otherwise, attempts to clone the value
821 /// into a new slot.
822 ///
823 /// Returns `Err(Full)` if allocation fails.
824 ///
825 /// Only available for bounded allocators. For unbounded allocators,
826 /// use [`make_mut`](Self::make_mut) directly - it never fails.
827 #[inline]
828 pub fn try_make_mut(&mut self) -> Result<&mut T, Full<()>> {
829 if self.strong_count() != 1 || self.weak_count() != 0 {
830 // Clone into new slot, replace self
831 match Self::try_new((**self).clone()) {
832 Ok(new_slot) => *self = new_slot,
833 Err(_) => return Err(Full(())),
834 }
835 }
836 // SAFETY: Now we're the only reference
837 Ok(unsafe { self.get_mut_unchecked() })
838 }
839}
840
841impl<T, A: Alloc<Item = RcInner<T>>> Clone for RcSlot<T, A> {
842 #[inline]
843 fn clone(&self) -> Self {
844 let rc_inner: &RcInner<T> = &self.inner;
845 let new_strong = rc_inner
846 .strong()
847 .checked_add(1)
848 .expect("RcSlot strong count overflow");
849 rc_inner.set_strong(new_strong);
850 // SAFETY: We hold a strong ref, slot is alive
851 let cloned_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
852 RcSlot {
853 inner: ManuallyDrop::new(cloned_slot),
854 _phantom: PhantomData,
855 }
856 }
857}
858
impl<T, A: Alloc<Item = RcInner<T>>> Drop for RcSlot<T, A> {
    #[inline]
    fn drop(&mut self) {
        // All refcount access goes through raw pointers to avoid Stacked
        // Borrows invalidation when we take &mut to drop the value.
        let cell_ptr = self.inner.as_ptr();

        // SAFETY: Slot is alive, slot is occupied
        let strong = unsafe { (*cell_ptr).value_ref().strong() };
        if strong > 1 {
            // Other strong refs remain: just decrement and bail.
            // SAFETY: same as above
            unsafe { (*cell_ptr).value_ref().set_strong(strong - 1) };
            return;
        }

        // Last strong reference — drop the value
        // SAFETY: same as above
        unsafe { (*cell_ptr).value_ref().set_strong(0) };

        // SAFETY: We are the last strong ref, value is live. We need &mut
        // to drop the ManuallyDrop<T> inside RcInner.
        unsafe {
            let rc_inner_mut = (*cell_ptr).value_mut();
            ManuallyDrop::drop(rc_inner_mut.value_manual_drop_mut());
        }

        // Re-derive shared ref after the mutable drop above
        // SAFETY: RcInner is still valid memory (Cell<u32> fields are Copy,
        // ManuallyDrop<T> is dropped but the storage is still there)
        let weak = unsafe { (*cell_ptr).value_ref().weak() };
        if weak == 1 {
            // No outstanding weaks — free the slot.
            // SAFETY: Value is dropped. Slot's drop_in_place on RcInner is
            // a no-op (ManuallyDrop<T> already dropped, Cell<u32> is Copy).
            // BoxSlot's Drop will call A::free() to return slot to freelist.
            unsafe { ManuallyDrop::drop(&mut self.inner) };
        } else {
            // Release the implicit weak held by the strong references.
            // SAFETY: same as weak read above
            unsafe { (*cell_ptr).value_ref().set_weak(weak - 1) };
            // Zombie: T dropped, weak refs still hold the slot alive
        }
    }
}
902
903impl<T, A: Alloc<Item = RcInner<T>>> Deref for RcSlot<T, A> {
904 type Target = T;
905
906 #[inline]
907 fn deref(&self) -> &T {
908 let rc_inner: &RcInner<T> = &self.inner;
909 rc_inner.value()
910 }
911}
912
913impl<T, A: Alloc<Item = RcInner<T>>> AsRef<T> for RcSlot<T, A> {
914 #[inline]
915 fn as_ref(&self) -> &T {
916 self
917 }
918}
919
920impl<T: fmt::Debug, A: Alloc<Item = RcInner<T>>> fmt::Debug for RcSlot<T, A> {
921 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
922 f.debug_struct("RcSlot")
923 .field("strong", &self.strong_count())
924 .field("weak", &self.weak_count())
925 .field("value", &**self)
926 .finish()
927 }
928}
929
930// =============================================================================
931// WeakSlot<T, A>
932// =============================================================================
933
/// Weak reference to a slab-allocated value.
///
/// Does not keep the value alive. Must [`upgrade`](Self::upgrade) to access
/// the value. Keeps the slab slot alive (for upgrade checks) until all weak
/// and strong references are dropped.
///
/// 8 bytes — same as `BoxSlot`.
pub struct WeakSlot<T, A: Alloc<Item = RcInner<T>>> {
    // ManuallyDrop: WeakSlot::drop only releases the slot when it is the
    // last weak AND the value is already dropped (strong == 0).
    inner: ManuallyDrop<BoxSlot<RcInner<T>, A>>,
    _phantom: PhantomData<T>,
}
945
946impl<T, A: Alloc<Item = RcInner<T>>> WeakSlot<T, A> {
947 /// Attempts to upgrade to a strong reference.
948 ///
949 /// Returns `Some(RcSlot)` if the value is still alive (strong > 0),
950 /// or `None` if the last strong reference has been dropped.
951 #[inline]
952 pub fn upgrade(&self) -> Option<RcSlot<T, A>> {
953 let rc_inner: &RcInner<T> = &self.inner;
954 let strong = rc_inner.strong();
955 if strong == 0 {
956 return None;
957 }
958 let new_strong = strong.checked_add(1).expect("RcSlot strong count overflow");
959 rc_inner.set_strong(new_strong);
960 // SAFETY: strong > 0 means slot is alive and value is valid
961 let slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
962 Some(RcSlot {
963 inner: ManuallyDrop::new(slot),
964 _phantom: PhantomData,
965 })
966 }
967
968 /// Returns the strong reference count.
969 #[inline]
970 pub fn strong_count(&self) -> u32 {
971 let rc_inner: &RcInner<T> = &self.inner;
972 rc_inner.strong()
973 }
974
975 /// Returns the weak reference count (excludes the implicit weak).
976 #[inline]
977 pub fn weak_count(&self) -> u32 {
978 let rc_inner: &RcInner<T> = &self.inner;
979 let weak = rc_inner.weak();
980 // If strong > 0, subtract the implicit weak. If strong == 0,
981 // the implicit weak was already decremented.
982 if rc_inner.strong() > 0 {
983 weak.saturating_sub(1)
984 } else {
985 weak
986 }
987 }
988}
989
990impl<T, A: Alloc<Item = RcInner<T>>> Clone for WeakSlot<T, A> {
991 #[inline]
992 fn clone(&self) -> Self {
993 let rc_inner: &RcInner<T> = &self.inner;
994 let new_weak = rc_inner
995 .weak()
996 .checked_add(1)
997 .expect("WeakSlot weak count overflow");
998 rc_inner.set_weak(new_weak);
999 // SAFETY: We hold a weak ref, slot memory is alive
1000 let cloned_slot = unsafe { BoxSlot::from_raw(self.inner.as_ptr()) };
1001 WeakSlot {
1002 inner: ManuallyDrop::new(cloned_slot),
1003 _phantom: PhantomData,
1004 }
1005 }
1006}
1007
impl<T, A: Alloc<Item = RcInner<T>>> Drop for WeakSlot<T, A> {
    #[inline]
    fn drop(&mut self) {
        let rc_inner: &RcInner<T> = &self.inner;
        let weak = rc_inner.weak();
        debug_assert!(weak > 0, "WeakSlot dropped with zero weak count");

        // Always decrement weak count
        rc_inner.set_weak(weak.checked_sub(1).expect("WeakSlot: weak count underflow"));

        // Dealloc only if this was the last weak AND value already dropped (strong==0).
        // (If strong > 0, the implicit weak has not been released yet, so
        // weak == 1 here can only mean a zombie slot.)
        if weak == 1 && rc_inner.strong() == 0 {
            // Zombie slot — value already dropped, dealloc the slot.
            // SAFETY: RcInner's ManuallyDrop<T> is already dropped.
            // BoxSlot's drop_in_place on RcInner is a no-op. Dealloc returns
            // the slot to the freelist.
            unsafe { ManuallyDrop::drop(&mut self.inner) };
        }
        // If strong > 0, strong holder's drop will handle dealloc.
        // If weak > 1, other weak refs still hold the slot alive.
    }
}
1030
1031impl<T, A: Alloc<Item = RcInner<T>>> fmt::Debug for WeakSlot<T, A> {
1032 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1033 f.debug_struct("WeakSlot")
1034 .field("strong", &self.strong_count())
1035 .field("weak", &self.weak_count())
1036 .finish()
1037 }
1038}