// nexus_slab/byte.rs
1//! Byte slab types for type-erased allocation.
2//!
3//! This module provides:
4//! - [`AlignedBytes`] — fixed-size byte storage with pointer alignment
5//! - [`BoundedByteAlloc`] / [`UnboundedByteAlloc`] — traits for byte slab allocators
6//! - [`BoxSlot`] — RAII handle for TLS byte allocators
7//!   (8 bytes for `Sized` types, 16 bytes for `dyn Trait`)
8//! - [`Slot`] — move-only handle for struct-owned byte slabs
9//!   (8 bytes for `Sized` types, 16 bytes for `dyn Trait`)
10
11use std::borrow::{Borrow, BorrowMut};
12use std::fmt;
13use std::marker::PhantomData;
14use std::mem::{self, align_of, size_of};
15use std::ops::{Deref, DerefMut};
16use std::pin::Pin;
17use std::ptr;
18
19use crate::alloc::{Alloc, Full, LocalStatic};
20use crate::shared::{RawSlot, SlotCell};
21
22// =============================================================================
23// AlignedBytes
24// =============================================================================
25
/// Fixed-size byte storage with pointer alignment.
///
/// Used as `SlotCell<AlignedBytes<N>>` in byte slab allocators. The 8-byte
/// alignment matches the `next_free` pointer in the `SlotCell` union and
/// covers all common types (up to `u64`, pointers, most structs).
///
/// Types requiring greater than 8-byte alignment (e.g., SIMD vectors)
/// cannot be stored in a byte slab.
///
/// `Copy` guarantees `drop_in_place` is a compile-time no-op, so the slab
/// can recycle a slot without running any destructor on the raw bytes.
#[derive(Clone, Copy)]
#[repr(C, align(8))]
pub struct AlignedBytes<const N: usize> {
    // Never read through this field directly: occupants are written and
    // read via pointer casts (see `write_and_zero_pad` and the `Slab`
    // insert helpers below).
    bytes: [u8; N],
}
41
42// =============================================================================
43// Traits
44// =============================================================================
45
/// Trait for bounded byte slab allocators.
///
/// Provides raw slot claiming so [`BoxSlot`] can write `T` directly
/// into slot memory without constructing an intermediate `AlignedBytes`.
///
/// # Safety
///
/// Implementors must guarantee:
/// - `claim_raw` returns a valid, vacant slot from the TLS slab
/// - The returned pointer is exclusively owned by the caller
pub unsafe trait BoundedByteAlloc: Alloc {
    /// Claims a raw slot pointer from the freelist.
    ///
    /// Returns `None` if the allocator is full. On `Some`, ownership of
    /// the slot transfers to the caller until it is returned via
    /// `Alloc::free`.
    fn claim_raw() -> Option<*mut SlotCell<Self::Item>>;
}
62
/// Trait for unbounded byte slab allocators.
///
/// Always succeeds — grows the allocator if needed.
///
/// # Safety
///
/// Same guarantees as [`BoundedByteAlloc`], plus the returned pointer
/// is always valid (allocator grows on demand).
pub unsafe trait UnboundedByteAlloc: Alloc {
    /// Claims a raw slot pointer, growing the allocator if needed.
    ///
    /// Never fails; ownership of the slot transfers to the caller until
    /// it is returned via `Alloc::free`.
    fn claim_raw() -> *mut SlotCell<Self::Item>;

    /// Ensures at least `count` chunks are allocated.
    fn reserve_chunks(count: usize);

    /// Returns the number of allocated chunks.
    fn chunk_count() -> usize;
}
81
82// =============================================================================
83// BoxSlot<T, A>
84// =============================================================================
85
/// RAII handle to a byte-slab-allocated value, generic over allocator.
///
/// `BoxSlot<T, A>` stores a value of type `T` in a byte slab managed by
/// allocator `A`. The allocator manages [`AlignedBytes<N>`] storage, while
/// this handle provides typed access via `Deref<Target = T>` and correctly
/// drops `T` when the handle is dropped.
///
/// # Size
///
/// - 8 bytes for `Sized` types (thin pointer)
/// - 16 bytes for `dyn Trait` types (fat pointer = data ptr + vtable ptr)
///
/// # Thread Safety
///
/// `BoxSlot` is `!Send` and `!Sync`. It must only be used from the
/// thread that created it.
///
/// # Compile-Time Safety
///
/// [`try_new`](Self::try_new) and [`new`](Self::new) include `const`
/// assertions that verify:
/// - `size_of::<T>() <= N` — T fits in the slot
/// - `align_of::<T>() <= 8` — T alignment is compatible
///
/// Violations are compile errors, not runtime panics.
#[must_use = "dropping BoxSlot returns it to the allocator"]
pub struct BoxSlot<T: ?Sized, A: Alloc> {
    // Thin for Sized T; fat (data ptr + vtable) for dyn Trait. The data
    // address doubles as the address of the underlying SlotCell.
    ptr: *mut T,
    // `A` ties the handle to its allocator for Drop; the raw pointer
    // component suppresses Send/Sync (TLS slabs are single-threaded).
    _marker: PhantomData<(A, *const ())>,
}
116
117// =============================================================================
118// Sized-only constructors (bounded)
119// =============================================================================
120
121impl<T, A: BoundedByteAlloc> BoxSlot<T, A> {
122    /// Tries to create a new slot containing the given value.
123    ///
124    /// Returns `Err(Full(value))` if the allocator is at capacity,
125    /// giving the value back to the caller.
126    ///
127    /// # Compile-Time Checks
128    ///
129    /// Fails to compile if `T` is too large or too aligned for the slot.
130    #[inline]
131    pub fn try_new(value: T) -> Result<Self, Full<T>> {
132        const {
133            assert!(
134                size_of::<T>() <= size_of::<A::Item>(),
135                "T does not fit in byte slab slot"
136            );
137        };
138        const {
139            assert!(
140                align_of::<T>() <= align_of::<A::Item>(),
141                "T alignment exceeds slot alignment"
142            );
143        };
144
145        match A::claim_raw() {
146            Some(slot_ptr) => {
147                // SAFETY: slot_ptr is a valid, vacant slot exclusively owned
148                // by us. T fits within AlignedBytes<N> (const asserted above).
149                // SlotCell is repr(C) union with fields at offset 0;
150                // ManuallyDrop and MaybeUninit are transparent; AlignedBytes
151                // is repr(C) with bytes at offset 0. So slot_ptr points to
152                // where T's bytes go.
153                unsafe {
154                    write_and_zero_pad::<T, A>(slot_ptr, value);
155                }
156                Ok(BoxSlot {
157                    ptr: slot_ptr as *mut T,
158                    _marker: PhantomData,
159                })
160            }
161            None => Err(Full(value)),
162        }
163    }
164
165    /// Tries to create a slot containing `value`, returning a handle typed
166    /// as `BoxSlot<U, A>` where `U: ?Sized`.
167    ///
168    /// The `coerce` function converts the concrete `*mut T` to a fat pointer
169    /// `*mut U` (e.g., `|p| p as *mut dyn Trait`).
170    ///
171    /// # Compile-Time Checks
172    ///
173    /// Same as [`try_new`](Self::try_new).
174    #[inline]
175    pub fn try_new_as<U: ?Sized>(
176        value: T,
177        coerce: fn(*mut T) -> *mut U,
178    ) -> Result<BoxSlot<U, A>, Full<T>> {
179        match Self::try_new(value) {
180            Ok(slot) => Ok(slot.unsize(coerce)),
181            Err(full) => Err(full),
182        }
183    }
184}
185
186// =============================================================================
187// Sized-only constructors (unbounded)
188// =============================================================================
189
190impl<T, A: UnboundedByteAlloc> BoxSlot<T, A> {
191    /// Creates a new slot containing the given value.
192    ///
193    /// Always succeeds — grows the allocator if needed.
194    ///
195    /// # Compile-Time Checks
196    ///
197    /// Fails to compile if `T` is too large or too aligned for the slot.
198    #[inline]
199    pub fn new(value: T) -> Self {
200        const {
201            assert!(
202                size_of::<T>() <= size_of::<A::Item>(),
203                "T does not fit in byte slab slot"
204            );
205        };
206        const {
207            assert!(
208                align_of::<T>() <= align_of::<A::Item>(),
209                "T alignment exceeds slot alignment"
210            );
211        };
212
213        let slot_ptr = A::claim_raw();
214        // SAFETY: Same as try_new — slot_ptr is valid and exclusively ours.
215        unsafe {
216            write_and_zero_pad::<T, A>(slot_ptr, value);
217        }
218        BoxSlot {
219            ptr: slot_ptr as *mut T,
220            _marker: PhantomData,
221        }
222    }
223
224    /// Creates a slot containing `value`, returning a handle typed as
225    /// `BoxSlot<U, A>` where `U: ?Sized`.
226    ///
227    /// The `coerce` function converts the concrete `*mut T` to a fat pointer
228    /// `*mut U` (e.g., `|p| p as *mut dyn Trait`).
229    ///
230    /// # Compile-Time Checks
231    ///
232    /// Same as [`new`](Self::new).
233    #[inline]
234    pub fn new_as<U: ?Sized>(value: T, coerce: fn(*mut T) -> *mut U) -> BoxSlot<U, A> {
235        Self::new(value).unsize(coerce)
236    }
237}
238
239// =============================================================================
240// Sized-only methods
241// =============================================================================
242
impl<T, A: Alloc> BoxSlot<T, A> {
    /// Extracts the value from the slot, deallocating the slot.
    #[inline]
    pub fn into_inner(self) -> T {
        // SAFETY: T is Sized, so self.ptr is a thin pointer whose address
        // is the start of the SlotCell. Read the value, reconstruct the
        // Slot for freeing. `mem::forget` runs first so this handle's Drop
        // can never also drop the value or free the slot.
        let data_ptr = self.ptr;
        mem::forget(self);
        let value = unsafe { ptr::read(data_ptr) };
        // SAFETY: data_ptr is the address of the SlotCell<A::Item>.
        // Reconstruct RawSlot to pass to A::free.
        let slot = unsafe { RawSlot::from_ptr(data_ptr as *mut SlotCell<A::Item>) };
        unsafe { A::free(slot) };
        value
    }

    /// Replaces the value in the slot, returning the old value.
    #[inline]
    pub fn replace(&mut self, value: T) -> T {
        // SAFETY: We own the slot exclusively (&mut self). T is at offset 0.
        // read-then-write moves the old value out before the new one lands,
        // so no value is ever dropped or duplicated here.
        unsafe {
            let old = ptr::read(self.ptr);
            ptr::write(self.ptr, value);
            old
        }
    }

    /// Converts this `BoxSlot<T, A>` into a `BoxSlot<U, A>` where
    /// `U: ?Sized`, using the given coercion function.
    ///
    /// This is the low-level API for unsizing. For convenience, use the
    /// [`box_dyn!`](crate::box_dyn) or
    /// [`try_box_dyn!`](crate::try_box_dyn) macros.
    ///
    /// # Example
    ///
    /// ```ignore
    /// let sized: BoxSlot<MyHandler, A> = BoxSlot::new(handler);
    /// let dyn_slot: BoxSlot<dyn Handler<E>, A> = sized.unsize(|p| p as *mut dyn Handler<E>);
    /// ```
    #[inline]
    pub fn unsize<U: ?Sized>(self, coerce: fn(*mut T) -> *mut U) -> BoxSlot<U, A> {
        let thin_ptr = self.ptr;
        let fat_ptr = coerce(thin_ptr);
        // Verify the coercion didn't change the data pointer.
        // This is assert, not debug_assert — an incorrect coerce function
        // would cause UB in Drop (wrong data pointer → wrong slot freed).
        // The assert runs BEFORE mem::forget, so a failed coercion still
        // drops `self` normally and the slot is returned to the allocator.
        assert_eq!(
            fat_ptr as *const () as usize, thin_ptr as *const () as usize,
            "coerce function must not change the data pointer address"
        );
        mem::forget(self);
        BoxSlot {
            ptr: fat_ptr,
            _marker: PhantomData,
        }
    }
}
302
303// =============================================================================
304// ?Sized methods
305// =============================================================================
306
impl<T: ?Sized, A: Alloc> BoxSlot<T, A> {
    /// Leaks the slot permanently, returning an immutable reference.
    ///
    /// The value will never be dropped or deallocated.
    #[inline]
    pub fn leak(self) -> LocalStatic<T> {
        let ptr = self.ptr.cast_const();
        // mem::forget skips Drop, so neither the value nor the slot is
        // ever reclaimed — that is the point of leaking.
        mem::forget(self);
        // SAFETY: Slot is permanently leaked. ptr points to a valid T.
        unsafe { LocalStatic::new(ptr) }
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    ///
    /// NOTE(review): `Pin::new_unchecked` here assumes the value stays
    /// pinned until drop, but `into_inner`/`replace` can move it after
    /// this borrow ends — for `!Unpin` types that would break the pinning
    /// contract. Confirm callers treat pinning as permanent.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses. The BoxSlot owns
        // the slot, so the value cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    ///
    /// NOTE(review): same pinning caveat as [`pin`](Self::pin).
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses. We have exclusive
        // access (&mut self).
        unsafe { Pin::new_unchecked(&mut **self) }
    }
}
341
342// =============================================================================
343// Trait Implementations
344// =============================================================================
345
346impl<T: ?Sized, A: Alloc> Deref for BoxSlot<T, A> {
347    type Target = T;
348
349    #[inline]
350    fn deref(&self) -> &T {
351        // SAFETY: self.ptr points to a valid, occupied T value within the
352        // slab. For Sized T this is a thin pointer cast; for dyn Trait this
353        // is a fat pointer that carries the vtable.
354        unsafe { &*self.ptr }
355    }
356}
357
358impl<T: ?Sized, A: Alloc> DerefMut for BoxSlot<T, A> {
359    #[inline]
360    fn deref_mut(&mut self) -> &mut T {
361        // SAFETY: We have &mut self, guaranteeing exclusive access.
362        unsafe { &mut *self.ptr }
363    }
364}
365
366impl<T: ?Sized, A: Alloc> AsRef<T> for BoxSlot<T, A> {
367    #[inline]
368    fn as_ref(&self) -> &T {
369        self
370    }
371}
372
373impl<T: ?Sized, A: Alloc> AsMut<T> for BoxSlot<T, A> {
374    #[inline]
375    fn as_mut(&mut self) -> &mut T {
376        self
377    }
378}
379
380impl<T: ?Sized, A: Alloc> Borrow<T> for BoxSlot<T, A> {
381    #[inline]
382    fn borrow(&self) -> &T {
383        self
384    }
385}
386
387impl<T: ?Sized, A: Alloc> BorrowMut<T> for BoxSlot<T, A> {
388    #[inline]
389    fn borrow_mut(&mut self) -> &mut T {
390        self
391    }
392}
393
impl<T: ?Sized, A: Alloc> Drop for BoxSlot<T, A> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: We own the slot. Drop T first, then free the slot.
        // For dyn Trait, drop_in_place dispatches through the vtable.
        // The `as *mut ()` cast strips any fat-pointer metadata so the
        // data address can be reinterpreted as the SlotCell for A::free.
        // NOTE(review): if T's destructor panics, the slot is leaked
        // (never freed, never double-freed) — confirm that is the
        // intended policy.
        unsafe {
            ptr::drop_in_place(self.ptr);
            let data_ptr = self.ptr as *mut () as *mut SlotCell<A::Item>;
            let slot = RawSlot::from_ptr(data_ptr);
            A::free(slot);
        }
    }
}
409
410impl<T: fmt::Debug + ?Sized, A: Alloc> fmt::Debug for BoxSlot<T, A> {
411    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
412        f.debug_struct("BoxSlot").field("value", &&**self).finish()
413    }
414}
415
416// =============================================================================
417// Slot<T: ?Sized>
418// =============================================================================
419
/// Move-only handle to a value stored in a byte slab.
///
/// Unlike [`BoxSlot`] (TLS allocator), `Slot` is for struct-owned slabs
/// (`bounded::Slab<AlignedBytes<N>>` or `unbounded::Slab<AlignedBytes<N>>`).
/// It does NOT auto-free on drop — the caller must return it to the slab via
/// [`remove`](crate::bounded::Slab::remove),
/// [`take_value`](crate::bounded::Slab::take_value), or
/// [`reclaim`](crate::bounded::Slab::reclaim).
///
/// In debug builds, dropping a `Slot` without freeing it panics (see the
/// `#[cfg(debug_assertions)]` `Drop` impl below).
///
/// # Size
///
/// - 8 bytes for `Sized` types (thin pointer)
/// - 16 bytes for `dyn Trait` types (fat pointer)
///
/// # Thread Safety
///
/// `Slot` is `!Send` and `!Sync`.
#[must_use = "slot must be freed via slab.remove() or slab.take_value()"]
pub struct Slot<T: ?Sized> {
    // Thin for Sized T; fat (data ptr + vtable) for dyn Trait.
    ptr: *mut T,
    _marker: PhantomData<*const ()>, // !Send + !Sync
}
442
443// =============================================================================
444// Slot — Sized-only methods
445// =============================================================================
446
447impl<T> Slot<T> {
448    /// Unsizes this handle (e.g., concrete → dyn Trait).
449    ///
450    /// The `coerce` function converts the concrete `*mut T` to a fat pointer
451    /// `*mut U` (e.g., `|p| p as *mut dyn Trait`).
452    #[inline]
453    pub fn unsize<U: ?Sized>(self, coerce: fn(*mut T) -> *mut U) -> Slot<U> {
454        let thin_ptr = self.ptr;
455        let fat_ptr = coerce(thin_ptr);
456        // assert, not debug_assert — an incorrect coerce function would cause
457        // UB when the slab frees the wrong slot.
458        assert_eq!(
459            fat_ptr as *const () as usize, thin_ptr as *const () as usize,
460            "coerce function must not change the data pointer address"
461        );
462        mem::forget(self);
463        Slot {
464            ptr: fat_ptr,
465            _marker: PhantomData,
466        }
467    }
468}
469
470// =============================================================================
471// Slot — ?Sized methods
472// =============================================================================
473
impl<T: ?Sized> Slot<T> {
    /// Creates a `Slot` from a raw pointer.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid, live value within a byte slab
    /// - The caller transfers ownership to the `Slot`
    #[inline]
    pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
        Slot {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Extracts the raw pointer, consuming the `Slot` without running
    /// the debug-mode leak detector.
    // mem::forget bypasses the #[cfg(debug_assertions)] Drop impl below,
    // so the slab free helpers can consume a Slot without panicking.
    #[inline]
    pub(crate) fn into_raw(self) -> *mut T {
        let ptr = self.ptr;
        mem::forget(self);
        ptr
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    ///
    /// NOTE(review): `Pin::new_unchecked` assumes the value stays pinned
    /// until drop, but `Slab::take_value` can move it after this borrow
    /// ends — for `!Unpin` types that would break the pinning contract.
    /// Confirm callers treat pinning as permanent.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses. The Slot owns
        // the value, so it cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    ///
    /// NOTE(review): same pinning caveat as [`pin`](Self::pin).
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses. We have exclusive
        // access (&mut self).
        unsafe { Pin::new_unchecked(&mut **self) }
    }
}
520
521// =============================================================================
522// Slot — Trait Implementations
523// =============================================================================
524
525impl<T: ?Sized> Deref for Slot<T> {
526    type Target = T;
527
528    #[inline]
529    fn deref(&self) -> &T {
530        // SAFETY: self.ptr points to a valid, occupied T value within the
531        // slab. For Sized T this is a thin pointer; for dyn Trait this is
532        // a fat pointer carrying the vtable.
533        unsafe { &*self.ptr }
534    }
535}
536
537impl<T: ?Sized> DerefMut for Slot<T> {
538    #[inline]
539    fn deref_mut(&mut self) -> &mut T {
540        // SAFETY: We have &mut self, guaranteeing exclusive access.
541        unsafe { &mut *self.ptr }
542    }
543}
544
545impl<T: ?Sized> AsRef<T> for Slot<T> {
546    #[inline]
547    fn as_ref(&self) -> &T {
548        self
549    }
550}
551
552impl<T: ?Sized> AsMut<T> for Slot<T> {
553    #[inline]
554    fn as_mut(&mut self) -> &mut T {
555        self
556    }
557}
558
559impl<T: ?Sized> Borrow<T> for Slot<T> {
560    #[inline]
561    fn borrow(&self) -> &T {
562        self
563    }
564}
565
566impl<T: ?Sized> BorrowMut<T> for Slot<T> {
567    #[inline]
568    fn borrow_mut(&mut self) -> &mut T {
569        self
570    }
571}
572
573impl<T: fmt::Debug + ?Sized> fmt::Debug for Slot<T> {
574    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
575        f.debug_struct("Slot").field("value", &&**self).finish()
576    }
577}
578
// Debug-only leak detector: a Slot must be consumed by the slab's
// remove()/take_value()/reclaim(), all of which go through `into_raw`
// and skip this Drop via mem::forget. Reaching this Drop means the
// caller lost the handle without freeing it.
#[cfg(debug_assertions)]
impl<T: ?Sized> Drop for Slot<T> {
    fn drop(&mut self) {
        if std::thread::panicking() {
            // During unwinding: log but don't abort. Leak is lesser evil than abort.
            // (Panicking inside a panic would abort the whole process.)
            eprintln!(
                "byte::Slot<{}> leaked during panic unwind (was not freed)",
                std::any::type_name::<T>()
            );
        } else {
            panic!(
                "byte::Slot<{}> dropped without being freed — \
                 call slab.remove() or slab.take_value()",
                std::any::type_name::<T>()
            );
        }
    }
}
597
598// =============================================================================
599// Internal helpers
600// =============================================================================
601
602/// Writes `value` into a slot and zeroes trailing bytes.
603///
604/// # Safety
605///
606/// - `slot_ptr` must be a valid, exclusively-owned, vacant slot
607/// - `T` must fit within `A::Item` (caller must const-assert)
608#[inline]
609unsafe fn write_and_zero_pad<T, A: Alloc>(slot_ptr: *mut SlotCell<A::Item>, value: T) {
610    // SAFETY: Caller guarantees slot_ptr is valid and exclusively owned.
611    // T fits within A::Item (caller must const-assert).
612    unsafe {
613        ptr::write(slot_ptr as *mut T, value);
614        // Ensures the full AlignedBytes<N> is deterministically initialized.
615        // When size_of::<T>() == size_of::<A::Item>(), the compiler
616        // eliminates this entirely.
617        let t_size = size_of::<T>();
618        let slot_size = size_of::<A::Item>();
619        if t_size < slot_size {
620            ptr::write_bytes((slot_ptr as *mut u8).add(t_size), 0, slot_size - t_size);
621        }
622    }
623}
624
625// =============================================================================
626// Convenience macros
627// =============================================================================
628
/// Creates a `BoxSlot<dyn Trait, A>` from a concrete value.
///
/// For unbounded byte allocators (always succeeds). Expands to
/// `BoxSlot::new_as` with the unsizing cast spelled out for you.
///
/// # Example
///
/// ```ignore
/// let handler = nexus_slab::box_dyn!(
///     msg_alloc::Allocator, dyn Handler<E>, my_handler
/// );
/// ```
#[macro_export]
macro_rules! box_dyn {
    ($alloc:ty, $dyn_ty:ty, $value:expr) => {{ <$crate::byte::BoxSlot<_, $alloc>>::new_as($value, |__p| __p as *mut $dyn_ty) }};
}
644
/// Creates a `BoxSlot<dyn Trait, A>` from a concrete value.
///
/// For bounded byte allocators (returns `Result`). Expands to
/// `BoxSlot::try_new_as` with the unsizing cast spelled out for you.
///
/// # Example
///
/// ```ignore
/// let handler = nexus_slab::try_box_dyn!(
///     msg_alloc::Allocator, dyn Handler<E>, my_handler
/// )?;
/// ```
#[macro_export]
macro_rules! try_box_dyn {
    ($alloc:ty, $dyn_ty:ty, $value:expr) => {{ <$crate::byte::BoxSlot<_, $alloc>>::try_new_as($value, |__p| __p as *mut $dyn_ty) }};
}
660
661// =============================================================================
662// Raw slab helpers — bounded::Slab<AlignedBytes<N>>
663// =============================================================================
664
impl<const N: usize> crate::bounded::Slab<AlignedBytes<N>> {
    /// Inserts a value into the slab, returning a [`Slot`](byte::Slot) handle.
    ///
    /// Returns `Err(value)` if the slab is full.
    ///
    /// # Compile-Time Checks
    ///
    /// Fails to compile if `T` is too large or too aligned for the slot.
    #[inline]
    pub fn try_insert<T>(&self, value: T) -> Result<Slot<T>, T> {
        const {
            assert!(size_of::<T>() <= N, "T does not fit in byte slab slot");
        };
        const {
            assert!(
                align_of::<T>() <= align_of::<AlignedBytes<N>>(),
                "T alignment exceeds slot alignment"
            );
        };

        match self.claim_ptr() {
            Some(slot_ptr) => {
                // SAFETY: slot_ptr is valid and exclusively ours from claim_ptr.
                // T fits (const-asserted). SlotCell repr(C) union has data at
                // offset 0.
                // (Same write-then-zero-pad sequence as `write_and_zero_pad`,
                // inlined here because that helper is typed on an `Alloc`.)
                unsafe {
                    let t_ptr = slot_ptr as *mut T;
                    ptr::write(t_ptr, value);
                    let t_size = size_of::<T>();
                    if t_size < N {
                        ptr::write_bytes((slot_ptr as *mut u8).add(t_size), 0, N - t_size);
                    }
                }
                // SAFETY: We just wrote a valid T at slot_ptr.
                Ok(unsafe { Slot::from_raw(slot_ptr as *mut T) })
            }
            None => Err(value),
        }
    }

    /// Drops the value and frees the slot.
    ///
    /// Handles both thin and fat pointers: extracts the data pointer for
    /// freeing regardless of whether `T` is `Sized` or `dyn Trait`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn remove<T: ?Sized>(&self, slot: Slot<T>) {
        // into_raw skips Slot's debug-mode leak detector.
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        // The `as *mut ()` cast strips fat-pointer metadata before the
        // data address is reinterpreted as the SlotCell to free.
        unsafe {
            ptr::drop_in_place(ptr);
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }

    /// Extracts the value and frees the slot (Sized only).
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn take_value<T>(&self, slot: Slot<T>) -> T {
        // into_raw skips Slot's debug-mode leak detector.
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        // Read moves the value out; the slot is then freed without drop.
        let value = unsafe { ptr::read(ptr) };
        let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
        unsafe { self.free_ptr(data_ptr) };
        value
    }

    /// Frees the slot without dropping the value.
    ///
    /// Use when the value has already been moved out or dropped.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    /// - The value must already be dropped or moved out
    #[inline]
    pub unsafe fn reclaim<T: ?Sized>(&self, slot: Slot<T>) {
        // into_raw skips Slot's debug-mode leak detector.
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab and value handled.
        unsafe {
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }
}
769
770// =============================================================================
771// Raw slab helpers — unbounded::Slab<AlignedBytes<N>>
772// =============================================================================
773
impl<const N: usize> crate::unbounded::Slab<AlignedBytes<N>> {
    /// Inserts a value into the slab, returning a [`Slot`](byte::Slot) handle.
    ///
    /// Always succeeds — grows the slab if needed.
    ///
    /// # Compile-Time Checks
    ///
    /// Fails to compile if `T` is too large or too aligned for the slot.
    #[inline]
    pub fn insert<T>(&self, value: T) -> Slot<T> {
        const {
            assert!(size_of::<T>() <= N, "T does not fit in byte slab slot");
        };
        const {
            assert!(
                align_of::<T>() <= align_of::<AlignedBytes<N>>(),
                "T alignment exceeds slot alignment"
            );
        };

        // Chunk index is unused here; only the slot address matters.
        let (slot_ptr, _chunk_idx) = self.claim_ptr();
        // SAFETY: slot_ptr is valid and exclusively ours from claim_ptr.
        // (Same write-then-zero-pad sequence as `write_and_zero_pad`,
        // inlined here because that helper is typed on an `Alloc`.)
        unsafe {
            let t_ptr = slot_ptr as *mut T;
            ptr::write(t_ptr, value);
            let t_size = size_of::<T>();
            if t_size < N {
                ptr::write_bytes((slot_ptr as *mut u8).add(t_size), 0, N - t_size);
            }
        }
        // SAFETY: We just wrote a valid T at slot_ptr.
        unsafe { Slot::from_raw(slot_ptr as *mut T) }
    }

    /// Drops the value and frees the slot.
    ///
    /// Handles both thin and fat pointers: extracts the data pointer for
    /// freeing regardless of whether `T` is `Sized` or `dyn Trait`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn remove<T: ?Sized>(&self, slot: Slot<T>) {
        // into_raw skips Slot's debug-mode leak detector.
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        // The `as *mut ()` cast strips fat-pointer metadata before the
        // data address is reinterpreted as the SlotCell to free.
        unsafe {
            ptr::drop_in_place(ptr);
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }

    /// Extracts the value and frees the slot (Sized only).
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn take_value<T>(&self, slot: Slot<T>) -> T {
        // into_raw skips Slot's debug-mode leak detector.
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        // Read moves the value out; the slot is then freed without drop.
        let value = unsafe { ptr::read(ptr) };
        let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
        unsafe { self.free_ptr(data_ptr) };
        value
    }

    /// Frees the slot without dropping the value.
    ///
    /// Use when the value has already been moved out or dropped.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    /// - The value must already be dropped or moved out
    #[inline]
    pub unsafe fn reclaim<T: ?Sized>(&self, slot: Slot<T>) {
        // into_raw skips Slot's debug-mode leak detector.
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab and value handled.
        unsafe {
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }
}