// nexus_slab/byte.rs
1//! Byte slab types for type-erased allocation.
2//!
3//! This module provides:
4//! - [`AlignedBytes`] — fixed-size byte storage with pointer alignment
5//! - [`BoundedByteAlloc`] / [`UnboundedByteAlloc`] — traits for byte slab allocators
6//! - [`BoxSlot`] — RAII handle for TLS byte allocators
7//! (8 bytes for `Sized` types, 16 bytes for `dyn Trait`)
8//! - [`Slot`] — move-only handle for struct-owned byte slabs
9//! (8 bytes for `Sized` types, 16 bytes for `dyn Trait`)
10
11use std::borrow::{Borrow, BorrowMut};
12use std::fmt;
13use std::marker::PhantomData;
14use std::mem::{self, align_of, size_of};
15use std::ops::{Deref, DerefMut};
16use std::pin::Pin;
17use std::ptr;
18
19use crate::alloc::{Alloc, Full, LocalStatic};
20use crate::shared::{RawSlot, SlotCell};
21
22// =============================================================================
23// AlignedBytes
24// =============================================================================
25
26/// Fixed-size byte storage with pointer alignment.
27///
28/// Used as `SlotCell<AlignedBytes<N>>` in byte slab allocators. The 8-byte
29/// alignment matches the `next_free` pointer in the `SlotCell` union and
30/// covers all common types (up to `u64`, pointers, most structs).
31///
32/// Types requiring greater than 8-byte alignment (e.g., SIMD vectors)
33/// cannot be stored in a byte slab.
34///
35/// `Copy` guarantees `drop_in_place` is a compile-time no-op.
#[derive(Clone, Copy)]
#[repr(C, align(8))]
pub struct AlignedBytes<const N: usize> {
    // Raw storage. `repr(C)` keeps this field at offset 0, so a pointer to
    // the struct is also a pointer to the first byte of storage.
    bytes: [u8; N],
}
41
42// =============================================================================
43// Traits
44// =============================================================================
45
46/// Trait for bounded byte slab allocators.
47///
48/// Provides raw slot claiming so [`BoxSlot`] can write `T` directly
49/// into slot memory without constructing an intermediate `AlignedBytes`.
50///
51/// # Safety
52///
53/// Implementors must guarantee:
54/// - `claim_raw` returns a valid, vacant slot from the TLS slab
55/// - The returned pointer is exclusively owned by the caller
pub unsafe trait BoundedByteAlloc: Alloc {
    /// Claims a raw slot pointer from the freelist.
    ///
    /// Returns `None` if the allocator is full.
    ///
    /// Associated function (no receiver): the slab is reached through the
    /// allocator type itself (TLS-backed — see the trait-level safety notes).
    fn claim_raw() -> Option<*mut SlotCell<Self::Item>>;
}
62
63/// Trait for unbounded byte slab allocators.
64///
65/// Always succeeds — grows the allocator if needed.
66///
67/// # Safety
68///
69/// Same guarantees as [`BoundedByteAlloc`], plus the returned pointer
70/// is always valid (allocator grows on demand).
pub unsafe trait UnboundedByteAlloc: Alloc {
    /// Claims a raw slot pointer, growing the allocator if needed.
    ///
    /// Infallible by the trait contract: the returned pointer is always valid.
    fn claim_raw() -> *mut SlotCell<Self::Item>;

    /// Ensures at least `count` chunks are allocated.
    fn reserve_chunks(count: usize);

    /// Returns the number of allocated chunks.
    fn chunk_count() -> usize;
}
81
82// =============================================================================
83// BoxSlot<T, A>
84// =============================================================================
85
86/// RAII handle to a byte-slab-allocated value, generic over allocator.
87///
88/// `BoxSlot<T, A>` stores a value of type `T` in a byte slab managed by
89/// allocator `A`. The allocator manages [`AlignedBytes<N>`] storage, while
90/// this handle provides typed access via `Deref<Target = T>` and correctly
91/// drops `T` when the handle is dropped.
92///
93/// # Size
94///
95/// - 8 bytes for `Sized` types (thin pointer)
96/// - 16 bytes for `dyn Trait` types (fat pointer = data ptr + vtable ptr)
97///
98/// # Thread Safety
99///
100/// `BoxSlot` is `!Send` and `!Sync`. It must only be used from the
101/// thread that created it.
102///
103/// # Compile-Time Safety
104///
105/// [`try_new`](Self::try_new) and [`new`](Self::new) include `const`
106/// assertions that verify:
107/// - `size_of::<T>() <= N` — T fits in the slot
108/// - `align_of::<T>() <= 8` — T alignment is compatible
109///
110/// Violations are compile errors, not runtime panics.
#[must_use = "dropping BoxSlot returns it to the allocator"]
pub struct BoxSlot<T: ?Sized, A: Alloc> {
    // Points at the stored value: thin for Sized T, fat (data ptr + vtable)
    // for dyn Trait. The data address is also the SlotCell address (see Drop).
    ptr: *mut T,
    // `*const ()` suppresses Send/Sync; `A` ties the handle to its allocator
    // without storing one (all allocator operations are associated functions).
    _marker: PhantomData<(A, *const ())>,
}
116
117// =============================================================================
118// Sized-only constructors (bounded)
119// =============================================================================
120
121impl<T, A: BoundedByteAlloc> BoxSlot<T, A> {
122 /// Tries to create a new slot containing the given value.
123 ///
124 /// Returns `Err(Full(value))` if the allocator is at capacity,
125 /// giving the value back to the caller.
126 ///
127 /// # Compile-Time Checks
128 ///
129 /// Fails to compile if `T` is too large or too aligned for the slot.
130 #[inline]
131 pub fn try_new(value: T) -> Result<Self, Full<T>> {
132 const {
133 assert!(
134 size_of::<T>() <= size_of::<A::Item>(),
135 "T does not fit in byte slab slot"
136 );
137 };
138 const {
139 assert!(
140 align_of::<T>() <= align_of::<A::Item>(),
141 "T alignment exceeds slot alignment"
142 );
143 };
144
145 match A::claim_raw() {
146 Some(slot_ptr) => {
147 // SAFETY: slot_ptr is a valid, vacant slot exclusively owned
148 // by us. T fits within AlignedBytes<N> (const asserted above).
149 // SlotCell is repr(C) union with fields at offset 0;
150 // ManuallyDrop and MaybeUninit are transparent; AlignedBytes
151 // is repr(C) with bytes at offset 0. So slot_ptr points to
152 // where T's bytes go.
153 unsafe {
154 write_and_zero_pad::<T, A>(slot_ptr, value);
155 }
156 Ok(BoxSlot {
157 ptr: slot_ptr as *mut T,
158 _marker: PhantomData,
159 })
160 }
161 None => Err(Full(value)),
162 }
163 }
164
165 /// Tries to create a slot containing `value`, returning a handle typed
166 /// as `BoxSlot<U, A>` where `U: ?Sized`.
167 ///
168 /// The `coerce` function converts the concrete `*mut T` to a fat pointer
169 /// `*mut U` (e.g., `|p| p as *mut dyn Trait`).
170 ///
171 /// # Compile-Time Checks
172 ///
173 /// Same as [`try_new`](Self::try_new).
174 #[inline]
175 pub fn try_new_as<U: ?Sized>(
176 value: T,
177 coerce: fn(*mut T) -> *mut U,
178 ) -> Result<BoxSlot<U, A>, Full<T>> {
179 match Self::try_new(value) {
180 Ok(slot) => Ok(slot.unsize(coerce)),
181 Err(full) => Err(full),
182 }
183 }
184}
185
186// =============================================================================
187// Sized-only constructors (unbounded)
188// =============================================================================
189
impl<T, A: UnboundedByteAlloc> BoxSlot<T, A> {
    /// Creates a new slot containing the given value.
    ///
    /// Always succeeds — grows the allocator if needed.
    ///
    /// # Compile-Time Checks
    ///
    /// Fails to compile if `T` is too large or too aligned for the slot.
    #[inline]
    pub fn new(value: T) -> Self {
        // Both checks are evaluated at compile time; violations are build
        // errors, never runtime panics.
        const {
            assert!(
                size_of::<T>() <= size_of::<A::Item>(),
                "T does not fit in byte slab slot"
            );
        };
        const {
            assert!(
                align_of::<T>() <= align_of::<A::Item>(),
                "T alignment exceeds slot alignment"
            );
        };

        // Infallible: the unbounded allocator grows on demand.
        let slot_ptr = A::claim_raw();
        // SAFETY: Same as try_new — slot_ptr is valid and exclusively ours.
        unsafe {
            write_and_zero_pad::<T, A>(slot_ptr, value);
        }
        BoxSlot {
            ptr: slot_ptr as *mut T,
            _marker: PhantomData,
        }
    }

    /// Creates a slot containing `value`, returning a handle typed as
    /// `BoxSlot<U, A>` where `U: ?Sized`.
    ///
    /// The `coerce` function converts the concrete `*mut T` to a fat pointer
    /// `*mut U` (e.g., `|p| p as *mut dyn Trait`).
    ///
    /// # Compile-Time Checks
    ///
    /// Same as [`new`](Self::new).
    #[inline]
    pub fn new_as<U: ?Sized>(value: T, coerce: fn(*mut T) -> *mut U) -> BoxSlot<U, A> {
        Self::new(value).unsize(coerce)
    }
}
238
239// =============================================================================
240// Sized-only methods
241// =============================================================================
242
243impl<T, A: Alloc> BoxSlot<T, A> {
244 /// Extracts the value from the slot, deallocating the slot.
245 #[inline]
246 pub fn into_inner(self) -> T {
247 // SAFETY: T is Sized, so self.ptr is a thin pointer whose address
248 // is the start of the SlotCell. Read the value, reconstruct the
249 // Slot for freeing.
250 let data_ptr = self.ptr;
251 mem::forget(self);
252 let value = unsafe { ptr::read(data_ptr) };
253 // SAFETY: data_ptr is the address of the SlotCell<A::Item>.
254 // Reconstruct RawSlot to pass to A::free.
255 let slot = unsafe { RawSlot::from_ptr(data_ptr as *mut SlotCell<A::Item>) };
256 unsafe { A::free(slot) };
257 value
258 }
259
260 /// Replaces the value in the slot, returning the old value.
261 #[inline]
262 pub fn replace(&mut self, value: T) -> T {
263 // SAFETY: We own the slot exclusively (&mut self). T is at offset 0.
264 unsafe {
265 let old = ptr::read(self.ptr);
266 ptr::write(self.ptr, value);
267 old
268 }
269 }
270
271 /// Converts this `BoxSlot<T, A>` into a `BoxSlot<U, A>` where
272 /// `U: ?Sized`, using the given coercion function.
273 ///
274 /// This is the low-level API for unsizing. For convenience, use the
275 /// [`box_dyn!`](crate::box_dyn) or
276 /// [`try_box_dyn!`](crate::try_box_dyn) macros.
277 ///
278 /// # Example
279 ///
280 /// ```ignore
281 /// let sized: BoxSlot<MyHandler, A> = BoxSlot::new(handler);
282 /// let dyn_slot: BoxSlot<dyn Handler<E>, A> = sized.unsize(|p| p as *mut dyn Handler<E>);
283 /// ```
284 #[inline]
285 pub fn unsize<U: ?Sized>(self, coerce: fn(*mut T) -> *mut U) -> BoxSlot<U, A> {
286 let thin_ptr = self.ptr;
287 let fat_ptr = coerce(thin_ptr);
288 // Verify the coercion didn't change the data pointer.
289 // This is assert, not debug_assert — an incorrect coerce function
290 // would cause UB in Drop (wrong data pointer → wrong slot freed).
291 assert_eq!(
292 fat_ptr as *const () as usize, thin_ptr as *const () as usize,
293 "coerce function must not change the data pointer address"
294 );
295 mem::forget(self);
296 BoxSlot {
297 ptr: fat_ptr,
298 _marker: PhantomData,
299 }
300 }
301}
302
303// =============================================================================
304// ?Sized methods
305// =============================================================================
306
impl<T: ?Sized, A: Alloc> BoxSlot<T, A> {
    /// Leaks the slot permanently, returning an immutable reference.
    ///
    /// The value will never be dropped or deallocated.
    #[inline]
    pub fn leak(self) -> LocalStatic<T> {
        // Cast to *const T: the leaked handle only exposes shared access.
        let ptr = self.ptr.cast_const();
        // Skip Drop — the slot is intentionally never returned to the allocator.
        mem::forget(self);
        // SAFETY: Slot is permanently leaked. ptr points to a valid T.
        unsafe { LocalStatic::new(ptr) }
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses. The BoxSlot owns
        // the slot, so the value cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses. We have exclusive
        // access (&mut self).
        unsafe { Pin::new_unchecked(&mut **self) }
    }
}
341
342// =============================================================================
343// Trait Implementations
344// =============================================================================
345
346impl<T: ?Sized, A: Alloc> Deref for BoxSlot<T, A> {
347 type Target = T;
348
349 #[inline]
350 fn deref(&self) -> &T {
351 // SAFETY: self.ptr points to a valid, occupied T value within the
352 // slab. For Sized T this is a thin pointer cast; for dyn Trait this
353 // is a fat pointer that carries the vtable.
354 unsafe { &*self.ptr }
355 }
356}
357
358impl<T: ?Sized, A: Alloc> DerefMut for BoxSlot<T, A> {
359 #[inline]
360 fn deref_mut(&mut self) -> &mut T {
361 // SAFETY: We have &mut self, guaranteeing exclusive access.
362 unsafe { &mut *self.ptr }
363 }
364}
365
366impl<T: ?Sized, A: Alloc> AsRef<T> for BoxSlot<T, A> {
367 #[inline]
368 fn as_ref(&self) -> &T {
369 self
370 }
371}
372
373impl<T: ?Sized, A: Alloc> AsMut<T> for BoxSlot<T, A> {
374 #[inline]
375 fn as_mut(&mut self) -> &mut T {
376 self
377 }
378}
379
380impl<T: ?Sized, A: Alloc> Borrow<T> for BoxSlot<T, A> {
381 #[inline]
382 fn borrow(&self) -> &T {
383 self
384 }
385}
386
387impl<T: ?Sized, A: Alloc> BorrowMut<T> for BoxSlot<T, A> {
388 #[inline]
389 fn borrow_mut(&mut self) -> &mut T {
390 self
391 }
392}
393
impl<T: ?Sized, A: Alloc> Drop for BoxSlot<T, A> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: We own the slot. Drop T first, then free the slot.
        // For dyn Trait, drop_in_place dispatches through the vtable.
        // Extract the data pointer (drops vtable for fat ptrs) to
        // reconstruct the Slot for A::free.
        //
        // A::free MUST NOT call drop_in_place — it only returns the slot
        // to the freelist. We handle the drop here. This is guaranteed by
        // the byte Alloc trait's free() contract (see alloc.rs).
        //
        // Order matters: the value must be dead before the slot re-enters
        // the freelist, otherwise a re-claimed slot could overwrite a live T.
        unsafe {
            ptr::drop_in_place(self.ptr);
            let data_ptr = self.ptr as *mut () as *mut SlotCell<A::Item>;
            let slot = RawSlot::from_ptr(data_ptr);
            A::free(slot);
        }
    }
}
413
414impl<T: fmt::Debug + ?Sized, A: Alloc> fmt::Debug for BoxSlot<T, A> {
415 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
416 f.debug_struct("BoxSlot").field("value", &&**self).finish()
417 }
418}
419
420// =============================================================================
421// Slot<T: ?Sized>
422// =============================================================================
423
424/// Move-only handle to a value stored in a byte slab.
425///
426/// Unlike [`BoxSlot`] (TLS allocator), `Slot` is for struct-owned slabs
427/// (`bounded::Slab<AlignedBytes<N>>` or `unbounded::Slab<AlignedBytes<N>>`).
428/// It does NOT auto-free on drop — the caller must return it to the slab via
429/// [`remove`](crate::bounded::Slab::remove),
430/// [`take_value`](crate::bounded::Slab::take_value), or
431/// [`reclaim`](crate::bounded::Slab::reclaim).
432///
433/// # Size
434///
435/// - 8 bytes for `Sized` types (thin pointer)
436/// - 16 bytes for `dyn Trait` types (fat pointer)
437///
438/// # Thread Safety
439///
440/// `Slot` is `!Send` and `!Sync`.
#[must_use = "slot must be freed via slab.remove() or slab.take_value()"]
pub struct Slot<T: ?Sized> {
    // Points at the stored value: thin for Sized T, fat (data ptr + vtable)
    // for dyn Trait. The data address is also the SlotCell address.
    ptr: *mut T,
    _marker: PhantomData<*const ()>, // !Send + !Sync
}
446
447// =============================================================================
448// Slot — Sized-only methods
449// =============================================================================
450
impl<T> Slot<T> {
    /// Unsizes this handle (e.g., concrete → dyn Trait).
    ///
    /// The `coerce` function converts the concrete `*mut T` to a fat pointer
    /// `*mut U` (e.g., `|p| p as *mut dyn Trait`).
    #[inline]
    pub fn unsize<U: ?Sized>(self, coerce: fn(*mut T) -> *mut U) -> Slot<U> {
        let thin_ptr = self.ptr;
        let fat_ptr = coerce(thin_ptr);
        // assert, not debug_assert — an incorrect coerce function would cause
        // UB when the slab frees the wrong slot.
        assert_eq!(
            fat_ptr as *const () as usize, thin_ptr as *const () as usize,
            "coerce function must not change the data pointer address"
        );
        // Skip the debug-mode leak detector; ownership moves into the new handle.
        mem::forget(self);
        Slot {
            ptr: fat_ptr,
            _marker: PhantomData,
        }
    }
}
473
474// =============================================================================
475// Slot — ?Sized methods
476// =============================================================================
477
impl<T: ?Sized> Slot<T> {
    /// Creates a `Slot` from a raw pointer.
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a valid, live value within a byte slab
    /// - The caller transfers ownership to the `Slot`
    #[inline]
    pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
        Slot {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Extracts the raw pointer, consuming the `Slot` without running
    /// the debug-mode leak detector.
    #[inline]
    pub(crate) fn into_raw(self) -> *mut T {
        let ptr = self.ptr;
        // forget, not drop: the debug Drop impl would panic on an unfreed Slot.
        mem::forget(self);
        ptr
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    #[inline]
    pub fn pin(&self) -> Pin<&T> {
        // SAFETY: Slab values have stable addresses. The Slot owns
        // the value, so it cannot be freed while this reference exists.
        unsafe { Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// Slab-allocated values have stable addresses — they never move while
    /// the slot is occupied.
    #[inline]
    pub fn pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: Slab values have stable addresses. We have exclusive
        // access (&mut self).
        unsafe { Pin::new_unchecked(&mut **self) }
    }
}
524
525// =============================================================================
526// Slot — Trait Implementations
527// =============================================================================
528
529impl<T: ?Sized> Deref for Slot<T> {
530 type Target = T;
531
532 #[inline]
533 fn deref(&self) -> &T {
534 // SAFETY: self.ptr points to a valid, occupied T value within the
535 // slab. For Sized T this is a thin pointer; for dyn Trait this is
536 // a fat pointer carrying the vtable.
537 unsafe { &*self.ptr }
538 }
539}
540
541impl<T: ?Sized> DerefMut for Slot<T> {
542 #[inline]
543 fn deref_mut(&mut self) -> &mut T {
544 // SAFETY: We have &mut self, guaranteeing exclusive access.
545 unsafe { &mut *self.ptr }
546 }
547}
548
549impl<T: ?Sized> AsRef<T> for Slot<T> {
550 #[inline]
551 fn as_ref(&self) -> &T {
552 self
553 }
554}
555
556impl<T: ?Sized> AsMut<T> for Slot<T> {
557 #[inline]
558 fn as_mut(&mut self) -> &mut T {
559 self
560 }
561}
562
563impl<T: ?Sized> Borrow<T> for Slot<T> {
564 #[inline]
565 fn borrow(&self) -> &T {
566 self
567 }
568}
569
570impl<T: ?Sized> BorrowMut<T> for Slot<T> {
571 #[inline]
572 fn borrow_mut(&mut self) -> &mut T {
573 self
574 }
575}
576
577impl<T: fmt::Debug + ?Sized> fmt::Debug for Slot<T> {
578 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
579 f.debug_struct("Slot").field("value", &&**self).finish()
580 }
581}
582
583#[cfg(debug_assertions)]
584impl<T: ?Sized> Drop for Slot<T> {
585 fn drop(&mut self) {
586 if std::thread::panicking() {
587 // During unwinding: log but don't abort. Leak is lesser evil than abort.
588 eprintln!(
589 "byte::Slot<{}> leaked during panic unwind (was not freed)",
590 std::any::type_name::<T>()
591 );
592 } else {
593 panic!(
594 "byte::Slot<{}> dropped without being freed — \
595 call slab.remove() or slab.take_value()",
596 std::any::type_name::<T>()
597 );
598 }
599 }
600}
601
602// =============================================================================
603// Internal helpers
604// =============================================================================
605
/// Writes `value` into a slot and zeroes trailing bytes.
///
/// Used by both the bounded and unbounded `BoxSlot` constructors.
///
/// # Safety
///
/// - `slot_ptr` must be a valid, exclusively-owned, vacant slot
/// - `T` must fit within `A::Item` (caller must const-assert)
#[inline]
unsafe fn write_and_zero_pad<T, A: Alloc>(slot_ptr: *mut SlotCell<A::Item>, value: T) {
    // SAFETY: Caller guarantees slot_ptr is valid and exclusively owned.
    // T fits within A::Item (caller must const-assert).
    unsafe {
        // The slot data lives at offset 0, so the cell pointer doubles as
        // the destination for T's bytes.
        ptr::write(slot_ptr as *mut T, value);
        // Ensures the full AlignedBytes<N> is deterministically initialized.
        // When size_of::<T>() == size_of::<A::Item>(), the compiler
        // eliminates this entirely.
        let t_size = size_of::<T>();
        let slot_size = size_of::<A::Item>();
        if t_size < slot_size {
            ptr::write_bytes((slot_ptr as *mut u8).add(t_size), 0, slot_size - t_size);
        }
    }
}
628
629// =============================================================================
630// Convenience macros
631// =============================================================================
632
633/// Creates a `BoxSlot<dyn Trait, A>` from a concrete value.
634///
635/// For unbounded byte allocators (always succeeds).
636///
637/// # Example
638///
639/// ```ignore
640/// let handler = nexus_slab::box_dyn!(
641/// msg_alloc::Allocator, dyn Handler<E>, my_handler
642/// );
643/// ```
#[macro_export]
macro_rules! box_dyn {
    // Expansion is identical to the single-line original; only the
    // formatting differs.
    ($alloc:ty, $dyn_ty:ty, $value:expr) => {{
        <$crate::byte::BoxSlot<_, $alloc>>::new_as($value, |__p| __p as *mut $dyn_ty)
    }};
}
648
649/// Creates a `BoxSlot<dyn Trait, A>` from a concrete value.
650///
651/// For bounded byte allocators (returns `Result`).
652///
653/// # Example
654///
655/// ```ignore
656/// let handler = nexus_slab::try_box_dyn!(
657/// msg_alloc::Allocator, dyn Handler<E>, my_handler
658/// )?;
659/// ```
#[macro_export]
macro_rules! try_box_dyn {
    // Expansion is identical to the single-line original; only the
    // formatting differs.
    ($alloc:ty, $dyn_ty:ty, $value:expr) => {{
        <$crate::byte::BoxSlot<_, $alloc>>::try_new_as($value, |__p| __p as *mut $dyn_ty)
    }};
}
664
665// =============================================================================
666// Raw slab helpers — bounded::Slab<AlignedBytes<N>>
667// =============================================================================
668
impl<const N: usize> crate::bounded::Slab<AlignedBytes<N>> {
    /// Inserts a value into the slab, returning a [`Slot`] handle.
    ///
    /// Returns `Err(value)` if the slab is full.
    ///
    /// # Compile-Time Checks
    ///
    /// Fails to compile if `T` is too large or too aligned for the slot.
    #[inline]
    pub fn try_insert<T>(&self, value: T) -> Result<Slot<T>, T> {
        const {
            assert!(size_of::<T>() <= N, "T does not fit in byte slab slot");
        };
        const {
            assert!(
                align_of::<T>() <= align_of::<AlignedBytes<N>>(),
                "T alignment exceeds slot alignment"
            );
        };

        match self.claim_ptr() {
            Some(slot_ptr) => {
                // SAFETY: slot_ptr is valid and exclusively ours from claim_ptr.
                // T fits (const-asserted). SlotCell repr(C) union has data at
                // offset 0.
                unsafe {
                    let t_ptr = slot_ptr as *mut T;
                    ptr::write(t_ptr, value);
                    // Zero the tail so the full AlignedBytes<N> is
                    // deterministically initialized.
                    let t_size = size_of::<T>();
                    if t_size < N {
                        ptr::write_bytes((slot_ptr as *mut u8).add(t_size), 0, N - t_size);
                    }
                }
                // SAFETY: We just wrote a valid T at slot_ptr.
                Ok(unsafe { Slot::from_raw(slot_ptr as *mut T) })
            }
            None => Err(value),
        }
    }

    /// Drops the value and frees the slot.
    ///
    /// Handles both thin and fat pointers: extracts the data pointer for
    /// freeing regardless of whether `T` is `Sized` or `dyn Trait`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn remove<T: ?Sized>(&self, slot: Slot<T>) {
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        unsafe {
            // For dyn Trait, drop_in_place dispatches through the vtable.
            ptr::drop_in_place(ptr);
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }

    /// Extracts the value and frees the slot (Sized only).
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn take_value<T>(&self, slot: Slot<T>) -> T {
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        // Move the value out before the slot returns to the freelist.
        let value = unsafe { ptr::read(ptr) };
        let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
        unsafe { self.free_ptr(data_ptr) };
        value
    }

    /// Frees the slot without dropping the value.
    ///
    /// Use when the value has already been moved out or dropped.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    /// - The value must already be dropped or moved out
    #[inline]
    pub unsafe fn reclaim<T: ?Sized>(&self, slot: Slot<T>) {
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab and value handled.
        unsafe {
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }
}
773
774// =============================================================================
775// Raw slab helpers — unbounded::Slab<AlignedBytes<N>>
776// =============================================================================
777
impl<const N: usize> crate::unbounded::Slab<AlignedBytes<N>> {
    /// Inserts a value into the slab, returning a [`Slot`] handle.
    ///
    /// Always succeeds — grows the slab if needed.
    ///
    /// # Compile-Time Checks
    ///
    /// Fails to compile if `T` is too large or too aligned for the slot.
    #[inline]
    pub fn insert<T>(&self, value: T) -> Slot<T> {
        const {
            assert!(size_of::<T>() <= N, "T does not fit in byte slab slot");
        };
        const {
            assert!(
                align_of::<T>() <= align_of::<AlignedBytes<N>>(),
                "T alignment exceeds slot alignment"
            );
        };

        // Chunk index is unused here; only the slot pointer matters.
        let (slot_ptr, _chunk_idx) = self.claim_ptr();
        // SAFETY: slot_ptr is valid and exclusively ours from claim_ptr.
        unsafe {
            let t_ptr = slot_ptr as *mut T;
            ptr::write(t_ptr, value);
            // Zero the tail so the full AlignedBytes<N> is
            // deterministically initialized.
            let t_size = size_of::<T>();
            if t_size < N {
                ptr::write_bytes((slot_ptr as *mut u8).add(t_size), 0, N - t_size);
            }
        }
        // SAFETY: We just wrote a valid T at slot_ptr.
        unsafe { Slot::from_raw(slot_ptr as *mut T) }
    }

    /// Drops the value and frees the slot.
    ///
    /// Handles both thin and fat pointers: extracts the data pointer for
    /// freeing regardless of whether `T` is `Sized` or `dyn Trait`.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn remove<T: ?Sized>(&self, slot: Slot<T>) {
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        unsafe {
            // For dyn Trait, drop_in_place dispatches through the vtable.
            ptr::drop_in_place(ptr);
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }

    /// Extracts the value and frees the slot (Sized only).
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    #[inline]
    pub unsafe fn take_value<T>(&self, slot: Slot<T>) -> T {
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab.
        // Move the value out before the slot returns to the freelist.
        let value = unsafe { ptr::read(ptr) };
        let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
        unsafe { self.free_ptr(data_ptr) };
        value
    }

    /// Frees the slot without dropping the value.
    ///
    /// Use when the value has already been moved out or dropped.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this slab
    /// - The value must already be dropped or moved out
    #[inline]
    pub unsafe fn reclaim<T: ?Sized>(&self, slot: Slot<T>) {
        let ptr = slot.into_raw();
        debug_assert!(
            self.contains_ptr(ptr as *const ()),
            "slot was not allocated from this slab"
        );
        // SAFETY: Caller guarantees slot came from this slab and value handled.
        unsafe {
            let data_ptr = ptr as *mut () as *mut SlotCell<AlignedBytes<N>>;
            self.free_ptr(data_ptr);
        }
    }
}