// nexus_slab/shared.rs
1//! Shared internals for bounded and unbounded slab implementations.
2
3use core::borrow::{Borrow, BorrowMut};
4use core::fmt;
5use core::mem::{ManuallyDrop, MaybeUninit};
6use core::ops::{Deref, DerefMut};
7
8// =============================================================================
9// Full<T>
10// =============================================================================
11
/// Error returned when a bounded allocator has no free slots.
///
/// Carries the rejected value so the caller can recover it, either through
/// the public `.0` field or via [`into_inner()`](Self::into_inner).
pub struct Full<T>(pub T);

impl<T> Full<T> {
    /// Consumes the error, returning the value that could not be allocated.
    #[inline]
    pub fn into_inner(self) -> T {
        let Full(value) = self;
        value
    }
}

// Manual impl (instead of derive) so `T: Debug` is not required and the
// payload is never printed.
impl<T> fmt::Debug for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Full(..)")
    }
}

impl<T> fmt::Display for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("allocator full")
    }
}

#[cfg(feature = "std")]
impl<T> std::error::Error for Full<T> {}
39
40// =============================================================================
41// SlotCell
42// =============================================================================
43
/// SLUB-style slot: freelist pointer overlaid on value storage.
///
/// When vacant: `next_free` is active — points to next free slot (or null).
/// When occupied: `value` is active — contains the user's `T`.
///
/// These fields occupy the SAME bytes. Writing `value` overwrites `next_free`
/// and vice versa. There is no header, no tag, no sentinel — the Slot RAII
/// handle (`Slot`) is the proof of occupancy.
///
/// Size: `max(size_of::<*mut ()>(), size_of::<T>())`, rounded up to the
/// stricter of the two alignments. (On 64-bit targets that is
/// `max(8, size_of::<T>())` for typical `T`.)
///
/// `repr(C)` guarantees both fields live at offset 0, which
/// `value_ptr_mut()` relies on when casting `*mut SlotCell<T>` to `*mut T`.
#[repr(C)]
pub union SlotCell<T> {
    /// Active while vacant: intrusive freelist link (null = end of list).
    next_free: *mut SlotCell<T>,
    /// Active while occupied. `ManuallyDrop` is required because unions
    /// never drop their fields; `MaybeUninit` makes the "no value yet"
    /// state representable without UB.
    value: ManuallyDrop<MaybeUninit<T>>,
}
59
impl<T> SlotCell<T> {
    /// Creates a new vacant slot whose freelist link is `next_free`.
    ///
    /// Pass null to terminate the freelist.
    #[inline]
    pub(crate) fn vacant(next_free: *mut SlotCell<T>) -> Self {
        SlotCell { next_free }
    }

    /// Writes a value into this slot, transitioning it from vacant to occupied.
    ///
    /// The assignment overwrites the bytes previously holding `next_free`,
    /// so the slot must already be unlinked from the freelist.
    ///
    /// # Safety
    ///
    /// The slot must be vacant (no live value present). Calling this on an
    /// occupied slot overwrites the old value without dropping it (a leak,
    /// though not immediate UB).
    #[inline]
    pub(crate) unsafe fn write_value(&mut self, value: T) {
        // Assigning a `ManuallyDrop` union field is itself a safe operation;
        // the `unsafe fn` contract exists only for the occupancy invariant.
        self.value = ManuallyDrop::new(MaybeUninit::new(value));
    }

    /// Reads the value out of this slot without dropping it.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    /// After this call, the caller owns the value and the slot must not be
    /// read again without a subsequent write — a second read would duplicate
    /// ownership of a non-`Copy` `T`.
    #[inline]
    pub(crate) unsafe fn read_value(&self) -> T {
        // SAFETY: Caller guarantees the slot is occupied. `ptr::read` moves
        // the bytes out bitwise, leaving the storage logically uninitialized.
        unsafe { core::ptr::read(self.value.as_ptr()) }
    }

    /// Drops the value in place without returning it.
    ///
    /// After this call the slot is logically vacant but its freelist link is
    /// NOT yet set; the caller is expected to follow up with
    /// [`set_next_free()`](Self::set_next_free).
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    #[inline]
    pub(crate) unsafe fn drop_value_in_place(&mut self) {
        // SAFETY: Caller guarantees the slot is occupied.
        unsafe {
            core::ptr::drop_in_place((*self.value).as_mut_ptr());
        }
    }

    /// Returns a reference to the occupied value.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    #[inline]
    pub unsafe fn value_ref(&self) -> &T {
        // SAFETY: Caller guarantees the slot is occupied, so the
        // `MaybeUninit` is initialized.
        unsafe { self.value.assume_init_ref() }
    }

    /// Returns a mutable reference to the occupied value.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    /// Caller must have exclusive access.
    #[inline]
    pub unsafe fn value_mut(&mut self) -> &mut T {
        // SAFETY: Caller guarantees the slot is occupied. The explicit deref
        // goes through `ManuallyDrop` to reach `MaybeUninit::assume_init_mut`.
        unsafe { (*self.value).assume_init_mut() }
    }

    /// Returns a raw const pointer to the value storage.
    ///
    /// # Safety
    ///
    /// The slot must be occupied.
    #[inline]
    pub unsafe fn value_ptr(&self) -> *const T {
        // SAFETY: Caller guarantees the slot is occupied.
        unsafe { self.value.as_ptr() }
    }

    /// Returns a raw mutable pointer to the value storage from a raw pointer.
    ///
    /// This avoids creating an intermediate `&SlotCell<T>` reference, which
    /// would give the result read-only provenance under stacked borrows.
    /// Use this when you need `*mut T` from a `*mut SlotCell<T>`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and point to an occupied `SlotCell<T>`.
    #[inline]
    pub unsafe fn value_ptr_mut(ptr: *mut SlotCell<T>) -> *mut T {
        // SAFETY: SlotCell is repr(C) and value is ManuallyDrop<MaybeUninit<T>>.
        // ManuallyDrop is repr(transparent), MaybeUninit is repr(transparent)
        // for the value. The value field is at offset 0 (repr(C) union).
        // We cast through the raw pointer without creating a reference,
        // preserving write provenance.
        ptr.cast::<T>()
    }

    /// Returns the next_free pointer.
    ///
    /// # Safety
    ///
    /// The slot must be vacant — reading this field while a value is live
    /// would reinterpret the value's leading bytes as a pointer.
    #[inline]
    pub(crate) unsafe fn get_next_free(&self) -> *mut SlotCell<T> {
        // SAFETY: Caller guarantees the slot is vacant.
        unsafe { self.next_free }
    }

    /// Sets the next_free pointer.
    ///
    /// # Safety
    ///
    /// Caller must be transitioning this slot to vacant. Any live value in
    /// the slot must already have been read out or dropped — this write
    /// clobbers its leading bytes.
    #[inline]
    pub(crate) unsafe fn set_next_free(&mut self, next: *mut SlotCell<T>) {
        // Writing a pointer-typed (Copy) union field is a safe operation;
        // the contract is about the slot's logical state.
        self.next_free = next;
    }
}
177
178// =============================================================================
179// Slot<T> — Raw Pointer Wrapper
180// =============================================================================
181
/// Raw slot handle — pointer wrapper, NOT RAII.
///
/// `Slot<T>` is a thin wrapper around a pointer to a [`SlotCell<T>`]. It is
/// analogous to `malloc` returning a pointer: the caller owns the memory and
/// must explicitly free it via [`Slab::free()`](crate::bounded::Slab::free).
///
/// # Size
///
/// One pointer (8 bytes on 64-bit targets); `repr(transparent)` makes it
/// layout-identical to `*mut SlotCell<T>`.
///
/// # Thread Safety
///
/// `Slot` is `!Send` and `!Sync` (the raw-pointer field suppresses the auto
/// traits). It must only be used from the thread that created it.
///
/// # Debug-Mode Leak Detection
///
/// In debug builds, dropping a `Slot` without calling `free()` or
/// `take()` panics. Use [`into_raw()`](Self::into_raw) to extract the
/// pointer and disarm the detector. In release builds there is no `Drop`
/// impl — forgetting to call `free()` silently leaks the slot.
///
/// # Borrow Traits
///
/// `Slot<T>` implements `Borrow<T>` and `BorrowMut<T>`, enabling use as
/// HashMap keys that borrow `T` for lookups.
#[repr(transparent)]
pub struct Slot<T>(*mut SlotCell<T>);
210
211impl<T> Slot<T> {
212 /// Internal construction from a raw pointer.
213 ///
214 /// # Safety
215 ///
216 /// `ptr` must be a valid pointer to an occupied `SlotCell<T>` within a slab.
217 #[inline]
218 pub(crate) unsafe fn from_ptr(ptr: *mut SlotCell<T>) -> Self {
219 Slot(ptr)
220 }
221
222 /// Returns the raw pointer to the slot cell.
223 #[inline]
224 pub fn as_ptr(&self) -> *mut SlotCell<T> {
225 self.0
226 }
227
228 /// Consumes the handle, returning the raw pointer without running Drop.
229 ///
230 /// The caller is responsible for the slot from this point — either
231 /// reconstruct via [`from_raw()`](Self::from_raw) or manage manually.
232 /// Disarms the debug-mode leak detector.
233 #[inline]
234 pub fn into_raw(self) -> *mut SlotCell<T> {
235 let ptr = self.0;
236 core::mem::forget(self);
237 ptr
238 }
239
240 /// Reconstructs a `Slot` from a raw pointer previously obtained
241 /// via [`into_raw()`](Self::into_raw).
242 ///
243 /// # Safety
244 ///
245 /// `ptr` must be a valid pointer to an occupied `SlotCell<T>` within
246 /// a slab, originally obtained from `into_raw()` on this type.
247 #[inline]
248 pub unsafe fn from_raw(ptr: *mut SlotCell<T>) -> Self {
249 Slot(ptr)
250 }
251
252 /// Creates a duplicate pointer to the same slot.
253 ///
254 /// # Safety
255 ///
256 /// Caller must ensure the slot is not freed while any clone exists.
257 /// Intended for refcounting wrappers (e.g., nexus-collections' RcHandle).
258 #[inline]
259 pub unsafe fn clone_ptr(&self) -> Self {
260 Slot(self.0)
261 }
262
263 /// Returns a pinned reference to the value.
264 ///
265 /// Slab-backed memory never moves (no reallocation), so `Pin` is
266 /// sound without requiring `T: Unpin`. Useful for async code that
267 /// needs `Pin<&mut T>` for polling futures stored in a slab.
268 #[inline]
269 pub fn pin(&self) -> core::pin::Pin<&T> {
270 // SAFETY: The slab never moves its slot storage after init.
271 // The value at this pointer is stable for the slot's lifetime.
272 unsafe { core::pin::Pin::new_unchecked(&**self) }
273 }
274
275 /// Returns a pinned mutable reference to the value.
276 ///
277 /// See [`pin()`](Self::pin) for the safety rationale.
278 #[inline]
279 pub fn pin_mut(&mut self) -> core::pin::Pin<&mut T> {
280 // SAFETY: Same as pin() — slab memory never moves.
281 // We have &mut self, guaranteeing exclusive access.
282 unsafe { core::pin::Pin::new_unchecked(&mut **self) }
283 }
284}
285
286impl<T> Deref for Slot<T> {
287 type Target = T;
288
289 #[inline]
290 fn deref(&self) -> &Self::Target {
291 // SAFETY: Slot was created from a valid, occupied SlotCell.
292 unsafe { (*self.0).value_ref() }
293 }
294}
295
296impl<T> DerefMut for Slot<T> {
297 #[inline]
298 fn deref_mut(&mut self) -> &mut Self::Target {
299 // SAFETY: We have &mut self, guaranteeing exclusive access.
300 unsafe { (*self.0).value_mut() }
301 }
302}
303
304impl<T> AsRef<T> for Slot<T> {
305 #[inline]
306 fn as_ref(&self) -> &T {
307 self
308 }
309}
310
311impl<T> AsMut<T> for Slot<T> {
312 #[inline]
313 fn as_mut(&mut self) -> &mut T {
314 self
315 }
316}
317
318impl<T> Borrow<T> for Slot<T> {
319 #[inline]
320 fn borrow(&self) -> &T {
321 self
322 }
323}
324
325impl<T> BorrowMut<T> for Slot<T> {
326 #[inline]
327 fn borrow_mut(&mut self) -> &mut T {
328 self
329 }
330}
331
332// Slot is intentionally NOT Clone/Copy.
333// Move-only semantics prevent double-free at compile time.
334
335impl<T: fmt::Debug> fmt::Debug for Slot<T> {
336 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
337 f.debug_struct("Slot").field("value", &**self).finish()
338 }
339}
340
// Debug-build leak detector: a `Slot` must be consumed by `free()`,
// `take()`, or `into_raw()`; reaching `drop` means the handle leaked.
// Release builds compile this impl out entirely, so there is zero cost
// (and no drop glue) in production.
#[cfg(debug_assertions)]
impl<T> Drop for Slot<T> {
    fn drop(&mut self) {
        #[cfg(feature = "std")]
        if std::thread::panicking() {
            return; // Don't double-panic during unwind
        }
        // NOTE(review): without the `std` feature there is no panicking()
        // check, so a Slot dropped during unwind double-panics (abort) —
        // confirm this is acceptable for no_std users.
        panic!(
            "Slot<{}> dropped without being freed — call slab.free(slot) or slab.take(slot)",
            core::any::type_name::<T>()
        );
    }
}