// nexus_slab/shared.rs
//! Shared internals for bounded and unbounded slab implementations.

use core::borrow::{Borrow, BorrowMut};
use core::fmt;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::ops::{Deref, DerefMut};

// =============================================================================
// Full<T>
// =============================================================================
/// Error returned when a bounded allocator is full.
///
/// Carries the value that failed to allocate, so the caller can recover it
/// via [`into_inner()`](Self::into_inner) and retry or fall back.
pub struct Full<T>(pub T);

impl<T> Full<T> {
    /// Consumes the error, returning the value that could not be allocated.
    #[inline]
    pub fn into_inner(self) -> T {
        let Full(value) = self;
        value
    }
}

impl<T> fmt::Debug for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `T` carries no `Debug` bound, so the payload is elided.
        write!(f, "Full(..)")
    }
}

impl<T> fmt::Display for Full<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "allocator full")
    }
}

#[cfg(feature = "std")]
impl<T> std::error::Error for Full<T> {}

// =============================================================================
// SlotCell
// =============================================================================

/// SLUB-style slot: freelist pointer overlaid on value storage.
///
/// When vacant: `next_free` is active — points to next free slot (or null).
/// When occupied: `value` is active — contains the user's `T`.
///
/// These fields occupy the SAME bytes. Writing `value` overwrites `next_free`
/// and vice versa. There is no header, no tag, no sentinel — the Slot RAII
/// handle (`Slot`) is the proof of occupancy.
///
/// Size: `max(size_of::<*mut _>(), size_of::<T>())`, rounded up to the larger
/// of the two alignments (the pointer field is 8 bytes on 64-bit targets).
#[repr(C)]
pub union SlotCell<T> {
    // Active while vacant: intrusive freelist link.
    next_free: *mut SlotCell<T>,
    // Active while occupied. `ManuallyDrop` is required for union fields whose
    // type may have drop glue; `MaybeUninit` expresses that these bytes are
    // stale/uninitialized whenever the slot is vacant.
    value: ManuallyDrop<MaybeUninit<T>>,
}

impl<T> SlotCell<T> {
    /// Creates a new vacant slot with the given next_free pointer.
    #[inline]
    pub(crate) fn vacant(next_free: *mut SlotCell<T>) -> Self {
        SlotCell { next_free }
    }

    /// Writes a value into this slot, transitioning it from vacant to occupied.
    ///
    /// The assignment goes through `ManuallyDrop`, so the stale freelist
    /// pointer bytes are simply overwritten — no drop glue runs.
    ///
    /// # Safety
    ///
    /// The slot must be vacant (no live value present).
    #[inline]
    pub(crate) unsafe fn write_value(&mut self, value: T) {
        self.value = ManuallyDrop::new(MaybeUninit::new(value));
    }

    /// Reads the value out of this slot without dropping it.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    /// After this call, the caller owns the value and the slot must not be
    /// read again without a subsequent write.
    #[inline]
    pub(crate) unsafe fn read_value(&self) -> T {
        // SAFETY: Caller guarantees the slot is occupied.
        // `ptr::read` is a bitwise copy that leaves the slot bytes untouched;
        // logical ownership of the `T` moves to the caller.
        unsafe { core::ptr::read(self.value.as_ptr()) }
    }

    /// Drops the value in place without returning it.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    #[inline]
    pub(crate) unsafe fn drop_value_in_place(&mut self) {
        // SAFETY: Caller guarantees the slot is occupied.
        unsafe {
            core::ptr::drop_in_place((*self.value).as_mut_ptr());
        }
    }

    /// Returns a reference to the occupied value.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    #[inline]
    pub unsafe fn value_ref(&self) -> &T {
        // SAFETY: Caller guarantees the slot is occupied.
        unsafe { self.value.assume_init_ref() }
    }

    /// Returns a mutable reference to the occupied value.
    ///
    /// # Safety
    ///
    /// The slot must be occupied with a valid `T`.
    /// Caller must have exclusive access.
    #[inline]
    pub unsafe fn value_mut(&mut self) -> &mut T {
        // SAFETY: Caller guarantees the slot is occupied.
        unsafe { (*self.value).assume_init_mut() }
    }

    /// Returns a raw const pointer to the value storage.
    ///
    /// # Safety
    ///
    /// The slot must be occupied.
    #[inline]
    pub unsafe fn value_ptr(&self) -> *const T {
        // SAFETY: Caller guarantees the slot is occupied.
        unsafe { self.value.as_ptr() }
    }

    /// Returns the next_free pointer.
    ///
    /// # Safety
    ///
    /// The slot must be vacant.
    #[inline]
    pub(crate) unsafe fn get_next_free(&self) -> *mut SlotCell<T> {
        // SAFETY: Caller guarantees the slot is vacant.
        unsafe { self.next_free }
    }

    /// Sets the next_free pointer.
    ///
    /// # Safety
    ///
    /// Caller must be transitioning this slot to vacant. Writing a union
    /// field never drops the other variant, so any live value must have been
    /// taken or dropped beforehand — otherwise it leaks.
    #[inline]
    pub(crate) unsafe fn set_next_free(&mut self, next: *mut SlotCell<T>) {
        self.next_free = next;
    }
}

// =============================================================================
// Slot<T> — Raw Pointer Wrapper
// =============================================================================

/// Raw slot handle — pointer wrapper, NOT RAII.
///
/// `Slot<T>` is a thin wrapper around a pointer to a [`SlotCell<T>`]. It is
/// analogous to `malloc` returning a pointer: the caller owns the memory and
/// must explicitly free it via [`Slab::free()`](crate::bounded::Slab::free).
///
/// # Size
///
/// One pointer (8 bytes on 64-bit targets); `#[repr(transparent)]` guarantees
/// the same layout as the raw pointer itself.
///
/// # Thread Safety
///
/// `Slot` is `!Send` and `!Sync` (it contains a raw pointer, which suppresses
/// both auto traits). It must only be used from the thread that created it.
///
/// # Debug-Mode Leak Detection
///
/// In debug builds, dropping a `Slot` without calling `free()` or
/// `take()` panics. Use [`into_raw()`](Self::into_raw) to extract the
/// pointer and disarm the detector. In release builds there is no `Drop`
/// impl — forgetting to call `free()` silently leaks the slot.
///
/// # Borrow Traits
///
/// `Slot<T>` implements `Borrow<T>` and `BorrowMut<T>`, enabling use as
/// HashMap keys that borrow `T` for lookups.
/// NOTE(review): keyed lookup also requires consistent `Hash`/`Eq` impls,
/// which are not visible in this file — confirm they exist elsewhere.
#[repr(transparent)]
pub struct Slot<T>(*mut SlotCell<T>);

impl<T> Slot<T> {
    /// Internal construction from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be a valid pointer to an occupied `SlotCell<T>` within a slab.
    #[inline]
    pub(crate) unsafe fn from_ptr(ptr: *mut SlotCell<T>) -> Self {
        Slot(ptr)
    }

    /// Returns the raw pointer to the slot cell.
    ///
    /// Does not transfer ownership: the handle remains responsible for the
    /// slot, and the debug leak detector stays armed.
    #[inline]
    pub fn as_ptr(&self) -> *mut SlotCell<T> {
        self.0
    }

    /// Consumes the handle, returning the raw pointer without running Drop.
    ///
    /// The caller is responsible for the slot from this point — either
    /// reconstruct via [`from_raw()`](Self::from_raw) or manage manually.
    /// Disarms the debug-mode leak detector.
    #[inline]
    pub fn into_raw(self) -> *mut SlotCell<T> {
        let ptr = self.0;
        // `forget` skips the debug-only Drop impl (the leak-detector panic).
        core::mem::forget(self);
        ptr
    }

    /// Reconstructs a `Slot` from a raw pointer previously obtained
    /// via [`into_raw()`](Self::into_raw).
    ///
    /// # Safety
    ///
    /// `ptr` must be a valid pointer to an occupied `SlotCell<T>` within
    /// a slab, originally obtained from `into_raw()` on this type.
    #[inline]
    pub unsafe fn from_raw(ptr: *mut SlotCell<T>) -> Self {
        Slot(ptr)
    }

    /// Creates a duplicate pointer to the same slot.
    ///
    /// # Safety
    ///
    /// Caller must ensure the slot is not freed while any clone exists.
    /// Intended for refcounting wrappers (e.g., nexus-collections' RcHandle).
    /// Note: in debug builds each duplicate arms its own leak detector.
    #[inline]
    pub unsafe fn clone_ptr(&self) -> Self {
        Slot(self.0)
    }

    /// Returns a pinned reference to the value.
    ///
    /// Slab-backed memory never moves (no reallocation), so `Pin` is
    /// sound without requiring `T: Unpin`. Useful for async code that
    /// needs `Pin<&mut T>` for polling futures stored in a slab.
    ///
    /// NOTE(review): `Pin` additionally promises the value is dropped in
    /// place and never moved out. If the slab's `take()` can move a
    /// previously pinned `!Unpin` value out of its slot, that violates the
    /// pin contract — confirm against the slab API.
    #[inline]
    pub fn pin(&self) -> core::pin::Pin<&T> {
        // SAFETY: The slab never moves its slot storage after init.
        // The value at this pointer is stable for the slot's lifetime.
        unsafe { core::pin::Pin::new_unchecked(&**self) }
    }

    /// Returns a pinned mutable reference to the value.
    ///
    /// See [`pin()`](Self::pin) for the safety rationale.
    #[inline]
    pub fn pin_mut(&mut self) -> core::pin::Pin<&mut T> {
        // SAFETY: Same as pin() — slab memory never moves.
        // We have &mut self, guaranteeing exclusive access.
        unsafe { core::pin::Pin::new_unchecked(&mut **self) }
    }
}

267impl<T> Deref for Slot<T> {
268 type Target = T;
269
270 #[inline]
271 fn deref(&self) -> &Self::Target {
272 // SAFETY: Slot was created from a valid, occupied SlotCell.
273 unsafe { (*self.0).value_ref() }
274 }
275}
276
277impl<T> DerefMut for Slot<T> {
278 #[inline]
279 fn deref_mut(&mut self) -> &mut Self::Target {
280 // SAFETY: We have &mut self, guaranteeing exclusive access.
281 unsafe { (*self.0).value_mut() }
282 }
283}
284
285impl<T> AsRef<T> for Slot<T> {
286 #[inline]
287 fn as_ref(&self) -> &T {
288 self
289 }
290}
291
292impl<T> AsMut<T> for Slot<T> {
293 #[inline]
294 fn as_mut(&mut self) -> &mut T {
295 self
296 }
297}
298
299impl<T> Borrow<T> for Slot<T> {
300 #[inline]
301 fn borrow(&self) -> &T {
302 self
303 }
304}
305
306impl<T> BorrowMut<T> for Slot<T> {
307 #[inline]
308 fn borrow_mut(&mut self) -> &mut T {
309 self
310 }
311}

// Slot is intentionally NOT Clone/Copy.
// Move-only semantics prevent double-free at compile time.

316impl<T: fmt::Debug> fmt::Debug for Slot<T> {
317 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
318 f.debug_struct("Slot").field("value", &**self).finish()
319 }
320}
321
// Debug-only leak detector: a `Slot` must be consumed by `free()`, `take()`,
// or `into_raw()`; reaching this Drop means the handle leaked.
#[cfg(debug_assertions)]
impl<T> Drop for Slot<T> {
    fn drop(&mut self) {
        // A panic inside Drop while already unwinding would abort the
        // process, so stay quiet in that case. `std::thread::panicking` needs
        // std; without the "std" feature this check is compiled out, and a
        // leak detected during unwind will escalate to an abort.
        #[cfg(feature = "std")]
        if std::thread::panicking() {
            return; // Don't double-panic during unwind
        }
        panic!(
            "Slot<{}> dropped without being freed — call slab.free(slot) or slab.take(slot)",
            core::any::type_name::<T>()
        );
    }
}