// nexus_timer/store.rs
1//! Slab storage traits for instance-based (non-ZST) slabs.
2//!
3//! Single trait hierarchy: [`SlabStore`] provides allocation and deallocation.
4//! [`BoundedStore`] extends it with fallible allocation for callers who want
5//! graceful error handling.
6//!
7//! # Allocation and OOM
8//!
9//! [`SlabStore::alloc`] always returns a valid slot. For unbounded slabs this
10//! is guaranteed by growth. For bounded slabs, exceeding capacity **panics**.
11//!
12//! This is a deliberate design choice: bounded capacity is a deployment
13//! constraint, not a runtime negotiation. If you hit the limit, your capacity
14//! planning is wrong and the system should fail loudly — the same way a
15//! process panics on OOM. Silently dropping timers or events is worse than
16//! crashing.
17//!
18//! Use [`BoundedStore::try_alloc`] if you need graceful error handling at
19//! specific call sites.
20
21use nexus_slab::Full;
22use nexus_slab::shared::{Slot, SlotCell};
23use nexus_slab::{bounded, unbounded};
24
25// Re-export concrete slab types so downstream crates (nexus-rt) can name
26// them in type defaults without adding nexus-slab as a direct dependency.
27pub use bounded::Slab as BoundedSlab;
28pub use unbounded::Slab as UnboundedSlab;
29
30// =============================================================================
31// Traits
32// =============================================================================
33
/// Base trait for slab storage — allocation, deallocation, and value extraction.
///
/// # Allocation
///
/// [`alloc`](Self::alloc) always returns a valid slot:
///
/// - **Unbounded slabs** grow as needed — allocation never fails.
/// - **Bounded slabs** panic if capacity is exceeded. This is intentional:
///   running out of pre-allocated capacity is a capacity planning error,
///   equivalent to OOM. The system should crash loudly rather than silently
///   drop work.
///
/// For fallible allocation on bounded slabs, use [`BoundedStore::try_alloc`].
///
/// # Safety
///
/// The trait is `unsafe` because unsafe callers rely on these invariants when
/// reasoning about slot ownership and lifetimes. Implementors must uphold:
///
/// - `free` must drop the value and return the slot to the freelist.
/// - `take` must move the value out and return the slot to the freelist.
/// - The slot must have been allocated from `self`.
pub unsafe trait SlabStore {
    /// The type stored in each slot.
    type Item;

    /// Allocates a slot with the given value.
    ///
    /// # Panics
    ///
    /// Panics if the store is at capacity (bounded slabs only). This is a
    /// capacity planning error — size your slabs for peak load.
    fn alloc(&self, value: Self::Item) -> Slot<Self::Item>;

    /// Drops the value and returns the slot to the freelist.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this store.
    /// - No references to the slot's value may exist.
    unsafe fn free(&self, slot: Slot<Self::Item>);

    /// Moves the value out and returns the slot to the freelist.
    ///
    /// # Safety
    ///
    /// - `slot` must have been allocated from this store.
    /// - No references to the slot's value may exist.
    unsafe fn take(&self, slot: Slot<Self::Item>) -> Self::Item;

    /// Returns a slot to the freelist by raw pointer.
    ///
    /// Does NOT drop the value — caller must have already dropped or moved it.
    /// Useful when only a raw `SlotCell` pointer is available rather than an
    /// owned [`Slot`].
    ///
    /// # Safety
    ///
    /// - `ptr` must point to a slot within this store.
    /// - The value must already be dropped or moved out.
    unsafe fn free_ptr(&self, ptr: *mut SlotCell<Self::Item>);
}
93
/// Bounded (fixed-capacity) storage — provides fallible allocation.
///
/// Use [`try_alloc`](Self::try_alloc) when you need graceful error handling.
/// For the common case where capacity exhaustion is a fatal error, use
/// [`SlabStore::alloc`] directly (it panics on bounded-full).
pub trait BoundedStore: SlabStore {
    /// Attempts to allocate a slot with the given value.
    ///
    /// Returns `Err(Full(value))` if storage is at capacity; the rejected
    /// value is handed back inside the error so the caller can recover it.
    fn try_alloc(&self, value: Self::Item) -> Result<Slot<Self::Item>, Full<Self::Item>>;
}
105
106// =============================================================================
107// Impls for bounded::Slab
108// =============================================================================
109
110// SAFETY: bounded::Slab::free drops the value and returns to freelist.
111// bounded::Slab::take moves value out and returns to freelist.
112// bounded::Slab::free_ptr returns slot to freelist without dropping.
113unsafe impl<T> SlabStore for bounded::Slab<T> {
114 type Item = T;
115
116 #[inline]
117 fn alloc(&self, value: T) -> Slot<T> {
118 self.try_alloc(value).unwrap_or_else(|full| {
119 // Drop the value inside Full, then panic.
120 drop(full);
121 panic!(
122 "bounded slab: capacity exceeded (type: {})",
123 std::any::type_name::<T>(),
124 );
125 })
126 }
127
128 #[inline]
129 unsafe fn free(&self, slot: Slot<T>) {
130 // SAFETY: caller guarantees slot was allocated from this slab
131 unsafe { bounded::Slab::free(self, slot) }
132 }
133
134 #[inline]
135 unsafe fn take(&self, slot: Slot<T>) -> T {
136 // SAFETY: caller guarantees slot was allocated from this slab
137 unsafe { bounded::Slab::take(self, slot) }
138 }
139
140 #[inline]
141 unsafe fn free_ptr(&self, ptr: *mut SlotCell<T>) {
142 // SAFETY: caller guarantees ptr is within this slab
143 unsafe { bounded::Slab::free_ptr(self, ptr) }
144 }
145}
146
147impl<T> BoundedStore for bounded::Slab<T> {
148 #[inline]
149 fn try_alloc(&self, value: T) -> Result<Slot<T>, Full<T>> {
150 bounded::Slab::try_alloc(self, value)
151 }
152}
153
154// =============================================================================
155// Impls for unbounded::Slab
156// =============================================================================
157
158// SAFETY: unbounded::Slab::free drops the value and returns to freelist.
159// unbounded::Slab::take moves value out and returns to freelist.
160// unbounded::Slab::free_ptr returns slot to freelist without dropping.
161unsafe impl<T> SlabStore for unbounded::Slab<T> {
162 type Item = T;
163
164 #[inline]
165 fn alloc(&self, value: T) -> Slot<T> {
166 unbounded::Slab::alloc(self, value)
167 }
168
169 #[inline]
170 unsafe fn free(&self, slot: Slot<T>) {
171 // SAFETY: caller guarantees slot was allocated from this slab
172 unsafe { unbounded::Slab::free(self, slot) }
173 }
174
175 #[inline]
176 unsafe fn take(&self, slot: Slot<T>) -> T {
177 // SAFETY: caller guarantees slot was allocated from this slab
178 unsafe { unbounded::Slab::take(self, slot) }
179 }
180
181 #[inline]
182 unsafe fn free_ptr(&self, ptr: *mut SlotCell<T>) {
183 // SAFETY: caller guarantees ptr is within this slab
184 unsafe { unbounded::Slab::free_ptr(self, ptr) }
185 }
186}
187
#[cfg(test)]
mod tests {
    use super::*;

    /// Allocate through the trait, read through the slot, then move the
    /// value back out with `take`.
    #[test]
    fn bounded_store_roundtrip() {
        let store = bounded::Slab::<u64>::with_capacity(16);
        let handle = SlabStore::alloc(&store, 42);
        assert_eq!(*handle, 42);
        // SAFETY: `handle` was allocated from `store` just above.
        let recovered = unsafe { SlabStore::take(&store, handle) };
        assert_eq!(recovered, 42);
    }

    /// Same roundtrip against the growable slab, releasing via `free`.
    #[test]
    fn unbounded_store_roundtrip() {
        let store = unbounded::Slab::<u64>::with_chunk_capacity(16);
        let handle = SlabStore::alloc(&store, 99);
        assert_eq!(*handle, 99);
        // SAFETY: `handle` was allocated from `store` just above.
        unsafe { SlabStore::free(&store, handle) };
    }

    /// `try_alloc` reports `Full` and hands the rejected value back intact.
    #[test]
    fn bounded_try_alloc_graceful() {
        let store = bounded::Slab::<u64>::with_capacity(1);
        let _occupied = BoundedStore::try_alloc(&store, 1).unwrap();
        let rejected = BoundedStore::try_alloc(&store, 2).unwrap_err();
        assert_eq!(rejected.into_inner(), 2);
    }

    /// The infallible `alloc` must panic once the single slot is taken.
    #[test]
    #[should_panic(expected = "capacity exceeded")]
    fn bounded_alloc_panics_on_full() {
        let store = bounded::Slab::<u64>::with_capacity(1);
        let _occupied = SlabStore::alloc(&store, 1);
        let _overflow = SlabStore::alloc(&store, 2); // panics
    }
}
226}