
laminar_core/alloc/scratch.rs

//! Thread-local scratch buffer for temporary allocations.
//!
//! Provides fast, allocation-free temporary storage for hot path code.
//! Each thread gets its own buffer that can be reset between events.

use std::cell::UnsafeCell;

/// Default scratch buffer size (64 KiB).
pub const DEFAULT_SCRATCH_SIZE: usize = 64 * 1024;

/// Thread-local scratch buffer for temporary allocations.
///
/// Provides fast, allocation-free temporary storage that can be used
/// in Ring 0 hot path code. The buffer is automatically reset after
/// each event is processed.
///
/// # Usage Pattern
///
/// ```rust,ignore
/// use laminar_core::alloc::ScratchBuffer;
///
/// fn process_event(event: &Event) {
///     // Allocate temporary space from the thread-local scratch buffer
///     let temp = ScratchBuffer::thread_local_alloc(256);
///     temp.copy_from_slice(&event.data[..256]);
///
///     // Use temp...
///
///     // Reset at the end of event processing
///     ScratchBuffer::thread_local_reset();
/// }
/// ```
///
/// # Thread Safety
///
/// Each thread has its own scratch buffer. The buffer is NOT thread-safe
/// and must only be accessed from a single thread.
///
/// # Panics
///
/// Allocation methods panic if a request exceeds the remaining capacity.
pub struct ScratchBuffer<const SIZE: usize = DEFAULT_SCRATCH_SIZE> {
    /// The underlying buffer
    buffer: [u8; SIZE],
    /// Current allocation position
    position: usize,
}

impl<const SIZE: usize> ScratchBuffer<SIZE> {
    /// Create a new scratch buffer.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            buffer: [0; SIZE],
            position: 0,
        }
    }

    /// Get the total capacity of the scratch buffer.
    #[inline]
    #[must_use]
    pub const fn capacity(&self) -> usize {
        SIZE
    }

    /// Get the number of bytes remaining.
    #[inline]
    #[must_use]
    pub fn remaining(&self) -> usize {
        SIZE - self.position
    }

    /// Get the number of bytes used.
    #[inline]
    #[must_use]
    pub fn used(&self) -> usize {
        self.position
    }

    /// Reset the scratch buffer for reuse.
    ///
    /// Does NOT zero the memory - just resets the position.
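    ///
    /// A minimal usage sketch:
    ///
    /// ```rust,ignore
    /// let mut buf: ScratchBuffer<64> = ScratchBuffer::new();
    /// buf.alloc_slice(32).fill(0xAB);
    /// buf.reset();
    /// assert_eq!(buf.used(), 0);
    /// assert_eq!(buf.remaining(), 64);
    /// ```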
    #[inline]
    pub fn reset(&mut self) {
        self.position = 0;
    }

    /// Allocate a slice from the scratch buffer.
    ///
    /// # Arguments
    ///
    /// * `size` - Number of bytes to allocate
    ///
    /// # Returns
    ///
    /// Mutable slice of the requested size.
    ///
    /// # Panics
    ///
    /// Panics if there isn't enough space remaining.
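    ///
    /// A minimal sketch of sequential allocations from one buffer:
    ///
    /// ```rust,ignore
    /// let mut buf: ScratchBuffer<1024> = ScratchBuffer::new();
    /// let header = buf.alloc_slice(16);
    /// header.fill(0);
    /// let payload = buf.alloc_slice(64);
    /// payload[0] = 1;
    /// assert_eq!(buf.used(), 80);
    /// ```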
    #[inline]
    pub fn alloc_slice(&mut self, size: usize) -> &mut [u8] {
        let start = self.position;
        let end = start + size;

        assert!(
            end <= SIZE,
            "Scratch buffer overflow: requested {size} bytes, only {} remaining",
            self.remaining()
        );

        self.position = end;
        &mut self.buffer[start..end]
    }

    /// Try to allocate a slice, returning `None` if there is not enough space remaining.
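    ///
    /// A sketch of the non-panicking path:
    ///
    /// ```rust,ignore
    /// let mut buf: ScratchBuffer<8> = ScratchBuffer::new();
    /// assert!(buf.try_alloc_slice(8).is_some());
    /// assert!(buf.try_alloc_slice(1).is_none()); // buffer is full
    /// ```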
    #[inline]
    pub fn try_alloc_slice(&mut self, size: usize) -> Option<&mut [u8]> {
        let start = self.position;
        let end = start + size;

        if end > SIZE {
            return None;
        }

        self.position = end;
        Some(&mut self.buffer[start..end])
    }

    /// Allocate space for a value and return a mutable reference.
    ///
    /// The space is properly aligned for the type, and the value is
    /// initialized with `T::default()`.
    ///
    /// # Panics
    ///
    /// Panics if there isn't enough space remaining.
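    ///
    /// A minimal sketch; the value starts as `T::default()`:
    ///
    /// ```rust,ignore
    /// let mut buf: ScratchBuffer<64> = ScratchBuffer::new();
    /// let counter: &mut u32 = buf.alloc();
    /// *counter += 1;
    /// assert_eq!(*counter, 1);
    /// ```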
    #[inline]
    pub fn alloc<T: Copy + Default>(&mut self) -> &mut T {
        let align = std::mem::align_of::<T>();
        let size = std::mem::size_of::<T>();

        // Align relative to the buffer's actual address: the byte array only
        // guarantees 1-byte alignment, so aligning the position alone is not enough.
        let base = self.buffer.as_ptr() as usize;
        let aligned_pos = ((base + self.position + align - 1) & !(align - 1)) - base;
        let end = aligned_pos + size;

        assert!(
            end <= SIZE,
            "Scratch buffer overflow: requested {} bytes (aligned), only {} remaining",
            size,
            SIZE - aligned_pos
        );

        self.position = end;

        // SAFETY: `aligned_pos` was computed from the buffer's address, so the
        // resulting pointer is aligned for `T`, and the assert keeps it in bounds.
        // The memory is initialized with `T::default()`.
        #[allow(unsafe_code)]
        unsafe {
            let ptr = self.buffer.as_mut_ptr().add(aligned_pos).cast::<T>();
            ptr.write(T::default());
            &mut *ptr
        }
    }

    /// Allocate space for a value, initializing it with `value`.
    ///
    /// # Panics
    ///
    /// Panics if there isn't enough space remaining in the buffer.
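    ///
    /// A minimal sketch:
    ///
    /// ```rust,ignore
    /// let mut buf: ScratchBuffer<64> = ScratchBuffer::new();
    /// let seq: &mut u64 = buf.alloc_with(42u64);
    /// assert_eq!(*seq, 42);
    /// ```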
    #[inline]
    pub fn alloc_with<T: Copy>(&mut self, value: T) -> &mut T {
        let align = std::mem::align_of::<T>();
        let size = std::mem::size_of::<T>();

        // Align relative to the buffer's actual address, as in `alloc`.
        let base = self.buffer.as_ptr() as usize;
        let aligned_pos = ((base + self.position + align - 1) & !(align - 1)) - base;
        let end = aligned_pos + size;

        assert!(
            end <= SIZE,
            "Scratch buffer overflow: requested {} bytes (aligned), only {} remaining",
            size,
            SIZE - aligned_pos
        );

        self.position = end;

        // SAFETY: `aligned_pos` was computed from the buffer's address, so the
        // resulting pointer is aligned for `T`, and the assert keeps it in bounds.
        #[allow(unsafe_code)]
        unsafe {
            let ptr = self.buffer.as_mut_ptr().add(aligned_pos).cast::<T>();
            ptr.write(value);
            &mut *ptr
        }
    }
}

impl<const SIZE: usize> Default for ScratchBuffer<SIZE> {
    fn default() -> Self {
        Self::new()
    }
}

// Thread-local scratch buffer instance
thread_local! {
    static THREAD_SCRATCH: UnsafeCell<ScratchBuffer<DEFAULT_SCRATCH_SIZE>> =
        const { UnsafeCell::new(ScratchBuffer::new()) };
}

/// Static methods for accessing the thread-local scratch buffer.
impl ScratchBuffer<DEFAULT_SCRATCH_SIZE> {
    /// Allocate from the thread-local scratch buffer.
    ///
    /// # Arguments
    ///
    /// * `size` - Number of bytes to allocate
    ///
    /// # Returns
    ///
    /// Mutable slice that is valid until `thread_local_reset()` is called.
    ///
    /// # Panics
    ///
    /// Panics if the requested size exceeds remaining capacity.
    ///
    /// # Safety Note
    ///
    /// The returned reference is only valid until the next `thread_local_reset()`
    /// call. It's the caller's responsibility to ensure the slice is not used
    /// after reset.
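    ///
    /// A per-event usage sketch on a single thread:
    ///
    /// ```rust,ignore
    /// let temp = ScratchBuffer::thread_local_alloc(64);
    /// temp.fill(0);
    /// // ... use temp while processing the event ...
    /// ScratchBuffer::thread_local_reset();
    /// ```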
    #[inline]
    #[must_use]
    pub fn thread_local_alloc(size: usize) -> &'static mut [u8] {
        THREAD_SCRATCH.with(|scratch| {
            // SAFETY: We're the only accessor on this thread, and the
            // returned reference is valid until reset() is called.
            #[allow(unsafe_code)]
            unsafe {
                (*scratch.get()).alloc_slice(size)
            }
        })
    }

    /// Reset the thread-local scratch buffer.
    ///
    /// Call this at the end of each event to reclaim space.
    #[inline]
    pub fn thread_local_reset() {
        THREAD_SCRATCH.with(|scratch| {
            // SAFETY: We're the only accessor on this thread.
            #[allow(unsafe_code)]
            unsafe {
                (*scratch.get()).reset();
            }
        });
    }

    /// Get remaining capacity in the thread-local buffer.
    #[inline]
    #[must_use]
    pub fn thread_local_remaining() -> usize {
        THREAD_SCRATCH.with(|scratch| {
            // SAFETY: We're only reading.
            #[allow(unsafe_code)]
            unsafe {
                (*scratch.get()).remaining()
            }
        })
    }

    /// Get used bytes in the thread-local buffer.
    #[inline]
    #[must_use]
    pub fn thread_local_used() -> usize {
        THREAD_SCRATCH.with(|scratch| {
            // SAFETY: We're only reading.
            #[allow(unsafe_code)]
            unsafe {
                (*scratch.get()).used()
            }
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_buffer() {
        let buf: ScratchBuffer<1024> = ScratchBuffer::new();
        assert_eq!(buf.capacity(), 1024);
        assert_eq!(buf.remaining(), 1024);
        assert_eq!(buf.used(), 0);
    }

    #[test]
    fn test_alloc_slice() {
        let mut buf: ScratchBuffer<1024> = ScratchBuffer::new();

        let slice1 = buf.alloc_slice(100);
        assert_eq!(slice1.len(), 100);
        assert_eq!(buf.used(), 100);
        assert_eq!(buf.remaining(), 924);

        let slice2 = buf.alloc_slice(200);
        assert_eq!(slice2.len(), 200);
        assert_eq!(buf.used(), 300);
    }

    #[test]
    fn test_try_alloc_slice() {
        let mut buf: ScratchBuffer<100> = ScratchBuffer::new();

        assert!(buf.try_alloc_slice(50).is_some());
        assert!(buf.try_alloc_slice(50).is_some());
        assert!(buf.try_alloc_slice(1).is_none()); // No space left
    }

    #[test]
    fn test_reset() {
        let mut buf: ScratchBuffer<1024> = ScratchBuffer::new();

        buf.alloc_slice(500);
        assert_eq!(buf.used(), 500);

        buf.reset();
        assert_eq!(buf.used(), 0);
        assert_eq!(buf.remaining(), 1024);
    }

    #[test]
    fn test_alloc_typed() {
        let mut buf: ScratchBuffer<1024> = ScratchBuffer::new();

        let val: &mut u64 = buf.alloc();
        *val = 42;
        assert_eq!(*val, 42);

        let val2: &mut u32 = buf.alloc_with(100);
        assert_eq!(*val2, 100);
    }

    #[test]
    fn test_alignment() {
        let mut buf: ScratchBuffer<1024> = ScratchBuffer::new();

        // Allocate 1 byte to misalign
        buf.alloc_slice(1);

        // u64 requires 8-byte alignment
        let val: &mut u64 = buf.alloc();
        let ptr = std::ptr::from_ref::<u64>(val) as usize;
        assert_eq!(ptr % 8, 0, "u64 should be 8-byte aligned");
    }

    #[test]
    #[should_panic(expected = "Scratch buffer overflow")]
    fn test_overflow_panic() {
        let mut buf: ScratchBuffer<100> = ScratchBuffer::new();
        buf.alloc_slice(101); // Should panic
    }

    #[test]
    fn test_thread_local() {
        // Reset first
        ScratchBuffer::thread_local_reset();

        assert_eq!(ScratchBuffer::thread_local_used(), 0);

        let slice = ScratchBuffer::thread_local_alloc(128);
        slice[0] = 42;
        assert_eq!(ScratchBuffer::thread_local_used(), 128);

        ScratchBuffer::thread_local_reset();
        assert_eq!(ScratchBuffer::thread_local_used(), 0);
    }
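
    // Sketch: remaining() on the thread-local buffer decreases by the allocated size.
    #[test]
    fn test_thread_local_remaining() {
        ScratchBuffer::thread_local_reset();
        let before = ScratchBuffer::thread_local_remaining();

        let _ = ScratchBuffer::thread_local_alloc(64);
        assert_eq!(ScratchBuffer::thread_local_remaining(), before - 64);

        ScratchBuffer::thread_local_reset();
    }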

    #[test]
    fn test_multiple_allocs() {
        let mut buf: ScratchBuffer<1024> = ScratchBuffer::new();

        for i in 0..10 {
            let slice = buf.alloc_slice(50);
            slice[0] = u8::try_from(i).unwrap();
        }

        assert_eq!(buf.used(), 500);
    }

    #[test]
    fn test_default() {
        let buf: ScratchBuffer<256> = ScratchBuffer::default();
        assert_eq!(buf.capacity(), 256);
    }
}