stack_arena/buffer_arena.rs

use std::{alloc::Layout, ptr::NonNull};

use crate::{AllocError, Allocator, chunk::Chunk};

/// A buffer-based arena allocator that manages a single memory chunk.
///
/// `BufferArena` is a low-level component used by [`StackArena`](crate::StackArena) to manage individual
/// memory chunks. It provides efficient allocation within a single contiguous memory
/// region and implements the [`Allocator`](crate::Allocator) trait for memory management operations.
///
/// Each `BufferArena` contains a memory chunk and a pointer to the current position
/// in the buffer, allowing for efficient bump allocation (also known as linear allocation).
///
/// # Memory Management
///
/// - Uses a simple bump allocation strategy for maximum performance
/// - Allocates memory sequentially from a single contiguous chunk
/// - Follows a LIFO (Last-In-First-Out) deallocation pattern
/// - Provides methods to check remaining capacity and allocation suitability
///
/// # Performance
///
/// `BufferArena` is designed for maximum allocation speed:
///
/// - Allocation is a simple pointer increment operation
/// - No fragmentation within a single chunk
/// - Minimal overhead per allocation
/// - Efficient memory reuse with the LIFO pattern
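///
/// # Examples
///
/// A minimal allocate/deallocate round trip (a sketch; it assumes `Allocator`
/// is exported at the crate root, as the trait link above suggests):
///
/// ```
/// use std::alloc::Layout;
/// use stack_arena::{Allocator, BufferArena};
///
/// let mut arena = BufferArena::with_capacity(64);
/// let layout = Layout::from_size_align(16, 1).unwrap();
///
/// // Allocation bumps the cursor by the requested size.
/// let ptr = unsafe { arena.allocate(layout).unwrap() };
/// assert_eq!(arena.used(), 16);
///
/// // Deallocating the most recent allocation rewinds the cursor (LIFO).
/// unsafe { arena.deallocate(ptr.cast(), layout) };
/// assert_eq!(arena.used(), 0);
/// ```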
#[derive(Debug)]
pub struct BufferArena {
    /// The underlying buffer of bytes
    pub(crate) store: Chunk,
    /// The current allocation cursor within the buffer
    pub(crate) ptr: NonNull<u8>,
}

impl Default for BufferArena {
    /// Creates a `BufferArena` backed by the default `Chunk`, with the
    /// allocation cursor at the start of the buffer.
    #[inline(always)]
    fn default() -> Self {
        let store = Chunk::default();
        let ptr = store.base;
        Self { store, ptr }
    }
}

impl BufferArena {
    /// Creates a new buffer arena with the specified capacity.
    ///
    /// The capacity will be rounded up to the next power of two if it's not
    /// already a power of two.
    ///
    /// # Parameters
    ///
    /// * `capacity` - The capacity of the buffer in bytes
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(1024);
    /// ```
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let store = Chunk::new(capacity);
        let ptr = store.base;
        Self { store, ptr }
    }

    /// Returns the number of bytes currently used in the buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(1024);
    /// assert_eq!(arena.used(), 0);
    /// ```
    #[inline]
    pub fn used(&self) -> usize {
        unsafe { self.ptr.byte_offset_from_unsigned(self.store.base) }
    }

    /// Returns the number of bytes remaining in the buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(1024);
    /// assert_eq!(arena.remaining(), 1024);
    /// ```
    #[inline]
    pub fn remaining(&self) -> usize {
        unsafe { self.store.limit.byte_offset_from_unsigned(self.ptr) }
    }

    /// Checks if the given pointer is contained within this buffer.
    ///
    /// # Parameters
    ///
    /// * `ptr` - The pointer to check
    ///
    /// # Returns
    ///
    /// `true` if the pointer is within the buffer's memory range, `false` otherwise.
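    ///
    /// # Examples
    ///
    /// A small sketch, assuming `Allocator` is exported at the crate root:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(64);
    /// let ptr = unsafe { arena.allocate(Layout::from_size_align(8, 1).unwrap()).unwrap() };
    ///
    /// // A pointer handed out by the arena lies within its buffer.
    /// assert!(arena.contains(ptr.cast()));
    /// ```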
    #[inline]
    pub fn contains(&self, ptr: NonNull<u8>) -> bool {
        self.store.contains(ptr)
    }

    /// Returns `true` if the buffer has enough space to allocate the given layout.
    ///
    /// This method checks if there is enough space in the buffer to allocate
    /// memory with the specified layout, taking alignment requirements into account.
    ///
    /// # Parameters
    ///
    /// * `layout` - The memory layout to check
    ///
    /// # Returns
    ///
    /// `true` if there is enough space, `false` otherwise.
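    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(16);
    ///
    /// // 16 bytes fit into the fresh 16-byte buffer, 17 do not.
    /// assert!(arena.sufficient_for(Layout::from_size_align(16, 1).unwrap()));
    /// assert!(!arena.sufficient_for(Layout::from_size_align(17, 1).unwrap()));
    /// ```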
    #[inline]
    pub fn sufficient_for(&self, layout: Layout) -> bool {
        self.ptr.align_offset(layout.align()) + layout.size() <= self.remaining()
    }
}

/// Implements conversion from `Chunk` to `BufferArena`.
///
/// This allows creating a `BufferArena` from an existing `Chunk`.
impl From<Chunk> for BufferArena {
    #[inline]
    fn from(value: Chunk) -> Self {
        let ptr = value.base;
        Self { store: value, ptr }
    }
}

/// Implementation of the `Allocator` trait for `BufferArena`.
///
/// This implementation provides efficient memory allocation within a single
/// contiguous memory chunk using a bump allocation strategy. It follows the
/// LIFO (Last-In-First-Out) pattern for memory management.
impl Allocator for BufferArena {
    /// Allocates memory with the specified layout.
    ///
    /// This method allocates memory from the current position in the buffer,
    /// ensuring proper alignment. It advances the buffer pointer past any
    /// alignment padding and then by the size of the allocation.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it returns a pointer to uninitialized
    /// memory that must be used carefully: the allocation is only valid until
    /// it (or an earlier allocation) is deallocated, or the arena is dropped.
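    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming `Allocator` is exported at the crate root:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(32);
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    ///
    /// let ptr = unsafe { arena.allocate(layout).unwrap() };
    /// assert_eq!(ptr.len(), 8);
    /// assert_eq!(arena.used(), 8);
    ///
    /// // A request larger than the remaining space fails.
    /// let too_big = Layout::from_size_align(64, 1).unwrap();
    /// assert!(unsafe { arena.allocate(too_big) }.is_err());
    /// ```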
    #[inline]
    unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align() <= 4096);
        if self.sufficient_for(layout) {
            let ptr = unsafe { self.ptr.add(self.ptr.align_offset(layout.align())) };
            self.ptr = unsafe { ptr.add(layout.size()) };
            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
        } else {
            Err(AllocError::CapacityExceeded {
                requested: layout.size(),
                remaining: self.remaining(),
            })
        }
    }

    /// Deallocates memory previously allocated by this allocator.
    ///
    /// This method resets the buffer pointer to the start of the deallocated
    /// memory, effectively "freeing" all memory allocated after it as well.
    /// This implements the LIFO deallocation pattern.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
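    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming `Allocator` is exported at the crate root:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(32);
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(layout).unwrap() };
    ///
    /// // Rewinds the cursor back to the start of the allocation.
    /// unsafe { arena.deallocate(ptr.cast(), layout) };
    /// assert_eq!(arena.used(), 0);
    /// ```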
    #[inline]
    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.store.contains(ptr.cast()));
        debug_assert!(unsafe { ptr.add(layout.size()) } <= self.store.limit);
        self.ptr = ptr;
    }

    /// Grows a previously allocated memory block to a larger size.
    ///
    /// This method extends the size of an existing allocation by advancing
    /// the buffer pointer. It **only supports growing the last allocation**
    /// (following the LIFO pattern). Attempting to grow any other allocation
    /// will trigger a debug assertion failure.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and with `old_layout` matching the layout that was used to
    /// allocate it.
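    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming `Allocator` is exported at the crate root:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(32);
    /// let old_layout = Layout::from_size_align(8, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(old_layout).unwrap() };
    ///
    /// // Growing the most recent allocation just bumps the cursor further.
    /// let new_layout = Layout::from_size_align(16, 1).unwrap();
    /// let grown = unsafe { arena.grow(ptr.cast(), old_layout, new_layout).unwrap() };
    /// assert_eq!(grown.len(), 16);
    /// assert_eq!(arena.used(), 16);
    /// ```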
    #[inline]
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "grow is only supported for the last allocation: {ptr:?} old: {old_layout:?}, new: {new_layout:?}"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Less => {
                if unsafe { ptr.add(new_layout.size()) } <= self.store.limit {
                    self.ptr = unsafe { ptr.add(new_layout.size()) };
                    Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
                } else {
                    Err(AllocError::CapacityExceeded {
                        requested: new_layout.size() - old_layout.size(),
                        remaining: self.remaining(),
                    })
                }
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Greater => unreachable!("use shrink instead"),
        }
    }

    /// Shrinks a previously allocated memory block to a smaller size.
    ///
    /// This method reduces the size of an existing allocation by moving
    /// the buffer pointer back. It assumes the memory block being shrunk is the
    /// most recently allocated block (following the LIFO pattern).
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and with `old_layout` matching the layout that was used to
    /// allocate it.
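    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming `Allocator` is exported at the crate root:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(32);
    /// let old_layout = Layout::from_size_align(16, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(old_layout).unwrap() };
    ///
    /// // Shrinking the most recent allocation moves the cursor back.
    /// let new_layout = Layout::from_size_align(8, 1).unwrap();
    /// let shrunk = unsafe { arena.shrink(ptr.cast(), old_layout, new_layout).unwrap() };
    /// assert_eq!(shrunk.len(), 8);
    /// assert_eq!(arena.used(), 8);
    /// ```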
    #[inline]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(unsafe { ptr.add(old_layout.size()) }, self.ptr);
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Greater => {
                self.ptr = unsafe { ptr.add(new_layout.size()) };
                Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Less => unreachable!("use grow instead"),
        }
    }
}

/// Implements conversion from `BufferArena` to `Chunk`.
///
/// This allows extracting the underlying chunk from a `BufferArena`.
impl From<BufferArena> for Chunk {
    /// Converts the buffer arena into its underlying chunk.
    #[inline]
    fn from(value: BufferArena) -> Chunk {
        value.store
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_buffer_arena_new_and_capacity() {
        let cap = 63;
        let arena = BufferArena::with_capacity(cap);
        assert_eq!(arena.used(), 0);
        // Capacity is rounded up to the next power of two.
        assert_eq!(arena.remaining(), 64);
    }

    #[test]
    fn test_chunk_conversion() {
        let chunk = Chunk::new(128);
        let arena: BufferArena = chunk.into();
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 128);

        let to_chunk: Chunk = arena.into();
        assert_eq!(to_chunk.capacity(), 128);
    }

    #[test]
    fn test_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 8);
        assert_eq!(arena.used(), 8);

        // Deallocating the last allocation resets the used offset (LIFO).
        unsafe { arena.deallocate(ptr.cast(), layout) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_alignment() {
        let mut arena = BufferArena::with_capacity(128);
        unsafe { arena.ptr.write_bytes(0, arena.remaining()) };
        let mut prev_end = arena.ptr;

        for (i, align) in [1, 2, 4, 8, 16, 32, 4096].into_iter().rev().enumerate() {
            let size = i + 1;
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { arena.allocate_zeroed(layout).unwrap() };
            let addr = ptr.cast::<u8>().as_ptr() as usize;
            // Check alignment
            assert_eq!(addr % align, 0, "addr {ptr:?} not aligned to {align}");
            // Write data and verify
            let fill = size as u8;
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            let data = unsafe { ptr.as_ref() };
            assert_eq!(data, vec![fill; size].as_slice());

            // Ensure allocations do not overlap
            assert!(ptr.cast() >= prev_end, "Allocation overlapped previous");
            prev_end = unsafe { ptr.cast().add(layout.size()) };
        }
        assert_eq!(arena.used(), 79);
        let written =
            unsafe { std::slice::from_raw_parts(arena.store.base.as_ptr(), arena.used()) };
        assert_eq!(
            written,
            [
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 0, 0, 0,
                0, 0, 4, 4, 4, 4, 5, 5, 5, 5, 5, 0, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7
            ]
            .as_ref()
        );
        unsafe { arena.deallocate(arena.store.base, Layout::from_size_align_unchecked(8, 1)) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_allocate_full_arena() {
        let mut arena = BufferArena::with_capacity(16);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 16);
        assert_eq!(arena.used(), 16);

        // Next allocation should fail
        let layout2 = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { arena.allocate(layout2) }.is_err());
    }

    #[test]
    fn test_grow_allocation() {
        let mut arena = BufferArena::with_capacity(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(16, 1).unwrap();
        let grown = unsafe { arena.grow(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(grown.len(), 16);
        assert_eq!(arena.used(), 16);
    }

    #[test]
    fn test_shrink_allocation() {
        let mut arena = BufferArena::with_capacity(32);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(8, 1).unwrap();
        let shrunk = unsafe { arena.shrink(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(shrunk.len(), 8);
        // The used offset should be reduced accordingly
        assert_eq!(arena.used(), 8);
    }

    #[test]
    fn test_multiple_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(64);
        let layout = Layout::from_size_align(8, 1).unwrap();

        // Allocate and deallocate 5 times in a row
        for _ in 0..5 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            assert_eq!(ptr.len(), 8);
            assert_eq!(arena.used(), 8);
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());

            // Deallocating the last allocation resets the used offset (LIFO).
            unsafe { arena.deallocate(ptr.cast(), layout) };
            assert_eq!(arena.used(), 0);
        }

        // Allocate several, then deallocate in reverse order
        let mut ptrs = Vec::new();
        for _ in 0..4 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());
            ptrs.push(ptr);
        }
        assert_eq!(arena.used(), 32);

        for ptr in ptrs.into_iter().rev() {
            unsafe { arena.deallocate(ptr.cast(), layout) };
        }
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_multi_alloc() {
        // Create a BufferArena with a very small chunk size
        let mut arena = BufferArena::with_capacity(16);

        // First allocation - should fit in the chunk
        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { arena.allocate(layout) }.unwrap();
        unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };

        // Verify data
        let data1 = unsafe { ptr.as_ref() };
        assert_eq!(data1, [0xAA; 8].as_slice());

        // Check remaining space
        let remaining = arena.remaining();
        assert_eq!(remaining, 8);

        // Grow the last allocation by 4 bytes; there is enough space remaining
        let new_layout = Layout::from_size_align(layout.size() + 4, layout.align()).unwrap();
        let grown_ptr = unsafe { arena.grow(ptr.cast(), layout, new_layout) }.unwrap();

        // Write to the newly grown portion
        unsafe {
            grown_ptr
                .cast::<u8>()
                .add(layout.size())
                .write_bytes(0xBB, 4)
        };

        assert_eq!(arena.remaining(), 4);

        // Verify the grown data
        let grown_data = unsafe {
            std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, new_layout.size())
        };
        let mut expected = vec![0xAA; layout.size()];
        expected.extend_from_slice(&[0xBB; 4]);
        assert_eq!(grown_data, expected.as_slice());

        let layout = unsafe { Layout::from_size_align_unchecked(4, 4) };
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), layout.size());
        assert_eq!(arena.remaining(), 0);
    }
}