stack_arena/buffer_arena.rs

use std::{alloc::Layout, ptr::NonNull};

use crate::{AllocError, Allocator, chunk::Chunk};

/// A buffer-based arena allocator that manages a single memory chunk.
///
/// `BufferArena` is a low-level component used by [`StackArena`](crate::StackArena) to manage individual
/// memory chunks. It provides efficient allocation within a single contiguous memory
/// region and implements the [`Allocator`](crate::Allocator) trait for memory management operations.
///
/// Each `BufferArena` contains a memory chunk and a pointer to the current position
/// in the buffer, allowing for efficient bump allocation (also known as linear allocation).
///
/// # Memory Management
///
/// - Uses a simple bump allocation strategy for maximum performance
/// - Allocates memory sequentially from a single contiguous chunk
/// - Follows LIFO (Last-In-First-Out) deallocation pattern
/// - Provides methods to check remaining capacity and allocation suitability
///
/// # Performance
///
/// `BufferArena` is designed for maximum allocation speed:
///
/// - Allocation is a simple pointer increment operation
/// - No fragmentation within a single chunk
/// - Minimal overhead per allocation
/// - Efficient memory reuse with the LIFO pattern
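///
/// # Examples
///
/// A minimal, illustrative round trip: allocate a block, then release it in
/// LIFO order (the layout values here are arbitrary):
///
/// ```
/// use std::alloc::Layout;
/// use stack_arena::{Allocator, BufferArena};
///
/// let mut arena = BufferArena::with_capacity(4096);
/// let layout = Layout::from_size_align(8, 1).unwrap();
/// let block = unsafe { arena.allocate(layout).unwrap() };
/// assert_eq!(arena.used(), 8);
/// // SAFETY: `block` is the most recent allocation and `layout` matches.
/// unsafe { arena.deallocate(block.cast(), layout) };
/// assert_eq!(arena.used(), 0);
/// ```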
#[derive(Debug)]
pub struct BufferArena {
    /// The underlying buffer of bytes
    pub(crate) store: Chunk,
    /// The current allocation position within the buffer
    pub(crate) ptr: NonNull<u8>,
}

impl Default for BufferArena {
    #[inline(always)]
    fn default() -> Self {
        let store = Chunk::default();
        let ptr = store.base();
        Self { store, ptr }
    }
}

impl BufferArena {
    /// Creates a new buffer arena with the specified capacity.
    ///
    /// The capacity will be rounded up to the next power of two if it's not
    /// already a power of two.
    ///
    /// # Parameters
    ///
    /// * `capacity` - The capacity of the buffer in bytes
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(4096);
    /// ```
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let store = Chunk::with_capacity(capacity);
        let ptr = store.base();
        Self { store, ptr }
    }

    /// Returns the number of bytes currently used in the buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(4096);
    /// assert_eq!(arena.used(), 0);
    /// ```
    #[inline]
    pub fn used(&self) -> usize {
        unsafe { self.ptr.byte_offset_from_unsigned(self.store.base()) }
    }

    /// Returns the number of bytes remaining in the buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(4096);
    /// assert_eq!(arena.remaining(), 4096);
    /// ```
    #[inline]
    pub fn remaining(&self) -> usize {
        unsafe { self.store.limit().byte_offset_from_unsigned(self.ptr) }
    }

    /// Checks if the given pointer is contained within this buffer.
    ///
    /// # Parameters
    ///
    /// * `ptr` - The pointer to check
    ///
    /// # Returns
    ///
    /// `true` if the pointer is within the buffer's memory range, `false` otherwise.
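    ///
    /// # Examples
    ///
    /// A minimal sketch (illustrative sizes; the pointer comes from this arena):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(4096);
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let block = unsafe { arena.allocate(layout).unwrap() };
    /// assert!(arena.contains(block.cast()));
    /// ```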
    #[inline]
    pub fn contains(&self, ptr: NonNull<u8>) -> bool {
        self.store.contains(ptr)
    }

    /// Returns `true` if the buffer has enough space to allocate the given layout.
    ///
    /// This method checks if there is enough space in the buffer to allocate
    /// memory with the specified layout, taking alignment requirements into account.
    ///
    /// # Parameters
    ///
    /// * `layout` - The memory layout to check
    ///
    /// # Returns
    ///
    /// `true` if there is enough space, `false` otherwise.
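    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming a freshly created arena:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::with_capacity(4096);
    /// assert!(arena.sufficient_for(Layout::from_size_align(64, 8).unwrap()));
    /// assert!(!arena.sufficient_for(Layout::from_size_align(8192, 8).unwrap()));
    /// ```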
    #[inline]
    pub fn sufficient_for(&self, layout: Layout) -> bool {
        self.ptr.align_offset(layout.align()) + layout.size() <= self.remaining()
    }
}

/// Implements conversion from `Chunk` to `BufferArena`.
///
/// This allows creating a `BufferArena` from an existing `Chunk`.
impl From<Chunk> for BufferArena {
    #[inline]
    fn from(value: Chunk) -> Self {
        let ptr = value.base();
        Self { store: value, ptr }
    }
}

/// Implementation of the `Allocator` trait for `BufferArena`.
///
/// This implementation provides efficient memory allocation within a single
/// contiguous memory chunk using a bump allocation strategy. It follows the
/// LIFO (Last-In-First-Out) pattern for memory management.
impl Allocator for BufferArena {
    /// Allocates memory with the specified layout.
    ///
    /// This method allocates memory from the current position in the buffer,
    /// ensuring proper alignment. It advances the buffer pointer by the size
    /// of the allocation.
    ///
    /// # Safety
    ///
    /// The returned memory is uninitialized. The caller must not use the returned
    /// pointer after it has been deallocated or after this arena is dropped.
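    ///
    /// # Examples
    ///
    /// A minimal sketch with arbitrary layout values:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(4096);
    /// let layout = Layout::from_size_align(16, 1).unwrap();
    /// let block = unsafe { arena.allocate(layout).unwrap() };
    /// assert_eq!(block.len(), 16);
    /// assert_eq!(arena.used(), 16);
    /// ```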
    #[inline]
    unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align() <= 4096);
        if self.sufficient_for(layout) {
            let ptr = unsafe { self.ptr.add(self.ptr.align_offset(layout.align())) };
            self.ptr = unsafe { ptr.add(layout.size()) };
            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
        } else {
            Err(AllocError::CapacityExceeded {
                requested: layout.size(),
                remaining: self.remaining(),
            })
        }
    }

    /// Deallocates memory previously allocated by this allocator.
    ///
    /// This method resets the buffer pointer to the start of the deallocated
    /// memory, effectively "freeing" all memory allocated after it as well.
    /// This implements the LIFO deallocation pattern.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
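    ///
    /// # Examples
    ///
    /// A minimal sketch of the documented LIFO reset: deallocating an earlier
    /// block also releases everything allocated after it (arbitrary layout values):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(4096);
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let first = unsafe { arena.allocate(layout).unwrap() };
    /// let _second = unsafe { arena.allocate(layout).unwrap() };
    /// assert_eq!(arena.used(), 16);
    /// // SAFETY: `first` was returned by `allocate` with this exact layout.
    /// unsafe { arena.deallocate(first.cast(), layout) };
    /// assert_eq!(arena.used(), 0);
    /// ```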
    #[inline]
    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.store.contains(ptr.cast()));
        debug_assert!(unsafe { ptr.add(layout.size()) } <= self.store.limit());
        self.ptr = ptr;
    }

    /// Grows a previously allocated memory block to a larger size.
    ///
    /// This method extends the size of an existing allocation by advancing
    /// the buffer pointer. It **only supports growing the last allocation**
    /// (following LIFO pattern). Attempting to grow any other allocation
    /// will trigger a debug assertion failure.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
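    ///
    /// # Examples
    ///
    /// A minimal sketch growing the most recent allocation in place (arbitrary sizes):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(4096);
    /// let old_layout = Layout::from_size_align(8, 1).unwrap();
    /// let block = unsafe { arena.allocate(old_layout).unwrap() };
    /// let new_layout = Layout::from_size_align(16, 1).unwrap();
    /// // SAFETY: `block` is the most recent allocation and the alignments match.
    /// let grown = unsafe { arena.grow(block.cast(), old_layout, new_layout).unwrap() };
    /// assert_eq!(grown.len(), 16);
    /// assert_eq!(arena.used(), 16);
    /// ```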
    #[inline]
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "last allocation only"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Less => {
                if unsafe { ptr.add(new_layout.size()) } <= self.store.limit() {
                    self.ptr = unsafe { ptr.add(new_layout.size()) };
                    Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
                } else {
                    Err(AllocError::CapacityExceeded {
                        requested: new_layout.size() - old_layout.size(),
                        remaining: self.remaining(),
                    })
                }
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Greater => unreachable!("use shrink instead"),
        }
    }

    /// Shrinks a previously allocated memory block to a smaller size.
    ///
    /// This method reduces the size of an existing allocation by moving
    /// the buffer pointer back. It assumes the memory block being shrunk is the
    /// most recently allocated block (following LIFO pattern).
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
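    ///
    /// # Examples
    ///
    /// A minimal sketch shrinking the most recent allocation (arbitrary sizes):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::with_capacity(4096);
    /// let old_layout = Layout::from_size_align(16, 1).unwrap();
    /// let block = unsafe { arena.allocate(old_layout).unwrap() };
    /// let new_layout = Layout::from_size_align(8, 1).unwrap();
    /// // SAFETY: `block` is the most recent allocation and the alignments match.
    /// let shrunk = unsafe { arena.shrink(block.cast(), old_layout, new_layout).unwrap() };
    /// assert_eq!(shrunk.len(), 8);
    /// assert_eq!(arena.used(), 8);
    /// ```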
    #[inline]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "last allocation only"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Greater => {
                self.ptr = unsafe { ptr.add(new_layout.size()) };
                Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Less => unreachable!("use grow instead"),
        }
    }
}

/// Implements conversion from `BufferArena` to `Chunk`.
///
/// This allows extracting the underlying chunk from a `BufferArena`.
impl From<BufferArena> for Chunk {
    /// Converts the buffer arena into its underlying chunk.
    #[inline]
    fn from(value: BufferArena) -> Self {
        value.store
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_buffer_arena_new_and_capacity() {
        let cap = 4096;
        let arena = BufferArena::with_capacity(cap);
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), cap);
    }

    #[test]
    fn test_chunk_conversion() {
        let chunk = Chunk::with_capacity(4096);
        let arena: BufferArena = chunk.into();
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 4096);

        let to_chunk: Chunk = arena.into();
        assert_eq!(to_chunk.capacity(), 4096);
    }

    #[test]
    fn test_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 8);
        assert_eq!(arena.used(), 8);

        // Deallocating the most recent allocation resets the arena position (LIFO)
        unsafe { arena.deallocate(ptr.cast(), layout) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_alignment() {
        let mut arena = BufferArena::with_capacity(4096);
        unsafe { arena.ptr.write_bytes(0, arena.remaining()) };
        let mut prev_end = arena.ptr;

        for (i, align) in [1, 2, 4, 8, 16, 32, 4096].into_iter().rev().enumerate() {
            let size = i + 1;
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { arena.allocate_zeroed(layout).unwrap() };
            let addr = ptr.cast::<u8>().as_ptr() as usize;
            // Check alignment
            assert_eq!(addr % align, 0, "addr {ptr:?} not aligned to {align}");
            // Write data and verify
            let fill = size as u8;
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            let data = unsafe { ptr.as_ref() };
            assert_eq!(data, vec![fill; size].as_slice());

            // Ensure allocations do not overlap
            assert!(ptr.cast() >= prev_end, "Allocation overlapped previous");
            prev_end = unsafe { ptr.cast().add(layout.size()) };
        }
        assert_eq!(arena.used(), 79);
        let written =
            unsafe { std::slice::from_raw_parts(arena.store.base().as_ptr(), arena.used()) };
        assert_eq!(
            written,
            [
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 0, 0, 0, 0, 0,
                4, 4, 4, 4, 5, 5, 5, 5, 5, 0, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7
            ]
            .as_ref()
        );
        unsafe { arena.deallocate(arena.store.base(), Layout::from_size_align_unchecked(8, 1)) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_allocate_full_arena() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(4096, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 4096);
        assert_eq!(arena.used(), 4096);

        // Next allocation should fail
        let layout2 = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { arena.allocate(layout2) }.is_err());
    }

    #[test]
    fn test_grow_allocation() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(16, 1).unwrap();
        let grown = unsafe { arena.grow(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(grown.len(), 16);
        assert_eq!(arena.used(), 16);
    }

    #[test]
    fn test_shrink_allocation() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(8, 1).unwrap();
        let shrunk = unsafe { arena.shrink(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(shrunk.len(), 8);
        // The used offset should be reduced accordingly
        assert_eq!(arena.used(), 8);
    }

    #[test]
    fn test_multiple_allocate_and_deallocate() {
        use std::alloc::Layout;

        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(8, 1).unwrap();

        // Allocate and deallocate 5 times in a row
        for _ in 0..5 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            assert_eq!(ptr.len(), 8);
            assert_eq!(arena.used(), 8);
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());

            // Deallocating the most recent allocation resets the arena position (LIFO)
            unsafe { arena.deallocate(ptr.cast(), layout) };
            assert_eq!(arena.used(), 0);
        }

        // Allocate several, then deallocate in reverse order
        let mut ptrs = Vec::new();
        for _ in 0..4 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());
            ptrs.push(ptr);
        }
        assert_eq!(arena.used(), 32);

        for ptr in ptrs.into_iter().rev() {
            unsafe { arena.deallocate(ptr.cast(), layout) };
        }
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_multi_alloc() {
        // Create a BufferArena backed by a single 4 KiB chunk
        let mut arena = BufferArena::with_capacity(4096);

        // First allocation - should fit in the chunk
        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { arena.allocate(layout) }.unwrap();
        unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };

        // Verify data
        let data1 = unsafe { ptr.as_ref() };
        assert_eq!(data1, [0xAA; 8].as_slice());

        // Check remaining space
        let remaining = arena.remaining();
        assert_eq!(remaining, 4088);

        // Try to grow the allocation
        // If there's enough space, grow the allocation
        let new_layout = Layout::from_size_align(layout.size() + 4, layout.align()).unwrap();
        let grown_ptr = unsafe { arena.grow(ptr.cast(), layout, new_layout) }.unwrap();

        // Write to the newly grown portion
        unsafe {
            grown_ptr
                .cast::<u8>()
                .add(layout.size())
                .write_bytes(0xBB, 4)
        };

        assert_eq!(arena.remaining(), 4084);

        // Verify the grown data
        let grown_data = unsafe {
            std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, new_layout.size())
        };
        let mut expected = vec![0xAA; layout.size()];
        expected.extend_from_slice(&[0xBB; 4]);
        assert_eq!(grown_data, expected.as_slice());

        let layout = unsafe { Layout::from_size_align_unchecked(4, 4) };
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), layout.size());
        assert_eq!(arena.remaining(), 4080);
    }
}