stack_arena/buffer_arena.rs

use std::{alloc::Layout, ptr::NonNull};

use crate::{Allocator, chunk::Chunk};

/// A buffer-based arena allocator that manages a single memory chunk.
///
/// `BufferArena` is a low-level component used by [`StackArena`](crate::StackArena) to manage individual
/// memory chunks. It provides efficient allocation within a single contiguous memory
/// region and implements the [`Allocator`](crate::Allocator) trait for memory management operations.
///
/// Each `BufferArena` contains a memory chunk and a pointer to the current position
/// in the buffer, allowing for efficient bump allocation (also known as linear allocation).
///
/// # Memory Management
///
/// - Uses a simple bump allocation strategy for maximum performance
/// - Allocates memory sequentially from a single contiguous chunk
/// - Follows a LIFO (Last-In-First-Out) deallocation pattern
/// - Provides methods to check remaining capacity and allocation suitability
///
/// # Performance
///
/// `BufferArena` is designed for maximum allocation speed:
///
/// - Allocation is a simple pointer increment operation
/// - No fragmentation within a single chunk
/// - Minimal overhead per allocation
/// - Efficient memory reuse with the LIFO pattern
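///
/// # Examples
///
/// A minimal usage sketch. It assumes the [`Allocator`](crate::Allocator) trait is exported
/// from the crate root (as the doc links above suggest); allocation is `unsafe` because the
/// caller is responsible for how the returned pointer is used.
///
/// ```
/// use std::alloc::Layout;
/// use stack_arena::{Allocator, BufferArena};
///
/// let mut arena = BufferArena::new(1024);
/// let layout = Layout::from_size_align(64, 8).unwrap();
///
/// // Bump-allocate 64 bytes from the front of the chunk.
/// let ptr = unsafe { arena.allocate(layout).unwrap() };
/// assert_eq!(arena.used(), 64);
///
/// // LIFO deallocation rewinds the bump pointer.
/// unsafe { arena.deallocate(ptr.cast(), layout) };
/// assert_eq!(arena.used(), 0);
/// ```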
#[derive(Debug)]
pub struct BufferArena {
    /// The underlying buffer of bytes
    pub(crate) store: Chunk,
    /// The current bump pointer; the next allocation starts here
    pub(crate) ptr: NonNull<u8>,
}

impl Default for BufferArena {
    /// Creates a `BufferArena` backed by a 4 KiB chunk.
    fn default() -> Self {
        let store = Chunk::new(1 << 12);
        let ptr = store.base;
        Self { store, ptr }
    }
}

impl BufferArena {
    /// Creates a new buffer arena with the specified capacity.
    ///
    /// The capacity will be rounded up to the next power of two if it's not
    /// already a power of two.
    ///
    /// # Parameters
    ///
    /// * `capacity` - The capacity of the buffer in bytes
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::new(1024);
    /// ```
    #[inline(always)]
    pub fn new(capacity: usize) -> Self {
        let store = Chunk::new(capacity);
        let ptr = store.base;
        Self { store, ptr }
    }

    /// Returns the number of bytes currently used in the buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::new(1024);
    /// assert_eq!(arena.used(), 0);
    /// ```
    #[inline(always)]
    pub fn used(&self) -> usize {
        unsafe { self.ptr.byte_offset_from_unsigned(self.store.base) }
    }

    /// Returns the number of bytes remaining in the buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::new(1024);
    /// assert_eq!(arena.remaining(), 1024);
    /// ```
    #[inline(always)]
    pub fn remaining(&self) -> usize {
        unsafe { self.store.limit.byte_offset_from_unsigned(self.ptr) }
    }

    /// Checks if the given pointer is contained within this buffer.
    ///
    /// # Parameters
    ///
    /// * `ptr` - The pointer to check
    ///
    /// # Returns
    ///
    /// `true` if the pointer is within the buffer's memory range, `false` otherwise.
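    ///
    /// # Examples
    ///
    /// A short sketch; it assumes the [`Allocator`](crate::Allocator) trait is exported from
    /// the crate root, since that is how a pointer into the arena is obtained:
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::new(64);
    /// let ptr = unsafe { arena.allocate(Layout::from_size_align(8, 1).unwrap()).unwrap() };
    /// // A freshly allocated pointer lies inside this arena's chunk.
    /// assert!(arena.contains(ptr.cast()));
    /// ```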
    #[inline(always)]
    pub fn contains(&self, ptr: NonNull<u8>) -> bool {
        self.store.contains(ptr)
    }

    /// Returns `true` if the buffer has enough space to allocate the given layout.
    ///
    /// This method checks if there is enough space in the buffer to allocate
    /// memory with the specified layout, taking alignment requirements into account.
    ///
    /// # Parameters
    ///
    /// * `layout` - The memory layout to check
    ///
    /// # Returns
    ///
    /// `true` if there is enough space, `false` otherwise.
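    ///
    /// # Examples
    ///
    /// A quick capacity check (64 is already a power of two, so the requested capacity is not
    /// rounded up):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::BufferArena;
    ///
    /// let arena = BufferArena::new(64);
    /// assert!(arena.sufficient_for(Layout::from_size_align(64, 1).unwrap()));
    /// assert!(!arena.sufficient_for(Layout::from_size_align(128, 1).unwrap()));
    /// ```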
    #[inline(always)]
    pub fn sufficient_for(&self, layout: Layout) -> bool {
        self.ptr.align_offset(layout.align()) + layout.size() <= self.remaining()
    }
}

/// Implements conversion from `Chunk` to `BufferArena`.
///
/// This allows creating a `BufferArena` from an existing `Chunk`.
impl From<Chunk> for BufferArena {
    fn from(value: Chunk) -> Self {
        let ptr = value.base;
        Self { store: value, ptr }
    }
}

/// Implementation of the `Allocator` trait for `BufferArena`.
///
/// This implementation provides efficient memory allocation within a single
/// contiguous memory chunk using a bump allocation strategy. It follows the
/// LIFO (Last-In-First-Out) pattern for memory management.
impl Allocator for BufferArena {
    /// Allocates memory with the specified layout.
    ///
    /// This method allocates memory from the current position in the buffer,
    /// ensuring proper alignment. It advances the buffer pointer by the size
    /// of the allocation.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it returns a raw pointer that must be used
    /// carefully to avoid memory safety issues.
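    ///
    /// # Examples
    ///
    /// A sketch of a single bump allocation (assuming the trait is exported from the crate
    /// root, as the crate-level doc links suggest):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::new(128);
    /// let layout = Layout::from_size_align(16, 8).unwrap();
    /// let ptr = unsafe { arena.allocate(layout).unwrap() };
    /// // The returned slice pointer has exactly the requested length.
    /// assert_eq!(ptr.len(), 16);
    /// assert_eq!(arena.used(), 16);
    /// ```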
    #[inline(always)]
    unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align() <= 4096);
        if self.sufficient_for(layout) {
            let ptr = unsafe { self.ptr.add(self.ptr.align_offset(layout.align())) };
            self.ptr = unsafe { ptr.add(layout.size()) };
            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
        } else {
            Err(crate::AllocError {})
        }
    }

    /// Deallocates memory previously allocated by this allocator.
    ///
    /// This method resets the buffer pointer to the start of the deallocated
    /// memory, effectively "freeing" all memory allocated after it as well.
    /// This implements the LIFO deallocation pattern.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
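    ///
    /// # Examples
    ///
    /// A sketch of the LIFO bulk-free behaviour: rewinding to an earlier pointer also frees
    /// everything allocated after it (same assumption about the exported trait as above).
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::new(64);
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let first = unsafe { arena.allocate(layout).unwrap() };
    /// let _second = unsafe { arena.allocate(layout).unwrap() };
    /// assert_eq!(arena.used(), 16);
    ///
    /// // Deallocating `first` releases both allocations.
    /// unsafe { arena.deallocate(first.cast(), layout) };
    /// assert_eq!(arena.used(), 0);
    /// ```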
    #[inline(always)]
    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.store.contains(ptr.cast()));
        debug_assert!(unsafe { ptr.add(layout.size()) } <= self.store.limit);
        if self.store.contains(ptr) {
            self.ptr = ptr;
        }
    }

    /// Grows a previously allocated memory block to a larger size.
    ///
    /// This method extends the size of an existing allocation by advancing
    /// the buffer pointer. It **only supports growing the last allocation**
    /// (following LIFO pattern). Attempting to grow any other allocation
    /// will trigger a debug assertion failure.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
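    ///
    /// # Examples
    ///
    /// A sketch of growing the most recent allocation in place (same assumption about the
    /// exported trait as above):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::new(64);
    /// let old_layout = Layout::from_size_align(8, 1).unwrap();
    /// let new_layout = Layout::from_size_align(24, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(old_layout).unwrap() };
    /// // Growing the last allocation just advances the bump pointer.
    /// let grown = unsafe { arena.grow(ptr.cast(), old_layout, new_layout).unwrap() };
    /// assert_eq!(grown.len(), 24);
    /// assert_eq!(arena.used(), 24);
    /// ```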
    #[inline(always)]
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "grow is only supported for the last allocation: {ptr:?} old: {old_layout:?}, new: {new_layout:?}"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Less => {
                debug_assert!(unsafe { ptr.add(new_layout.size()) } <= self.store.limit);
                self.ptr = unsafe { ptr.add(new_layout.size()) };
                Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Greater => unreachable!("use shrink instead"),
        }
    }

    /// Shrinks a previously allocated memory block to a smaller size.
    ///
    /// This method reduces the size of an existing allocation by moving
    /// the buffer pointer back. It assumes the memory block being shrunk is the
    /// most recently allocated block (following LIFO pattern).
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
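    ///
    /// # Examples
    ///
    /// A sketch of shrinking the most recent allocation (same assumption about the exported
    /// trait as above):
    ///
    /// ```
    /// use std::alloc::Layout;
    /// use stack_arena::{Allocator, BufferArena};
    ///
    /// let mut arena = BufferArena::new(64);
    /// let old_layout = Layout::from_size_align(24, 1).unwrap();
    /// let new_layout = Layout::from_size_align(8, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(old_layout).unwrap() };
    /// // Shrinking moves the bump pointer back, releasing the tail of the block.
    /// let shrunk = unsafe { arena.shrink(ptr.cast(), old_layout, new_layout).unwrap() };
    /// assert_eq!(shrunk.len(), 8);
    /// assert_eq!(arena.used(), 8);
    /// ```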
    #[inline(always)]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(unsafe { ptr.add(old_layout.size()) }, self.ptr);
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Greater => {
                self.ptr = unsafe { ptr.add(new_layout.size()) };
                Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Less => Err(crate::AllocError {}),
        }
    }
}

/// Implements conversion from `BufferArena` to `Chunk`.
///
/// This allows extracting the underlying chunk from a `BufferArena`.
impl From<BufferArena> for Chunk {
    /// Converts the buffer arena into its underlying chunk.
    #[inline(always)]
    fn from(value: BufferArena) -> Self {
        value.store
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_buffer_arena_new_and_capacity() {
        let cap = 63;
        let arena = BufferArena::new(cap);
        assert_eq!(arena.used(), 0);
        // The requested capacity of 63 is rounded up to the next power of two.
        assert_eq!(arena.remaining(), 64);
    }

    #[test]
    fn test_chunk_conversion() {
        let chunk = Chunk::new(128);
        let arena: BufferArena = chunk.into();
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 128);

        let to_chunk: Chunk = arena.into();
        assert_eq!(to_chunk.capacity(), 128);
    }

    #[test]
    fn test_allocate_and_deallocate() {
        let mut arena = BufferArena::new(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 8);
        assert_eq!(arena.used(), 8);

        // Deallocating the most recent allocation resets the offset (LIFO)
        unsafe { arena.deallocate(ptr.cast(), layout) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_alignment() {
        let mut arena = BufferArena::new(128);
        unsafe { arena.ptr.write_bytes(0, arena.remaining()) };
        let mut prev_end = arena.ptr;

        for (i, align) in [1, 2, 4, 8, 16, 32, 4096].into_iter().rev().enumerate() {
            let size = i + 1;
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { arena.allocate_zeroed(layout).unwrap() };
            let addr = ptr.cast::<u8>().as_ptr() as usize;
            // Check alignment
            assert_eq!(addr % align, 0, "addr {ptr:?} not aligned to {align}");
            // Write data and verify
            let fill = size as u8;
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            let data = unsafe { ptr.as_ref() };
            assert_eq!(data, vec![fill; size].as_slice());

            // Ensure allocations do not overlap
            assert!(ptr.cast() >= prev_end, "Allocation overlapped previous");
            prev_end = unsafe { ptr.cast().add(layout.size()) };
        }
        // With the alignment padding above, the seven blocks end at offset 79.
        assert_eq!(arena.used(), 79);
        let written =
            unsafe { std::slice::from_raw_parts(arena.store.base.as_ptr(), arena.used()) };
        assert_eq!(
            written,
            [
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 0, 0, 0,
                0, 0, 4, 4, 4, 4, 5, 5, 5, 5, 5, 0, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7
            ]
            .as_ref()
        );
        unsafe { arena.deallocate(arena.store.base, Layout::from_size_align_unchecked(8, 1)) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_allocate_full_arena() {
        let mut arena = BufferArena::new(16);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 16);
        assert_eq!(arena.used(), 16);

        // Next allocation should fail
        let layout2 = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { arena.allocate(layout2) }.is_err());
    }

    #[test]
    fn test_grow_allocation() {
        let mut arena = BufferArena::new(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(16, 1).unwrap();
        let grown = unsafe { arena.grow(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(grown.len(), 16);
        assert_eq!(arena.used(), 16);
    }

    #[test]
    fn test_shrink_allocation() {
        let mut arena = BufferArena::new(32);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(8, 1).unwrap();
        let shrunk = unsafe { arena.shrink(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(shrunk.len(), 8);
        // The used offset should be reduced accordingly
        assert_eq!(arena.used(), 8);
    }

    #[test]
    fn test_multiple_allocate_and_deallocate() {
        use std::alloc::Layout;

        let mut arena = BufferArena::new(64);
        let layout = Layout::from_size_align(8, 1).unwrap();

        // Allocate and deallocate 5 times in a row
        for _ in 0..5 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            assert_eq!(ptr.len(), 8);
            assert_eq!(arena.used(), 8);
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());

            // Deallocating the most recent allocation resets the offset (LIFO)
            unsafe { arena.deallocate(ptr.cast(), layout) };
            assert_eq!(arena.used(), 0);
        }

        // Allocate several, then deallocate in reverse order
        let mut ptrs = Vec::new();
        for _ in 0..4 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());
            ptrs.push(ptr);
        }
        assert_eq!(arena.used(), 32);

        for ptr in ptrs.into_iter().rev() {
            unsafe { arena.deallocate(ptr.cast(), layout) };
        }
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_multi_alloc() {
        // Create a BufferArena with a very small chunk size
        let mut arena = BufferArena::new(16);

        // First allocation - should fit in the chunk
        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { arena.allocate(layout) }.unwrap();
        unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };

        // Verify data
        let data1 = unsafe { ptr.as_ref() };
        assert_eq!(data1, [0xAA; 8].as_slice());

        // Check remaining space
        let remaining = arena.remaining();
        assert_eq!(remaining, 8);

        // Grow the last allocation in place; the remaining space is sufficient
        let new_layout = Layout::from_size_align(layout.size() + 4, layout.align()).unwrap();
        let grown_ptr = unsafe { arena.grow(ptr.cast(), layout, new_layout) }.unwrap();

        // Write to the newly grown portion
        unsafe {
            grown_ptr
                .cast::<u8>()
                .add(layout.size())
                .write_bytes(0xBB, 4)
        };

        assert_eq!(arena.remaining(), 4);

        // Verify the grown data
        let grown_data = unsafe {
            std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, new_layout.size())
        };
        let mut expected = vec![0xAA; layout.size()];
        expected.extend_from_slice(&[0xBB; 4]);
        assert_eq!(grown_data, expected.as_slice());

        // A final 4-byte, 4-aligned allocation exactly fills the remaining space
        let layout = unsafe { Layout::from_size_align_unchecked(4, 4) };
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), layout.size());
        assert_eq!(arena.remaining(), 0);
    }
}