stack_arena/stack_arena.rs

//! A fast, stack-like arena allocator for efficient memory management.
//!
//! # Overview
//!
//! `StackArena` provides an efficient memory allocation strategy for scenarios where:
//! - Multiple objects need to be allocated in sequence
//! - Objects follow a stack-like (LIFO) allocation/deallocation pattern
//! - Memory usage needs to be minimized with low per-allocation overhead
//! - Performance is critical, such as in parsers, interpreters, or compilers
//!
//! # Features
//!
//! - Efficient allocation of variable-sized objects with minimal overhead
//! - Stack-like (LIFO) allocation and deallocation pattern
//! - Memory is managed in chunks, automatically growing when needed
//! - Implements the [`Allocator`](crate::Allocator) trait for compatibility with allocation APIs
//! - Low-level control over allocations through raw pointers and `Layout`s
//! - Automatic chunk reuse for improved performance
//! - Significantly faster than the system allocator for small, frequent allocations
//!
//! # Usage Examples
//!
//! Basic usage:
//!
//! ```
//! use stack_arena::{StackArena, Allocator};
//!
//! let mut arena = StackArena::new();
//!
//! // Allocate memory for an object
//! let layout = std::alloc::Layout::from_size_align(10, 1).unwrap();
//! let ptr = unsafe { arena.allocate(layout).unwrap() };
//!
//! // Use the allocated memory
//! unsafe { std::ptr::write_bytes(ptr.as_ptr() as *mut u8, 0xAA, 10) };
//!
//! // Deallocate when done
//! unsafe { arena.deallocate(ptr.cast(), layout) };
//! ```
//!
//! Creating with a custom chunk size:
//!
//! ```
//! use stack_arena::StackArena;
//!
//! // Create an arena with 8KB chunks
//! let mut arena = StackArena::with_chunk_size(8192);
//! assert!(arena.is_empty());
//! ```
//!
//! For higher-level operations, use [`ObjectStack`](crate::ObjectStack) instead:
//!
//! ```
//! use stack_arena::ObjectStack;
//!
//! let mut stack = ObjectStack::new();
//! stack.extend("Hello, ");
//! stack.extend("world!");
//! let ptr = stack.finish(); // Finalizes the object
//! ```
//!
//! # Memory Management
//!
//! `StackArena` manages memory in chunks:
//!
//! - When the current chunk is full, a new larger chunk is allocated
//! - Old chunks are stored for later reuse when objects are deallocated
//! - Memory is allocated contiguously within chunks for better cache locality
//! - The LIFO pattern allows for efficient memory reuse
//!
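//! For example, an arena created with a small chunk size transparently grows into
//! new chunks while keeping earlier allocations valid (a minimal sketch; the
//! 32-byte chunk size is chosen only for illustration):
//!
//! ```
//! use stack_arena::{StackArena, Allocator};
//! use std::alloc::Layout;
//!
//! let mut arena = StackArena::with_chunk_size(32);
//!
//! let small = Layout::from_size_align(16, 1).unwrap();
//! let large = Layout::from_size_align(64, 1).unwrap();
//!
//! let a = unsafe { arena.allocate(small).unwrap() };
//! // 64 bytes do not fit in the 32-byte chunk, so a new chunk is allocated;
//! // the pointer `a` remains valid.
//! let b = unsafe { arena.allocate(large).unwrap() };
//! assert_eq!(arena.len(), 2);
//!
//! // Release in LIFO order.
//! unsafe {
//!     arena.deallocate(b.cast(), large);
//!     arena.deallocate(a.cast(), small);
//! }
//! assert!(arena.is_empty());
//! ```
//!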
//! # Safety
//!
//! - All returned pointers are valid until the corresponding object is deallocated
//! - Objects are stored in contiguous memory chunks
//! - Unsafe operations are used internally for performance
//! - The library follows a LIFO (Last-In-First-Out) allocation pattern for efficiency
//! - Users must ensure proper memory safety when working with raw pointers

use std::{alloc::Layout, ptr::NonNull};

use crate::{Allocator, BufferArena, chunk::Chunk};

#[derive(Debug)]
/// A stack-like arena allocator that efficiently manages memory in chunks.
///
/// `StackArena` provides a low-level memory allocation strategy where objects are allocated
/// in a stack-like fashion (Last-In-First-Out). It manages memory in chunks, automatically
/// allocating new chunks when needed, which reduces allocation overhead and improves performance.
///
/// # Memory Management
///
/// - Memory is organized in chunks, with a current active chunk for allocations
/// - When the current chunk is full, a new larger chunk is allocated
/// - Old chunks are stored to keep previous allocations valid until explicitly deallocated
/// - Objects are allocated contiguously within chunks for better cache locality
/// - Follows a LIFO (Last-In-First-Out) allocation/deallocation pattern
///
/// # Use Cases
///
/// `StackArena` is particularly well-suited for:
///
/// - Parsers and compilers that need to allocate many temporary objects
/// - Interpreters that need efficient memory management for short-lived objects
/// - Data processing pipelines with predictable allocation patterns
/// - Any scenario where many small allocations are made in sequence
///
/// # Thread Safety
///
/// `StackArena` is not thread-safe and should not be shared between threads
/// without external synchronization.
///
/// # Performance
///
/// This allocator is optimized for scenarios with many small allocations that follow
/// a stack-like pattern. Benchmarks show it significantly outperforms the system allocator:
///
/// - Up to 10x faster for consecutive small allocations
/// - Minimal overhead for allocation and deallocation
/// - Efficient memory reuse with the LIFO pattern
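///
/// # Examples
///
/// A minimal LIFO round trip using the inherent helpers (`len`, `top`, and `pop`);
/// a sketch intended only to illustrate the stack discipline:
///
/// ```
/// use stack_arena::{StackArena, Allocator};
/// use std::alloc::Layout;
///
/// let mut arena = StackArena::new();
/// let layout = Layout::from_size_align(4, 1).unwrap();
/// let _first = unsafe { arena.allocate(layout).unwrap() };
/// let second = unsafe { arena.allocate(layout).unwrap() };
/// assert_eq!(arena.len(), 2);
///
/// // `top` peeks at the most recent allocation without removing it.
/// assert_eq!(arena.top(), Some(second));
///
/// // `pop` removes allocations in LIFO order.
/// arena.pop();
/// arena.pop();
/// assert!(arena.is_empty());
/// ```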
pub struct StackArena {
    /// Stores old chunks to keep previous allocations valid until explicitly deallocated.
    /// This ensures that pointers returned by `allocate` remain valid even after new chunks are allocated.
    store: Vec<Chunk>,

    /// Tracks all allocated objects in LIFO order for proper deallocation.
    stack: Vec<NonNull<[u8]>>,

    /// The current active chunk used for new allocations.
    current: BufferArena,

    /// Default chunk size used when allocating new chunks.
    /// When a new chunk is needed, its size will be at least this value.
    default_chunk_size: usize,
}

impl Default for StackArena {
    fn default() -> Self {
        let default_chunk_size = 1 << 12;
        Self {
            store: Vec::with_capacity(16),
            stack: Vec::with_capacity(256),
            current: Default::default(),
            default_chunk_size,
        }
    }
}

impl StackArena {
    /// Creates a new empty `StackArena` with a default initial capacity.
    ///
    /// The initial chunk size is 4096 bytes.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::StackArena;
    ///
    /// let arena = StackArena::new();
    /// assert!(arena.is_empty());
    /// ```
    #[inline(always)]
    pub fn new() -> Self {
        Self::with_chunk_size(4096)
    }

    /// Creates a new empty `StackArena` with a specified chunk size.
    ///
    /// # Parameters
    ///
    /// * `chunk_size` - The size in bytes to use for new chunks
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::StackArena;
    ///
    /// // Create an arena with 4096-byte chunks
    /// let arena = StackArena::with_chunk_size(4096);
    /// assert!(arena.is_empty());
    /// ```
    #[inline]
    pub fn with_chunk_size(chunk_size: usize) -> Self {
        let current = BufferArena::with_capacity(chunk_size);
        Self {
            store: Vec::with_capacity(4),
            stack: Vec::with_capacity(32),
            current,
            default_chunk_size: chunk_size,
        }
    }

    /// Returns the number of objects currently on the stack.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    /// assert_eq!(arena.len(), 0);
    ///
    /// // Allocate memory for an object
    /// let layout = Layout::from_size_align(5, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(layout).unwrap() };
    /// assert_eq!(arena.len(), 1);
    /// ```
    #[inline]
    pub fn len(&self) -> usize {
        self.stack.len()
    }

    /// Returns `true` if the arena contains no objects.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    /// assert!(arena.is_empty());
    ///
    /// // Allocate memory for an object
    /// let layout = Layout::from_size_align(5, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(layout).unwrap() };
    /// assert!(!arena.is_empty());
    /// ```
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Removes the most recently pushed object from the stack.
    ///
    /// This method follows the LIFO (Last-In-First-Out) principle.
    /// After popping, any pointers to the popped object become invalid.
    ///
    /// Returns the pointer to the removed object, or `None` if the stack is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    ///
    /// // Allocate first object
    /// let layout1 = Layout::from_size_align(5, 1).unwrap();
    /// let ptr1 = unsafe { arena.allocate(layout1).unwrap() };
    ///
    /// // Allocate second object
    /// let layout2 = Layout::from_size_align(5, 1).unwrap();
    /// let ptr2 = unsafe { arena.allocate(layout2).unwrap() };
    /// assert_eq!(arena.len(), 2);
    ///
    /// arena.pop();
    /// assert_eq!(arena.len(), 1);
    /// ```
    #[inline]
    pub fn pop(&mut self) -> Option<NonNull<[u8]>> {
        let ptr = *self.stack.last()?;
        let layout = Layout::for_value(unsafe { ptr.as_ref() });
        unsafe { self.deallocate(ptr.cast(), layout) };
        Some(ptr)
    }

    /// Returns the top object on the stack without removing it.
    ///
    /// This method returns a pointer to the most recently allocated object
    /// on the stack.
    ///
    /// # Returns
    ///
    /// An optional non-null pointer to the top object, or `None` if the stack is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(layout).unwrap() };
    ///
    /// // Get the top object
    /// let top = arena.top();
    /// assert!(top.is_some());
    /// ```
    #[inline]
    pub fn top(&mut self) -> Option<NonNull<[u8]>> {
        self.stack.last().copied()
    }

    /// Rolls back to a specific object, freeing it and all objects allocated after it.
    ///
    /// This method allows for rolling back to a specific point in the allocation
    /// history by providing a reference to an object on the stack.
    ///
    /// # Parameters
    ///
    /// * `data` - A reference to the object to free, along with all objects
    ///   allocated after it.
    ///
    /// # Examples
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    ///
    /// // Allocate some objects
    /// let layout1 = Layout::from_size_align(8, 1).unwrap();
    /// let ptr1 = unsafe { arena.allocate(layout1).unwrap() };
    ///
    /// let layout2 = Layout::from_size_align(16, 1).unwrap();
    /// let ptr2 = unsafe { arena.allocate(layout2).unwrap() };
    ///
    /// // Roll back to the first object
    /// unsafe {
    ///     arena.rollback(ptr1.as_ref());
    /// }
    /// assert_eq!(arena.len(), 0); // All objects are freed
    /// ```
    #[inline]
    pub fn rollback(&mut self, data: &[u8]) {
        unsafe {
            self.deallocate(
                NonNull::new_unchecked(data.as_ptr().cast_mut()),
                Layout::for_value(data),
            )
        };
    }
}

/// Implementation of the `Allocator` trait for `StackArena`.
///
/// This implementation allows `StackArena` to be used with APIs that require
/// an allocator. The implementation follows the stack-like allocation pattern,
/// where memory is allocated in chunks and objects are allocated contiguously.
///
/// # Safety
///
/// The implementation uses unsafe code internally to manage memory efficiently.
/// Users should follow the LIFO (Last-In-First-Out) pattern when deallocating
/// memory to ensure proper behavior.
///
/// # Performance
///
/// This allocator is optimized for scenarios with many small allocations that follow
/// a stack-like pattern. It significantly outperforms the system allocator in these cases
/// as demonstrated in the benchmarks.
impl Allocator for StackArena {
    /// Allocates memory with the specified layout.
    ///
    /// This method allocates a new object with the given layout in the current chunk.
    /// If the current chunk doesn't have enough space, a new chunk is allocated.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it returns a raw pointer that must be used
    /// carefully to avoid memory safety issues.
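    ///
    /// # Examples
    ///
    /// A sketch: an allocation larger than the configured chunk size still succeeds;
    /// the arena simply starts a new, sufficiently large chunk for it.
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::with_chunk_size(64);
    /// let big = Layout::from_size_align(256, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(big).unwrap() };
    /// assert_eq!(arena.len(), 1);
    /// ```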
    unsafe fn allocate(
        &mut self,
        layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, crate::AllocError> {
        if !self.current.sufficient_for(layout) {
            // The current chunk cannot hold this allocation; retire it and
            // start a new chunk that is at least `default_chunk_size` bytes.
            let capacity = layout.size().max(self.default_chunk_size);
            let prev = std::mem::replace(&mut self.current, BufferArena::with_capacity(capacity));
            self.store.push(prev.into());
        }

        unsafe {
            let object = self.current.allocate(layout)?;
            self.stack.push(object);
            Ok(object)
        }
    }

    /// Deallocates memory previously allocated by `allocate`.
    ///
    /// This method frees the specified object and all objects allocated after it,
    /// following the stack-like (LIFO) deallocation pattern.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
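    ///
    /// # Examples
    ///
    /// A minimal sketch of the LIFO behavior: deallocating an earlier allocation
    /// also releases everything allocated after it.
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let first = unsafe { arena.allocate(layout).unwrap() };
    /// let _second = unsafe { arena.allocate(layout).unwrap() };
    ///
    /// // Deallocating `first` also releases `_second`.
    /// unsafe { arena.deallocate(first.cast(), layout) };
    /// assert!(arena.is_empty());
    /// ```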
    #[inline]
    unsafe fn deallocate(&mut self, ptr: std::ptr::NonNull<u8>, layout: std::alloc::Layout) {
        let object = NonNull::slice_from_raw_parts(ptr, layout.size());
        // Pop stack entries until the requested object is found; everything
        // allocated after it is released as well (LIFO semantics).
        while let Some(top) = self.stack.pop() {
            if std::ptr::eq(object.as_ptr(), top.as_ptr()) {
                break;
            }
        }
        if self.current.contains(ptr) {
            unsafe { self.current.deallocate(ptr, layout) };
            return;
        }
        // Drop stored chunks that were created after the chunk containing `ptr`.
        while self.store.pop_if(|chunk| !chunk.contains(ptr)).is_some() {}
        // Reactivate the chunk that contains `ptr` and rewind its bump pointer.
        if let Some(chunk) = self.store.pop() {
            debug_assert!(chunk.contains(ptr));
            self.current = chunk.into();
        }
        self.current.ptr = ptr;
    }

    /// Grows a previously allocated memory block to a larger size.
    ///
    /// This method extends an existing allocation and is used internally by the
    /// `extend` method. It **only supports growing the last allocation**
    /// (following the LIFO pattern); attempting to grow any other allocation
    /// will trigger a debug assertion failure.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
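    ///
    /// # Examples
    ///
    /// A sketch of growing the most recent allocation; the original bytes are
    /// preserved even if the data has to move to a new chunk.
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    /// let old = Layout::from_size_align(8, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(old).unwrap() };
    /// unsafe { ptr.cast::<u8>().write_bytes(0xAA, 8) };
    ///
    /// // Only the most recent allocation may be grown.
    /// let new = Layout::from_size_align(16, 1).unwrap();
    /// let grown = unsafe { arena.grow(ptr.cast(), old, new).unwrap() };
    ///
    /// let bytes = unsafe { std::slice::from_raw_parts(grown.as_ptr() as *const u8, 8) };
    /// assert_eq!(bytes, [0xAA; 8].as_slice());
    /// ```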
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        let top = self.stack.pop().unwrap();
        debug_assert_eq!(
            top.cast().as_ptr(),
            ptr.as_ptr(),
            "this operation is only supported for the last allocation"
        );
        // If the current chunk cannot absorb the extra bytes, move the
        // allocation into a fresh chunk and copy the existing data over.
        let object = if self.current.remaining() < new_layout.size() - old_layout.size() {
            let capacity = new_layout.size().max(self.default_chunk_size);
            let prev = std::mem::replace(&mut self.current, BufferArena::with_capacity(capacity));
            self.store.push(prev.into());
            unsafe {
                let object = self.current.allocate(new_layout)?;
                object
                    .cast()
                    .copy_from_nonoverlapping(ptr, old_layout.size());
                object
            }
        } else {
            // Otherwise grow in place within the current chunk.
            unsafe { self.current.grow(ptr, old_layout, new_layout) }?
        };
        self.stack.push(object);
        Ok(object)
    }

    /// Shrinks a previously allocated memory block to a smaller size.
    ///
    /// This method is used to reduce the size of an existing allocation.
    /// It **only supports shrinking the last allocation** (following the LIFO pattern).
    ///
    /// # Safety
    ///
    /// This method is unsafe because it must be called with a pointer returned
    /// by `allocate` and the same layout that was used to allocate it.
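    ///
    /// # Examples
    ///
    /// A sketch of shrinking the most recent allocation back down:
    ///
    /// ```
    /// use stack_arena::{StackArena, Allocator};
    /// use std::alloc::Layout;
    ///
    /// let mut arena = StackArena::new();
    /// let old = Layout::from_size_align(16, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(old).unwrap() };
    ///
    /// // Only the most recent allocation may be shrunk.
    /// let new = Layout::from_size_align(8, 1).unwrap();
    /// let _shrunk = unsafe { arena.shrink(ptr.cast(), old, new).unwrap() };
    /// assert_eq!(arena.len(), 1);
    /// ```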
    #[inline]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        let top = self.stack.pop().unwrap();
        debug_assert_eq!(
            top.cast().as_ptr(),
            ptr.as_ptr(),
            "this operation is only supported for the last allocation"
        );
        debug_assert_eq!(self.current.ptr, unsafe { ptr.add(old_layout.size()) });
        let object = unsafe { self.current.shrink(ptr, old_layout, new_layout) }?;
        self.stack.push(object);
        Ok(object)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new() {
        let stack = StackArena::new();
        assert_eq!(stack.len(), 0);
        assert!(stack.is_empty());
    }

    #[test]
    fn test_allocate_deallocate() {
        let mut stack = StackArena::new();

        // Allocate some memory
        let layout = Layout::from_size_align(16, 8).unwrap();
        let ptr = unsafe { stack.allocate(layout) }.unwrap();

        // Write some data
        unsafe { std::ptr::write_bytes(ptr.as_ptr() as *mut u8, 0xAA, 16) };

        // Verify data
        let data = unsafe { std::slice::from_raw_parts(ptr.as_ptr() as *const u8, 16) };
        assert_eq!(data, [0xAA; 16].as_slice());

        // Deallocate
        unsafe { stack.deallocate(ptr.cast(), layout) };
        assert_eq!(stack.len(), 0);
    }

    #[test]
    fn test_multi_allocations() {
        let mut stack = StackArena::new();
        let n = 10;
        let mut allocations = Vec::with_capacity(n);

        // Allocate multiple objects
        for i in 0..n {
            let layout = Layout::array::<u8>(i + 1).unwrap();
            let ptr = unsafe { stack.allocate(layout) }.unwrap();

            // Write some data
            unsafe { std::ptr::write_bytes(ptr.as_ptr() as *mut u8, i as u8, i + 1) };

            allocations.push((ptr, layout));
        }

        assert_eq!(stack.len(), n);

        // Verify data
        for (i, (ptr, _)) in allocations.iter().enumerate() {
            let data = unsafe { std::slice::from_raw_parts(ptr.as_ptr() as *const u8, i + 1) };
            assert_eq!(data, vec![i as u8; i + 1].as_slice());
        }

        // Deallocate in LIFO order
        for (ptr, layout) in allocations.into_iter().rev() {
            unsafe { stack.deallocate(ptr.cast(), layout) };
        }

        assert_eq!(stack.len(), 0);
    }

    #[test]
    fn test_grow_shrink() {
        let mut stack = StackArena::new();

        // Allocate initial memory
        let initial_layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { stack.allocate(initial_layout) }.unwrap();

        // Write initial data
        unsafe { std::ptr::write_bytes(ptr.as_ptr() as *mut u8, 0xAA, 8) };

        // Grow the allocation
        let new_layout = Layout::from_size_align(16, 8).unwrap();
        let grown_ptr = unsafe { stack.grow(ptr.cast(), initial_layout, new_layout) }.unwrap();

        // Verify initial data is preserved
        let data = unsafe { std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, 8) };
        assert_eq!(data, [0xAA; 8].as_slice());

        // Write to the new space
        unsafe { std::ptr::write_bytes((grown_ptr.as_ptr() as *mut u8).add(8), 0xBB, 8) };

        // Shrink back to original size
        let shrunk_ptr =
            unsafe { stack.shrink(grown_ptr.cast(), new_layout, initial_layout) }.unwrap();

        // Verify data
        let final_data = unsafe { std::slice::from_raw_parts(shrunk_ptr.as_ptr() as *const u8, 8) };
        assert_eq!(final_data, [0xAA; 8].as_slice());

        // Deallocate
        unsafe { stack.deallocate(shrunk_ptr.cast(), initial_layout) };
        assert_eq!(stack.len(), 0);
    }

    /// Helper function to allocate memory and write data to it
    #[inline]
    fn allocate_and_write<A: Allocator>(allocator: &mut A, size: usize) -> (NonNull<[u8]>, Layout) {
        let layout = Layout::from_size_align(size, 8).unwrap();
        let ptr = unsafe { allocator.allocate(layout).unwrap() };

        // Write some data
        unsafe { std::ptr::write_bytes(ptr.as_ptr() as *mut u8, 0xAA, size) };

        (ptr, layout)
    }

    /// Helper function to perform consecutive allocations
    #[inline]
    fn perform_consecutive_allocations<A: Allocator>(
        allocator: &mut A,
        count: usize,
    ) -> Vec<(NonNull<[u8]>, Layout)> {
        let mut ptrs = Vec::with_capacity(count);

        for i in 0..count {
            // Use different small sizes for each allocation
            let size = (i % 8) + 1; // Sizes from 1 to 8 bytes
            let (ptr, layout) = allocate_and_write(allocator, size);
            ptrs.push((ptr, layout));
        }

        ptrs
    }

    #[test]
    fn test_consecutive_allocations() {
        let mut stack = StackArena::new();
        let allocations = perform_consecutive_allocations(&mut stack, 10000);

        assert_eq!(stack.len(), 10000);

        // Deallocate in LIFO order
        for (ptr, layout) in allocations.into_iter().rev() {
            unsafe { stack.deallocate(ptr.cast(), layout) };
        }

        assert_eq!(stack.len(), 0);
    }

    #[test]
    fn test_custom_chunk_size() {
        // Create a stack arena with a custom chunk size
        let mut stack = StackArena::with_chunk_size(256);

        // Allocate an object that fits within the chunk
        let small_layout = Layout::from_size_align(64, 8).unwrap();
        let small_ptr = unsafe { stack.allocate(small_layout) }.unwrap();

        // Allocate a larger object that is still smaller than our custom chunk size (256)
        let medium_layout = Layout::from_size_align(128, 8).unwrap();
        let medium_ptr = unsafe { stack.allocate(medium_layout) }.unwrap();

        // Allocate an object that exceeds our custom chunk size
        let large_layout = Layout::from_size_align(512, 8).unwrap();
        let large_ptr = unsafe { stack.allocate(large_layout) }.unwrap();

        // Verify all allocations succeeded
        assert_eq!(stack.len(), 3);

        // Clean up
        unsafe {
            stack.deallocate(large_ptr.cast(), large_layout);
            stack.deallocate(medium_ptr.cast(), medium_layout);
            stack.deallocate(small_ptr.cast(), small_layout);
        }

        assert_eq!(stack.len(), 0);
    }

    #[test]
    fn test_cross_chunk_allocation_deallocation() {
        // Create a stack arena with a very small chunk size to easily trigger new chunk allocations
        let mut stack = StackArena::with_chunk_size(32);

        // Allocate objects that will span across multiple chunks
        let mut allocations = Vec::new();

        let specs = [
            (16, 8, 0xAA),
            (8, 8, 0xBB),
            (16, 8, 0xCC),
            (8, 8, 0xDD),
            (39, 4, 0xEE),
        ];
        for (size, align, fill) in specs {
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { stack.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            allocations.push((ptr, layout));
        }

        // Verify all allocations
        assert_eq!(stack.len(), specs.len());

        for ((size, _align, fill), (ptr, _layout)) in std::iter::zip(specs, &allocations) {
            let data = unsafe { ptr.as_ref() };
            let expected = vec![fill; size];
            assert_eq!(data, &expected);
        }

        // Deallocate in LIFO order to test chunk reuse
        for (ptr, layout) in allocations.into_iter().rev() {
            unsafe { stack.deallocate(ptr.cast(), layout) };
        }

        // Verify all memory has been deallocated
        assert_eq!(stack.len(), 0);

        // Allocate again to test chunk reuse
        let layout5 = Layout::from_size_align(24, 8).unwrap();
        let ptr5 = unsafe { stack.allocate(layout5) }.unwrap();
        unsafe { ptr5.cast::<u8>().write_bytes(0xFF, layout5.size()) };

        // Verify the new allocation
        assert_eq!(stack.len(), 1);
        let data5 =
            unsafe { std::slice::from_raw_parts(ptr5.as_ptr() as *const u8, layout5.size()) };
        assert_eq!(data5, [0xFF; 24].as_slice());
    }

    #[test]
    fn test_stack_arena_grow_with_new_chunk() {
        // Create a stack arena with a very small chunk size
        let mut stack = StackArena::with_chunk_size(32);

        // First allocation - should fit in the first chunk
        let layout1 = Layout::from_size_align(8, 8).unwrap();
        let ptr1 = unsafe { stack.allocate(layout1) }.unwrap();
        unsafe { ptr1.cast::<u8>().write_bytes(0xAA, layout1.size()) };

        // Allocate a second object to fill the first chunk
        let layout2 = Layout::from_size_align(16, 8).unwrap();
        let ptr2 = unsafe { stack.allocate(layout2) }.unwrap();
        unsafe { ptr2.cast::<u8>().write_bytes(0xBB, layout2.size()) };

        // Verify data integrity
        let data1 =
            unsafe { std::slice::from_raw_parts(ptr1.as_ptr() as *const u8, layout1.size()) };
        assert_eq!(data1, [0xAA; 8].as_slice());

        let data2 =
            unsafe { std::slice::from_raw_parts(ptr2.as_ptr() as *const u8, layout2.size()) };
        assert_eq!(data2, [0xBB; 16].as_slice());

        // Deallocate the second object
        unsafe { stack.deallocate(ptr2.cast(), layout2) };

        // Now grow the first allocation to trigger a new chunk allocation
        // The new size won't fit in the first chunk, so it should be copied to a new chunk
        let layout1_grown = Layout::from_size_align(24, 8).unwrap();

        let ptr1_grown = unsafe { stack.grow(ptr1.cast(), layout1, layout1_grown) }.unwrap();

        // Write to the newly grown portion
        unsafe {
            ptr1_grown
                .cast::<u8>()
                .add(layout1.size())
                .write_bytes(0xCC, layout1_grown.size() - layout1.size())
        };

        // Verify the grown object has correct data
        let data1_grown = unsafe {
            std::slice::from_raw_parts(ptr1_grown.as_ptr() as *const u8, layout1_grown.size())
        };
        let mut expected1_grown = vec![0xAA; layout1.size()];
        expected1_grown.extend_from_slice(&vec![0xCC; layout1_grown.size() - layout1.size()]);
        assert_eq!(data1_grown, expected1_grown.as_slice());

        // Final cleanup
        unsafe { stack.deallocate(ptr1_grown.cast(), layout1_grown) };
        assert_eq!(stack.len(), 0);
    }
}
771}