zigzag_alloc/alloc/arena.rs
//! Block-based arena allocator with bulk deallocation.
//!
//! An [`ArenaAllocator`] tracks every individual allocation in a singly-linked
//! list of [`Header`]-prefixed blocks. Each call to
//! [`alloc`](Allocator::alloc) allocates a fresh block from the *backing*
//! allocator; [`dealloc`](Allocator::dealloc) is a deliberate no-op. All
//! memory is reclaimed at once via [`reset`](ArenaAllocator::reset) or when
//! the arena is dropped.
//!
//! ## Trade-offs
//!
//! | Pro | Con |
//! |-----|-----|
//! | Individual allocations are very cheap | Each allocation has a `Header` overhead |
//! | All memory is freed with one `reset` call (O(n) in live allocations) | `dealloc` is a no-op; no reuse within a cycle |
//! | Works with any backing [`Allocator`] | Not thread-safe (`Cell` internals) |
//!
//! ## Example
//!
//! ```rust,ignore
//! let sys = SystemAllocator;
//! let arena = ArenaAllocator::new(&sys);
//! let vec: ExVec<u32> = ExVec::new(&arena);
//! // ... use vec ...
//! arena.reset(); // frees everything at once
//! ```

use core::{
    alloc::Layout,
    cell::Cell,
    ptr::{self, NonNull},
};

use super::allocator::Allocator;
use crate::simd;

// ── Header ───────────────────────────────────────────────────────────────────

/// Internal per-block metadata written at the *start* of every arena
/// allocation.
///
/// The linked list formed by `next` pointers allows [`ArenaAllocator::reset`]
/// to walk all live allocations and return them to the backing allocator.
///
/// # Memory Layout
///
/// ```text
/// ┌──────────────────────────┬───────────────────────────────┐
/// │ Header (user_offset)     │ User data (user_size)         │
/// └──────────────────────────┴───────────────────────────────┘
/// ^                          ^
/// full_ptr (backing alloc)   full_ptr + user_offset (returned to caller)
/// ```
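///
/// # Example
///
/// A minimal sketch of how these fields relate; the concrete numbers depend
/// on the target's `Header` size and the user layout:
///
/// ```rust,ignore
/// let user = Layout::from_size_align(16, 8).unwrap();
/// let (full_layout, user_offset) = Layout::new::<Header>().extend(user).unwrap();
/// // The user region starts at or after the end of the `Header` ...
/// assert!(user_offset >= core::mem::size_of::<Header>());
/// // ... and the combined block covers header, padding, and user data.
/// assert_eq!(full_layout.size(), user_offset + user.size());
/// ```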
struct Header {
    /// Pointer to the *full block* of the previous allocation, i.e. the
    /// previous `full_ptr` (not the user pointer). `None` marks the list end.
    next: Option<NonNull<u8>>,
    /// Layout that was passed to the backing allocator for this block
    /// (`Header` + padding + user data). Used to reconstruct the exact layout
    /// needed for deallocation.
    full_layout: Layout,
    /// Size of the user-requested portion of this block (bytes).
    user_size: usize,
    /// Byte offset from `full_ptr` to the first byte of user data.
    /// Computed via `Layout::extend` to satisfy alignment requirements.
    user_offset: usize,
}

// ── ArenaAllocator ───────────────────────────────────────────────────────────

/// A block-based allocator that frees all memory at once.
///
/// `ArenaAllocator<A>` is generic over its *backing* allocator `A`, which is
/// called for each individual block allocation and for every block
/// deallocation during [`reset`](Self::reset).
///
/// The internal state uses [`Cell`] to allow shared (`&self`) allocation,
/// which is necessary so that multiple collections can borrow the same arena
/// simultaneously.
///
/// # Dropping
///
/// Dropping an `ArenaAllocator` automatically calls [`reset`](Self::reset),
/// returning all memory to the backing allocator.
pub struct ArenaAllocator<A: Allocator> {
    /// Backing allocator used to obtain and release raw memory blocks.
    backing: A,
    /// Head of the singly-linked list of live allocations (stores `full_ptr`).
    last_alloc: Cell<Option<NonNull<u8>>>,
    /// Running count of allocations since the last reset; useful for diagnostics.
    alloc_count: Cell<usize>,
}

impl<A: Allocator> ArenaAllocator<A> {
    /// Creates a new, empty arena backed by `backing`.
    ///
    /// No memory is allocated from `backing` until the first call to
    /// [`alloc`](Allocator::alloc).
    pub fn new(backing: A) -> Self {
        Self {
            backing,
            last_alloc: Cell::new(None),
            alloc_count: Cell::new(0),
        }
    }

    /// Returns the number of live allocations currently managed by this arena.
    ///
    /// The counter is reset to zero by [`reset`](Self::reset) or
    /// [`reset_zeroed`](Self::reset_zeroed).
    #[inline]
    pub fn alloc_count(&self) -> usize { self.alloc_count.get() }

    /// Releases all memory blocks managed by this arena back to the backing
    /// allocator.
    ///
    /// After this call, all pointers previously returned by
    /// [`alloc`](Allocator::alloc) are invalid.
    ///
    /// # Safety
    ///
    /// The caller must ensure that **no** pointers previously obtained from
    /// this arena are used after `reset` returns (use-after-free).
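    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `SystemAllocator` backing from the
    /// module-level example:
    ///
    /// ```rust,ignore
    /// let arena = ArenaAllocator::new(SystemAllocator);
    /// let layout = Layout::from_size_align(64, 8).unwrap();
    /// let ptr = unsafe { arena.alloc(layout) }.unwrap();
    /// assert_eq!(arena.alloc_count(), 1);
    ///
    /// arena.reset(); // `ptr` is dangling from here on
    /// assert_eq!(arena.alloc_count(), 0);
    /// ```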
    pub fn reset(&self) {
        let mut current = self.last_alloc.get();
        while let Some(full_ptr) = current {
            // SAFETY: `full_ptr` was written by `alloc` and points to a valid
            // `Header`. We read it before deallocating the block it lives in.
            unsafe {
                let header: Header = ptr::read(full_ptr.as_ptr() as *const Header);
                current = header.next;
                // SAFETY: `full_ptr` + `header.full_layout` match the original
                // backing allocation exactly; this is the contract of `alloc`.
                self.backing.dealloc(full_ptr, header.full_layout);
            }
        }
        self.last_alloc.set(None);
        self.alloc_count.set(0);
    }

    /// Zeroes the user-data portion of every block before releasing it.
    ///
    /// Equivalent to [`reset`](Self::reset), but erases sensitive data first
    /// using SIMD-accelerated zero-fill.
    ///
    /// # Safety
    ///
    /// Same as [`reset`](Self::reset): all previously returned pointers are
    /// invalidated.
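    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `SystemAllocator` backing from the
    /// module-level example:
    ///
    /// ```rust,ignore
    /// let arena = ArenaAllocator::new(SystemAllocator);
    /// let key = unsafe { arena.alloc(Layout::array::<u8>(32).unwrap()) }.unwrap();
    /// // ... write secret material through `key` ...
    /// arena.reset_zeroed(); // wipes the 32 bytes before handing the block back
    /// ```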
    pub fn reset_zeroed(&self) {
        let mut current = self.last_alloc.get();
        while let Some(full_ptr) = current {
            // SAFETY: `full_ptr` points to a valid `Header` whose `user_offset`
            // and `user_size` were computed by `alloc` and remain correct until
            // the block is deallocated here.
            unsafe {
                let header: Header = ptr::read(full_ptr.as_ptr() as *const Header);
                current = header.next;

                if header.user_size > 0 {
                    // SAFETY: `full_ptr + user_offset` is the start of the user
                    // data region, which is `user_size` bytes long; both values
                    // come from `Layout::extend`, so pointer arithmetic is safe.
                    let user_ptr = full_ptr.as_ptr().add(header.user_offset);
                    simd::fill_bytes(user_ptr, 0, header.user_size);
                }

                // SAFETY: See the comment in `reset`.
                self.backing.dealloc(full_ptr, header.full_layout);
            }
        }
        self.last_alloc.set(None);
        self.alloc_count.set(0);
    }
}

impl<A: Allocator> Allocator for ArenaAllocator<A> {
    /// Allocates a new block, prepends a [`Header`], and returns a pointer to
    /// the user-data region.
    ///
    /// Internally calls the backing allocator for a single block large enough
    /// to hold both the `Header` and the user data (with proper alignment
    /// padding in between), then links the new block at the head of the
    /// internal list.
    ///
    /// # Safety
    ///
    /// * `layout.size()` must be greater than zero.
    /// * The returned pointer is valid until the next call to
    ///   [`reset`](ArenaAllocator::reset) or until the arena is dropped.
    unsafe fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        // Compute the combined layout: `Header` immediately followed by user
        // data, with appropriate padding to satisfy `layout.align()`.
        // `Layout::extend` returns `(combined_layout, offset_of_user_data)`.
        let (full_layout, user_offset) = Layout::new::<Header>()
            .extend(layout)
            .ok()?;

        // SAFETY: Delegating to the backing allocator, which must obey its own
        // `alloc` contract: return a valid pointer or `None`.
        let full_ptr = unsafe { self.backing.alloc(full_layout)? };

        let header = Header {
            next: self.last_alloc.get(),
            full_layout,
            user_size: layout.size(),
            user_offset,
        };

        // SAFETY: `full_ptr` is valid for at least `size_of::<Header>()` bytes
        // because `full_layout` was computed with `Header` as its first member.
        unsafe { ptr::write(full_ptr.as_ptr() as *mut Header, header) };

        self.last_alloc.set(Some(full_ptr));
        self.alloc_count.set(self.alloc_count.get() + 1);

        // SAFETY: `user_offset` was produced by `Layout::extend`, which
        // guarantees it is within the bounds of the allocated block.
        NonNull::new(unsafe { full_ptr.as_ptr().add(user_offset) })
    }

    /// No-op. Arena memory cannot be freed individually.
    ///
    /// To release all memory, call [`reset`](ArenaAllocator::reset) or drop
    /// the arena.
    ///
    /// # Safety
    ///
    /// Even though this method is a no-op, callers must not use `ptr` after
    /// calling `dealloc`; the pointer will become dangling once `reset` is
    /// eventually called.
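    ///
    /// # Example
    ///
    /// A minimal sketch showing that `dealloc` reclaims nothing (backing
    /// allocator assumed as in the module-level example):
    ///
    /// ```rust,ignore
    /// let arena = ArenaAllocator::new(SystemAllocator);
    /// let layout = Layout::from_size_align(32, 8).unwrap();
    /// let ptr = unsafe { arena.alloc(layout) }.unwrap();
    /// unsafe { arena.dealloc(ptr, layout) }; // no-op
    /// // The block is still tracked; only `reset` (or drop) releases it.
    /// assert_eq!(arena.alloc_count(), 1);
    /// ```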
    unsafe fn dealloc(&self, _: NonNull<u8>, _: Layout) {}
}

impl<A: Allocator> Drop for ArenaAllocator<A> {
    /// Releases all managed blocks by calling [`reset`](ArenaAllocator::reset).
    fn drop(&mut self) { self.reset(); }
}

// ── ArenaExt ─────────────────────────────────────────────────────────────────

/// Extension trait that adds zeroed allocation to arena-like allocators.
pub trait ArenaExt: Allocator {
    /// Allocates a block of memory and fills it with zeroes before returning.
    ///
    /// # Safety
    ///
    /// * `layout.size()` must be greater than zero.
    /// * Same lifetime and invalidation rules as the underlying
    ///   [`alloc`](Allocator::alloc) apply.
    unsafe fn alloc_zeroed(&self, layout: Layout) -> Option<NonNull<u8>>;
}

impl<A: Allocator> ArenaExt for ArenaAllocator<A> {
    /// Allocates a block and zero-fills the user-data region with SIMD.
    ///
    /// Delegates to [`alloc`](Allocator::alloc) and then applies a SIMD
    /// zero-fill, which is faster than a scalar loop for large allocations.
    ///
    /// # Safety
    ///
    /// * `layout.size()` must be greater than zero.
    /// * The returned pointer becomes invalid after [`reset`](ArenaAllocator::reset)
    ///   or when the arena is dropped.
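    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `SystemAllocator` backing from the
    /// module-level example:
    ///
    /// ```rust,ignore
    /// let arena = ArenaAllocator::new(SystemAllocator);
    /// let layout = Layout::array::<u64>(8).unwrap();
    /// let ptr = unsafe { arena.alloc_zeroed(layout) }.unwrap();
    /// // Every byte of the 64-byte region starts out zeroed.
    /// assert_eq!(unsafe { ptr.as_ptr().read() }, 0);
    /// ```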
    unsafe fn alloc_zeroed(&self, layout: Layout) -> Option<NonNull<u8>> {
        // SAFETY: Forwarding to `alloc`, which upholds its own safety contract.
        let ptr = unsafe { self.alloc(layout)? };
        if layout.size() > 0 {
            // SAFETY: `ptr` is valid for exactly `layout.size()` bytes, as
            // guaranteed by the successful `alloc` call above.
            unsafe { simd::fill_bytes(ptr.as_ptr(), 0, layout.size()) };
        }
        Some(ptr)
    }
}