1use std::alloc::{alloc, dealloc, Layout};
7use std::cell::RefCell;
8use std::ptr;
9
/// A chunked bump allocator: allocations are carved sequentially out of
/// large heap chunks and reclaimed only all at once (`reset` or `Drop`).
pub struct Arena {
    // Grow-only list of chunks; RefCell lets allocation go through `&self`.
    chunks: RefCell<Vec<Chunk>>,
    // Minimum byte size used whenever a new chunk must be allocated.
    chunk_size: usize,
}
18
/// One contiguous slab of arena memory plus its bump cursor.
struct Chunk {
    // Base pointer of the slab (heap allocation owned by the Arena).
    data: *mut u8,
    // Total size of the slab in bytes.
    capacity: usize,
    // Bump cursor: bytes handed out so far, measured from `data`.
    used: usize,
}
24
25impl Arena {
26 pub fn new(chunk_size: usize) -> Self {
28 Self {
29 chunks: RefCell::new(Vec::new()),
30 chunk_size,
31 }
32 }
33
34 pub fn with_default_chunk_size() -> Self {
36 Self::new(1024 * 1024) }
38
39 pub fn alloc_vec<T>(&self, count: usize) -> ArenaVec<T> {
41 let size = count * std::mem::size_of::<T>();
42 let align = std::mem::align_of::<T>();
43
44 let ptr = self.alloc_raw(size, align);
45
46 ArenaVec {
47 ptr: ptr as *mut T,
48 len: 0,
49 capacity: count,
50 _phantom: std::marker::PhantomData,
51 }
52 }
53
54 fn alloc_raw(&self, size: usize, align: usize) -> *mut u8 {
56 assert!(align > 0 && align.is_power_of_two(), "Alignment must be a power of 2");
58 assert!(size > 0, "Cannot allocate zero bytes");
59 assert!(size <= isize::MAX as usize, "Allocation size too large");
60
61 let mut chunks = self.chunks.borrow_mut();
62
63 if let Some(chunk) = chunks.last_mut() {
65 let current = chunk.used;
67 let aligned = (current + align - 1) & !(align - 1);
68
69 if aligned < current {
71 panic!("Alignment calculation overflow");
72 }
73
74 let needed = aligned.checked_add(size)
75 .expect("Arena allocation size overflow");
76
77 if needed <= chunk.capacity {
78 chunk.used = needed;
79 return unsafe {
80 let ptr = chunk.data.add(aligned);
82 debug_assert!(ptr as usize >= chunk.data as usize, "Pointer underflow");
83 ptr
84 };
85 }
86 }
87
88 let chunk_size = self.chunk_size.max(size + align);
90 let layout = Layout::from_size_align(chunk_size, 64).unwrap();
91 let data = unsafe { alloc(layout) };
92
93 let aligned = align;
94 let chunk = Chunk {
95 data,
96 capacity: chunk_size,
97 used: aligned + size,
98 };
99
100 let ptr = unsafe { data.add(aligned) };
101 chunks.push(chunk);
102
103 ptr
104 }
105
106 pub fn reset(&self) {
108 let mut chunks = self.chunks.borrow_mut();
109 for chunk in chunks.iter_mut() {
110 chunk.used = 0;
111 }
112 }
113
114 pub fn allocated_bytes(&self) -> usize {
116 let chunks = self.chunks.borrow();
117 chunks.iter().map(|c| c.capacity).sum()
118 }
119
120 pub fn used_bytes(&self) -> usize {
122 let chunks = self.chunks.borrow();
123 chunks.iter().map(|c| c.used).sum()
124 }
125}
126
127impl Drop for Arena {
128 fn drop(&mut self) {
129 let chunks = self.chunks.borrow();
130 for chunk in chunks.iter() {
131 let layout = Layout::from_size_align(chunk.capacity, 64).unwrap();
132 unsafe {
133 dealloc(chunk.data, layout);
134 }
135 }
136 }
137}
138
/// A fixed-capacity, push-only vector whose storage lives inside an `Arena`.
///
/// NOTE(review): no lifetime ties this to the owning `Arena`, so an
/// `ArenaVec` can outlive the arena (or survive `Arena::reset`) and dangle —
/// callers must enforce that ordering manually. No `Drop` impl appears in
/// this file, so element destructors are never run.
pub struct ArenaVec<T> {
    // Start of the reserved region inside an arena chunk.
    ptr: *mut T,
    // Number of initialized elements (a prefix of the region).
    len: usize,
    // Maximum number of elements the region can hold.
    capacity: usize,
    // Marks logical ownership of `T` values for variance/drop-check purposes.
    _phantom: std::marker::PhantomData<T>,
}
146
147impl<T> ArenaVec<T> {
148 pub fn push(&mut self, value: T) {
150 assert!(self.len < self.capacity, "ArenaVec capacity exceeded");
152 assert!(!self.ptr.is_null(), "ArenaVec pointer is null");
153
154 unsafe {
155 let offset_ptr = self.ptr.add(self.len);
157 debug_assert!(
158 offset_ptr as usize >= self.ptr as usize,
159 "Pointer arithmetic overflow"
160 );
161 ptr::write(offset_ptr, value);
162 }
163 self.len += 1;
164 }
165
166 pub fn len(&self) -> usize {
168 self.len
169 }
170
171 pub fn is_empty(&self) -> bool {
173 self.len == 0
174 }
175
176 pub fn capacity(&self) -> usize {
178 self.capacity
179 }
180
181 pub fn as_slice(&self) -> &[T] {
183 assert!(self.len <= self.capacity, "Length exceeds capacity");
185 assert!(!self.ptr.is_null(), "Cannot create slice from null pointer");
186
187 unsafe { std::slice::from_raw_parts(self.ptr, self.len) }
188 }
189
190 pub fn as_mut_slice(&mut self) -> &mut [T] {
192 assert!(self.len <= self.capacity, "Length exceeds capacity");
194 assert!(!self.ptr.is_null(), "Cannot create slice from null pointer");
195
196 unsafe { std::slice::from_raw_parts_mut(self.ptr, self.len) }
197 }
198}
199
200impl<T> std::ops::Deref for ArenaVec<T> {
201 type Target = [T];
202
203 fn deref(&self) -> &[T] {
204 self.as_slice()
205 }
206}
207
208impl<T> std::ops::DerefMut for ArenaVec<T> {
209 fn deref_mut(&mut self) -> &mut [T] {
210 self.as_mut_slice()
211 }
212}
213
thread_local! {
    // Per-thread scratch arena using the default 1 MiB chunk size.
    // NOTE(review): nothing in the visible source reads THREAD_ARENA, and it
    // is private to this module — presumably accessed elsewhere via
    // `THREAD_ARENA.with(...)`; confirm before removing.
    static THREAD_ARENA: RefCell<Arena> = RefCell::new(Arena::with_default_chunk_size());
}
218
#[cfg(test)]
mod tests {
    use super::*;

    // Basic push/read round-trip through a single allocation.
    #[test]
    fn test_arena_alloc() {
        let arena = Arena::new(1024);

        let mut vec1 = arena.alloc_vec::<f32>(10);
        vec1.push(1.0);
        vec1.push(2.0);
        vec1.push(3.0);

        assert_eq!(vec1.len(), 3);
        // Indexing goes through the Deref impl to `as_slice`.
        assert_eq!(vec1[0], 1.0);
        assert_eq!(vec1[1], 2.0);
        assert_eq!(vec1[2], 3.0);
    }

    // Several allocations of differing element types/sizes from one arena;
    // checks only the reported capacities, not placement.
    #[test]
    fn test_arena_multiple_allocs() {
        let arena = Arena::new(1024);

        let vec1 = arena.alloc_vec::<u32>(100);
        let vec2 = arena.alloc_vec::<u64>(50);
        let vec3 = arena.alloc_vec::<f32>(200);

        assert_eq!(vec1.capacity(), 100);
        assert_eq!(vec2.capacity(), 50);
        assert_eq!(vec3.capacity(), 200);
    }

    // `reset` should rewind the bump cursors, dropping the used-byte count.
    #[test]
    fn test_arena_reset() {
        let arena = Arena::new(1024);

        // Scope the ArenaVecs so they are gone before the memory is recycled.
        {
            let _vec1 = arena.alloc_vec::<f32>(100);
            let _vec2 = arena.alloc_vec::<f32>(100);
        }

        let used_before = arena.used_bytes();
        arena.reset();
        let used_after = arena.used_bytes();

        assert!(used_after < used_before);
    }
}