use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::cell::RefCell;
use std::ptr;

/// A bump allocator that hands out memory from large, 64-byte-aligned chunks.
/// Memory is reclaimed all at once when the arena is dropped; `reset` reuses
/// the existing chunks without freeing them.
pub struct Arena {
    chunks: RefCell<Vec<Chunk>>,
    chunk_size: usize,
}

struct Chunk {
    data: *mut u8,
    capacity: usize,
    used: usize,
}

impl Arena {
    pub fn new(chunk_size: usize) -> Self {
        Self {
            chunks: RefCell::new(Vec::new()),
            chunk_size,
        }
    }

    pub fn with_default_chunk_size() -> Self {
        Self::new(1024 * 1024)
    }

    /// Allocates uninitialized space for `count` values of `T` and returns an
    /// `ArenaVec` that can hold at most that many elements.
    pub fn alloc_vec<T>(&self, count: usize) -> ArenaVec<T> {
        let size = count
            .checked_mul(std::mem::size_of::<T>())
            .expect("ArenaVec allocation size overflow");
        let align = std::mem::align_of::<T>();

        let ptr = self.alloc_raw(size, align);

        ArenaVec {
            ptr: ptr as *mut T,
            len: 0,
            capacity: count,
            _phantom: std::marker::PhantomData,
        }
    }

    fn alloc_raw(&self, size: usize, align: usize) -> *mut u8 {
        assert!(
            align > 0 && align.is_power_of_two(),
            "Alignment must be a power of 2"
        );
        // Chunks are allocated with 64-byte alignment, so that is the largest
        // alignment this arena can guarantee.
        assert!(align <= 64, "Alignment must not exceed 64 bytes");
        assert!(size > 0, "Cannot allocate zero bytes");
        assert!(size <= isize::MAX as usize, "Allocation size too large");

        let mut chunks = self.chunks.borrow_mut();

        // Fast path: bump-allocate out of the current chunk if it has room.
        if let Some(chunk) = chunks.last_mut() {
            let current = chunk.used;
            // Round the bump offset up to the requested alignment.
            let aligned = (current + align - 1) & !(align - 1);

            if aligned < current {
                panic!("Alignment calculation overflow");
            }

            let needed = aligned
                .checked_add(size)
                .expect("Arena allocation size overflow");

            if needed <= chunk.capacity {
                chunk.used = needed;
                return unsafe {
                    let ptr = chunk.data.add(aligned);
                    debug_assert!(ptr as usize >= chunk.data as usize, "Pointer underflow");
                    ptr
                };
            }
        }

        // Slow path: the current chunk (if any) is full, so allocate a fresh
        // chunk that is at least large enough for this request.
        let chunk_size = self.chunk_size.max(size);
        let layout = Layout::from_size_align(chunk_size, 64).unwrap();
        let data = unsafe { alloc(layout) };
        if data.is_null() {
            handle_alloc_error(layout);
        }

        // The chunk base is 64-byte aligned, so it already satisfies `align`
        // and the allocation can start at offset 0.
        let chunk = Chunk {
            data,
            capacity: chunk_size,
            used: size,
        };
        chunks.push(chunk);

        data
    }

    /// Marks every chunk as empty so its memory can be reused.
    ///
    /// Note: any `ArenaVec` handed out before the reset still points into
    /// these chunks and must not be used afterwards.
    pub fn reset(&self) {
        let mut chunks = self.chunks.borrow_mut();
        for chunk in chunks.iter_mut() {
            chunk.used = 0;
        }
    }

    /// Total capacity of all chunks, in bytes.
    pub fn allocated_bytes(&self) -> usize {
        let chunks = self.chunks.borrow();
        chunks.iter().map(|c| c.capacity).sum()
    }

    /// Bytes currently handed out across all chunks.
    pub fn used_bytes(&self) -> usize {
        let chunks = self.chunks.borrow();
        chunks.iter().map(|c| c.used).sum()
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        let chunks = self.chunks.borrow();
        for chunk in chunks.iter() {
            // Must match the layout used in `alloc_raw` when the chunk was created.
            let layout = Layout::from_size_align(chunk.capacity, 64).unwrap();
            unsafe {
                dealloc(chunk.data, layout);
            }
        }
    }
}

/// A fixed-capacity vector whose backing storage lives in an [`Arena`].
///
/// Element destructors are never run: the arena only reclaims the raw memory,
/// so types with a non-trivial `Drop` will leak.
pub struct ArenaVec<T> {
    ptr: *mut T,
    len: usize,
    capacity: usize,
    _phantom: std::marker::PhantomData<T>,
}

impl<T> ArenaVec<T> {
    pub fn push(&mut self, value: T) {
        assert!(self.len < self.capacity, "ArenaVec capacity exceeded");
        assert!(!self.ptr.is_null(), "ArenaVec pointer is null");

        unsafe {
            let offset_ptr = self.ptr.add(self.len);
            debug_assert!(
                offset_ptr as usize >= self.ptr as usize,
                "Pointer arithmetic overflow"
            );
            ptr::write(offset_ptr, value);
        }
        self.len += 1;
    }

    pub fn len(&self) -> usize {
        self.len
    }

    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    pub fn capacity(&self) -> usize {
        self.capacity
    }

    pub fn as_slice(&self) -> &[T] {
        assert!(self.len <= self.capacity, "Length exceeds capacity");
        assert!(!self.ptr.is_null(), "Cannot create slice from null pointer");

        unsafe { std::slice::from_raw_parts(self.ptr, self.len) }
    }

    pub fn as_mut_slice(&mut self) -> &mut [T] {
        assert!(self.len <= self.capacity, "Length exceeds capacity");
        assert!(!self.ptr.is_null(), "Cannot create slice from null pointer");

        unsafe { std::slice::from_raw_parts_mut(self.ptr, self.len) }
    }
}

impl<T> std::ops::Deref for ArenaVec<T> {
    type Target = [T];

    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> std::ops::DerefMut for ArenaVec<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

thread_local! {
    /// A per-thread arena with the default 1 MiB chunk size.
    static THREAD_ARENA: RefCell<Arena> = RefCell::new(Arena::with_default_chunk_size());
}
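
// Illustrative helper (not part of the original API): borrow this thread's
// arena for the duration of a closure, using the standard `LocalKey::with`
// accessor on the `thread_local!` static above.
#[allow(dead_code)]
fn with_thread_arena<R>(f: impl FnOnce(&Arena) -> R) -> R {
    THREAD_ARENA.with(|cell| f(&*cell.borrow()))
}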

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_arena_alloc() {
        let arena = Arena::new(1024);

        let mut vec1 = arena.alloc_vec::<f32>(10);
        vec1.push(1.0);
        vec1.push(2.0);
        vec1.push(3.0);

        assert_eq!(vec1.len(), 3);
        assert_eq!(vec1[0], 1.0);
        assert_eq!(vec1[1], 2.0);
        assert_eq!(vec1[2], 3.0);
    }

    #[test]
    fn test_arena_multiple_allocs() {
        let arena = Arena::new(1024);

        let vec1 = arena.alloc_vec::<u32>(100);
        let vec2 = arena.alloc_vec::<u64>(50);
        let vec3 = arena.alloc_vec::<f32>(200);

        assert_eq!(vec1.capacity(), 100);
        assert_eq!(vec2.capacity(), 50);
        assert_eq!(vec3.capacity(), 200);
    }
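
    // Added check (not in the original test suite): a wider type allocated
    // after an odd-sized allocation still comes back suitably aligned.
    #[test]
    fn test_arena_alignment_within_chunk() {
        let arena = Arena::new(1024);

        let mut bytes = arena.alloc_vec::<u8>(3);
        bytes.push(1);

        let doubles = arena.alloc_vec::<f64>(4);
        let addr = doubles.as_slice().as_ptr() as usize;
        assert_eq!(addr % std::mem::align_of::<f64>(), 0);
    }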

    #[test]
    fn test_arena_reset() {
        let arena = Arena::new(1024);

        {
            let _vec1 = arena.alloc_vec::<f32>(100);
            let _vec2 = arena.alloc_vec::<f32>(100);
        }

        let used_before = arena.used_bytes();
        arena.reset();
        let used_after = arena.used_bytes();

        assert!(used_before > 0);
        assert_eq!(used_after, 0);
    }
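
    // Added check (not in the original test suite): a request larger than the
    // configured chunk size must still succeed via a larger, dedicated chunk.
    #[test]
    fn test_arena_grows_past_chunk_size() {
        let arena = Arena::new(64);

        let mut vec = arena.alloc_vec::<u8>(256);
        vec.push(42);

        assert_eq!(vec.capacity(), 256);
        assert!(arena.allocated_bytes() >= 256);
    }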
}