use crate::types::EpochId;
use parking_lot::RwLock;
use std::alloc::{alloc, dealloc, Layout};
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering};

/// Default size of each arena chunk: 1 MiB.
const DEFAULT_CHUNK_SIZE: usize = 1024 * 1024;

/// A fixed-size block of memory that hands out allocations by bumping an
/// atomic offset.
struct Chunk {
    /// Pointer to the start of the chunk's memory.
    ptr: NonNull<u8>,
    /// Total size of the chunk in bytes.
    capacity: usize,
    /// Current bump offset; the next allocation starts at or after it.
    offset: AtomicUsize,
}

impl Chunk {
    fn new(capacity: usize) -> Self {
        assert!(capacity > 0, "Chunk capacity must be non-zero");
        let layout = Layout::from_size_align(capacity, 16).expect("Invalid layout");
        // SAFETY: `layout` has non-zero size, as asserted above.
        let ptr = unsafe { alloc(layout) };
        let ptr = NonNull::new(ptr).expect("Allocation failed");

        Self {
            ptr,
            capacity,
            offset: AtomicUsize::new(0),
        }
    }

    /// Attempts to carve `size` bytes with the given power-of-two alignment
    /// out of this chunk, returning `None` if the request does not fit.
    fn try_alloc(&self, size: usize, align: usize) -> Option<NonNull<u8>> {
        debug_assert!(align.is_power_of_two());
        let base = self.ptr.as_ptr() as usize;
        loop {
            let current = self.offset.load(Ordering::Relaxed);

            // Align the absolute address, not just the offset, so alignments
            // larger than the chunk's own 16-byte alignment are honored.
            let aligned_addr = (base + current + align - 1) & !(align - 1);
            let aligned_offset = aligned_addr - base;
            let new_offset = aligned_offset + size;

            if new_offset > self.capacity {
                return None;
            }

            match self.offset.compare_exchange_weak(
                current,
                new_offset,
                Ordering::AcqRel,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    // SAFETY: `aligned_offset + size <= capacity`, so the
                    // resulting pointer stays inside the allocation.
                    let ptr = unsafe { self.ptr.as_ptr().add(aligned_offset) };
                    return NonNull::new(ptr);
                }
                // Another thread won the race; retry with the fresh offset.
                Err(_) => continue,
            }
        }
    }

    /// Bytes consumed in this chunk so far (including alignment padding).
    fn used(&self) -> usize {
        self.offset.load(Ordering::Relaxed)
    }

    /// Bytes still available in this chunk.
    #[allow(dead_code)]
    fn remaining(&self) -> usize {
        self.capacity - self.used()
    }
}

impl Drop for Chunk {
    fn drop(&mut self) {
        let layout = Layout::from_size_align(self.capacity, 16).expect("Invalid layout");
        // SAFETY: `ptr` was allocated with this exact layout in `Chunk::new`.
        unsafe { dealloc(self.ptr.as_ptr(), layout) };
    }
}

// SAFETY: the raw pointer is exclusively owned by the chunk, and the bump
// offset is advanced atomically, so chunks may be shared across threads.
unsafe impl Send for Chunk {}
unsafe impl Sync for Chunk {}

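/// An epoch-scoped bump allocator. Allocations are appended to fixed-size
/// chunks and are only reclaimed in bulk, when the arena itself is dropped.
///
/// A minimal usage sketch (illustrative only, hence `ignore`):
///
/// ```ignore
/// let arena = Arena::new(EpochId::INITIAL);
/// let n = arena.alloc_value(42u64);
/// let xs = arena.alloc_slice(&[1u32, 2, 3]);
/// assert_eq!((*n, xs.len()), (42, 3));
/// ```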
pub struct Arena {
    /// Epoch this arena belongs to.
    epoch: EpochId,
    /// Chunks owned by this arena, newest last.
    chunks: RwLock<Vec<Chunk>>,
    /// Size of each new chunk (oversized requests get a larger chunk).
    chunk_size: usize,
    /// Total bytes reserved across all chunks.
    total_allocated: AtomicUsize,
}

impl Arena {
    /// Creates an arena for `epoch` with the default chunk size.
    #[must_use]
    pub fn new(epoch: EpochId) -> Self {
        Self::with_chunk_size(epoch, DEFAULT_CHUNK_SIZE)
    }

    /// Creates an arena for `epoch` whose chunks are `chunk_size` bytes.
    #[must_use]
    pub fn with_chunk_size(epoch: EpochId, chunk_size: usize) -> Self {
        let initial_chunk = Chunk::new(chunk_size);
        Self {
            epoch,
            chunks: RwLock::new(vec![initial_chunk]),
            chunk_size,
            total_allocated: AtomicUsize::new(chunk_size),
        }
    }

    /// Returns the epoch this arena belongs to.
    #[must_use]
    pub fn epoch(&self) -> EpochId {
        self.epoch
    }

    /// Allocates `size` bytes with the given alignment, growing the arena
    /// with a new chunk if no existing chunk has room.
    pub fn alloc(&self, size: usize, align: usize) -> NonNull<u8> {
        // Fast path: try existing chunks (newest first) under a read lock.
        {
            let chunks = self.chunks.read();
            for chunk in chunks.iter().rev() {
                if let Some(ptr) = chunk.try_alloc(size, align) {
                    return ptr;
                }
            }
        }

        // Slow path: allocate a fresh chunk under the write lock.
        self.alloc_new_chunk(size, align)
    }

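    /// Moves `value` into the arena and returns a mutable reference to the
    /// arena-owned copy.
    ///
    /// Note that the arena never runs destructors: if `T` implements `Drop`,
    /// its drop code will not execute. A minimal usage sketch (illustrative
    /// only, hence `ignore`):
    ///
    /// ```ignore
    /// let arena = Arena::new(EpochId::INITIAL);
    /// let counter = arena.alloc_value(0u64);
    /// *counter += 1;
    /// assert_eq!(*counter, 1);
    /// ```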
    pub fn alloc_value<T>(&self, value: T) -> &mut T {
        let ptr = self.alloc(std::mem::size_of::<T>(), std::mem::align_of::<T>());
        // SAFETY: the pointer is freshly allocated, properly aligned for `T`,
        // and refers to memory no other reference can observe yet.
        unsafe {
            let typed_ptr = ptr.as_ptr() as *mut T;
            typed_ptr.write(value);
            &mut *typed_ptr
        }
    }

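    /// Copies `values` into the arena and returns a mutable slice over the
    /// arena-owned copy. The `T: Copy` bound keeps this sound: no destructors
    /// are ever owed for the copied elements.
    ///
    /// A minimal usage sketch (illustrative only, hence `ignore`):
    ///
    /// ```ignore
    /// let arena = Arena::new(EpochId::INITIAL);
    /// let digits = arena.alloc_slice(&[1u8, 2, 3]);
    /// digits[0] = 9;
    /// ```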
    pub fn alloc_slice<T: Copy>(&self, values: &[T]) -> &mut [T] {
        if values.is_empty() {
            return &mut [];
        }

        let size = std::mem::size_of::<T>() * values.len();
        let align = std::mem::align_of::<T>();
        let ptr = self.alloc(size, align);

        // SAFETY: the allocation is large enough and aligned for
        // `values.len()` elements of `T`, and source and destination
        // cannot overlap.
        unsafe {
            let typed_ptr = ptr.as_ptr() as *mut T;
            std::ptr::copy_nonoverlapping(values.as_ptr(), typed_ptr, values.len());
            std::slice::from_raw_parts_mut(typed_ptr, values.len())
        }
    }

    /// Cold path: creates a chunk big enough for the request and retries.
    fn alloc_new_chunk(&self, size: usize, align: usize) -> NonNull<u8> {
        // Oversized requests get a dedicated chunk; the extra `align` bytes
        // leave room for worst-case alignment padding.
        let chunk_size = self.chunk_size.max(size + align);
        let chunk = Chunk::new(chunk_size);

        self.total_allocated
            .fetch_add(chunk_size, Ordering::Relaxed);

        let ptr = chunk
            .try_alloc(size, align)
            .expect("Fresh chunk should have space");

        // Two racing threads may each push a new chunk here; that wastes
        // some memory but stays correct, since chunks are append-only.
        let mut chunks = self.chunks.write();
        chunks.push(chunk);

        ptr
    }

    /// Total bytes reserved from the system allocator.
    #[must_use]
    pub fn total_allocated(&self) -> usize {
        self.total_allocated.load(Ordering::Relaxed)
    }

    /// Total bytes handed out to callers (including alignment padding).
    #[must_use]
    pub fn total_used(&self) -> usize {
        let chunks = self.chunks.read();
        chunks.iter().map(Chunk::used).sum()
    }

    /// Returns a snapshot of this arena's usage counters.
    #[must_use]
    pub fn stats(&self) -> ArenaStats {
        let chunks = self.chunks.read();
        ArenaStats {
            epoch: self.epoch,
            chunk_count: chunks.len(),
            total_allocated: self.total_allocated.load(Ordering::Relaxed),
            total_used: chunks.iter().map(Chunk::used).sum(),
        }
    }
}

/// A point-in-time snapshot of an [`Arena`]'s memory usage.
#[derive(Debug, Clone)]
pub struct ArenaStats {
    /// Epoch the arena belongs to.
    pub epoch: EpochId,
    /// Number of chunks currently owned by the arena.
    pub chunk_count: usize,
    /// Total bytes reserved from the system allocator.
    pub total_allocated: usize,
    /// Total bytes handed out to callers.
    pub total_used: usize,
}

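/// Manages one [`Arena`] per epoch so that an entire epoch's allocations can
/// be released at once via [`ArenaAllocator::drop_epoch`].
///
/// A minimal lifecycle sketch (illustrative only, hence `ignore`):
///
/// ```ignore
/// let allocator = ArenaAllocator::new();
/// let epoch = allocator.new_epoch();      // open a fresh epoch
/// allocator.arena(epoch).alloc(64, 8);    // allocate within it
/// allocator.drop_epoch(epoch);            // reclaim everything at once
/// ```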
pub struct ArenaAllocator {
    /// One arena per live epoch.
    arenas: RwLock<hashbrown::HashMap<EpochId, Arena>>,
    /// Id of the most recently created epoch.
    current_epoch: AtomicUsize,
    /// Chunk size used for every new arena.
    chunk_size: usize,
}

impl ArenaAllocator {
    /// Creates an allocator with the default chunk size and an arena for the
    /// initial epoch.
    #[must_use]
    pub fn new() -> Self {
        Self::with_chunk_size(DEFAULT_CHUNK_SIZE)
    }

    /// Creates an allocator whose arenas use `chunk_size`-byte chunks.
    #[must_use]
    pub fn with_chunk_size(chunk_size: usize) -> Self {
        let allocator = Self {
            arenas: RwLock::new(hashbrown::HashMap::new()),
            current_epoch: AtomicUsize::new(0),
            chunk_size,
        };

        // Eagerly create the arena for the initial epoch so that `alloc`
        // never observes an empty map.
        let epoch = EpochId::INITIAL;
        allocator
            .arenas
            .write()
            .insert(epoch, Arena::with_chunk_size(epoch, chunk_size));

        allocator
    }

    /// Returns the id of the most recently created epoch.
    #[must_use]
    pub fn current_epoch(&self) -> EpochId {
        EpochId::new(self.current_epoch.load(Ordering::Acquire) as u64)
    }

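    /// Advances to a new epoch and creates an empty arena for it.
    ///
    /// A minimal sketch of the intended call pattern (illustrative only):
    ///
    /// ```ignore
    /// let epoch = allocator.new_epoch();
    /// assert_eq!(allocator.current_epoch(), epoch);
    /// ```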
    pub fn new_epoch(&self) -> EpochId {
        // `fetch_add` returns the previous id; the new epoch is one past it.
        let new_id = self.current_epoch.fetch_add(1, Ordering::AcqRel) as u64 + 1;
        let epoch = EpochId::new(new_id);

        let arena = Arena::with_chunk_size(epoch, self.chunk_size);
        self.arenas.write().insert(epoch, arena);

        epoch
    }

    /// Returns a read guard that dereferences to the arena for `epoch`.
    ///
    /// # Panics
    ///
    /// Panics if `epoch` was never created or has already been dropped.
    pub fn arena(&self, epoch: EpochId) -> impl std::ops::Deref<Target = Arena> + '_ {
        parking_lot::RwLockReadGuard::map(self.arenas.read(), |arenas| {
            arenas.get(&epoch).expect("Epoch should exist")
        })
    }

    /// Allocates from the current epoch's arena.
    pub fn alloc(&self, size: usize, align: usize) -> NonNull<u8> {
        let epoch = self.current_epoch();
        let arenas = self.arenas.read();
        arenas
            .get(&epoch)
            .expect("Current epoch exists")
            .alloc(size, align)
    }

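    /// Frees every allocation made in `epoch` by dropping its arena.
    ///
    /// Raw pointers previously returned for this epoch become dangling, so
    /// callers must ensure the epoch is no longer in use before dropping it.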
    pub fn drop_epoch(&self, epoch: EpochId) {
        self.arenas.write().remove(&epoch);
    }

    /// Total bytes reserved across all live epochs.
    #[must_use]
    pub fn total_allocated(&self) -> usize {
        self.arenas
            .read()
            .values()
            .map(Arena::total_allocated)
            .sum()
    }
}

impl Default for ArenaAllocator {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_arena_basic_allocation() {
        let arena = Arena::new(EpochId::INITIAL);

        let ptr1 = arena.alloc(100, 8);
        let ptr2 = arena.alloc(200, 8);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());
    }

    #[test]
    fn test_arena_value_allocation() {
        let arena = Arena::new(EpochId::INITIAL);

        let value = arena.alloc_value(42u64);
        assert_eq!(*value, 42);

        *value = 100;
        assert_eq!(*value, 100);
    }

    #[test]
    fn test_arena_slice_allocation() {
        let arena = Arena::new(EpochId::INITIAL);

        let slice = arena.alloc_slice(&[1u32, 2, 3, 4, 5]);
        assert_eq!(slice, &[1, 2, 3, 4, 5]);

        slice[0] = 10;
        assert_eq!(slice[0], 10);
    }

    #[test]
    fn test_arena_large_allocation() {
        let arena = Arena::with_chunk_size(EpochId::INITIAL, 1024);

        let ptr = arena.alloc(2048, 8);
        assert!(!ptr.as_ptr().is_null());

        assert!(arena.stats().chunk_count >= 2);
    }

    #[test]
    fn test_arena_allocator_epochs() {
        let allocator = ArenaAllocator::new();

        let epoch0 = allocator.current_epoch();
        assert_eq!(epoch0, EpochId::INITIAL);

        let epoch1 = allocator.new_epoch();
        assert_eq!(epoch1, EpochId::new(1));

        let epoch2 = allocator.new_epoch();
        assert_eq!(epoch2, EpochId::new(2));

        assert_eq!(allocator.current_epoch(), epoch2);
    }

    #[test]
    fn test_arena_allocator_allocation() {
        let allocator = ArenaAllocator::new();

        let ptr1 = allocator.alloc(100, 8);
        let ptr2 = allocator.alloc(100, 8);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());
    }

    #[test]
    fn test_arena_drop_epoch() {
        let allocator = ArenaAllocator::new();

        let initial_mem = allocator.total_allocated();

        let epoch1 = allocator.new_epoch();
        {
            let arena = allocator.arena(epoch1);
            arena.alloc(10000, 8);
        }

        let after_alloc = allocator.total_allocated();
        assert!(after_alloc > initial_mem);

        allocator.drop_epoch(epoch1);

        let after_drop = allocator.total_allocated();
        assert!(after_drop < after_alloc);
    }

    #[test]
    fn test_arena_stats() {
        let arena = Arena::with_chunk_size(EpochId::new(5), 4096);

        let stats = arena.stats();
        assert_eq!(stats.epoch, EpochId::new(5));
        assert_eq!(stats.chunk_count, 1);
        assert_eq!(stats.total_allocated, 4096);
        assert_eq!(stats.total_used, 0);

        arena.alloc(100, 8);
        let stats = arena.stats();
        assert!(stats.total_used >= 100);
    }
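
    #[test]
    fn test_arena_concurrent_allocation() {
        // The bump offset is advanced with a CAS, so simultaneous allocators
        // must never receive overlapping regions. Four threads each write
        // 100 values and read them back.
        let arena = Arena::new(EpochId::INITIAL);

        std::thread::scope(|s| {
            for _ in 0..4 {
                s.spawn(|| {
                    for _ in 0..100 {
                        let v = arena.alloc_value(7u64);
                        assert_eq!(*v, 7);
                    }
                });
            }
        });

        // 400 eight-byte values were written, plus any alignment padding.
        assert!(arena.total_used() >= 400 * 8);
    }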
}