#![allow(unsafe_code)]
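//! Epoch-scoped arena allocation.
//!
//! An [`Arena`] serves raw memory by bump allocation from a list of
//! fixed-capacity chunks: allocation within a chunk is lock-free, and the
//! chunk list sits behind a `parking_lot::RwLock` that is write-locked only
//! when the arena has to grow. [`ArenaAllocator`] keeps one arena per
//! [`EpochId`] so that all memory belonging to an epoch can be released at
//! once via [`ArenaAllocator::drop_epoch`].
//!
//! Arenas only reclaim memory; destructors of values placed in an arena are
//! never run.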

use std::alloc::{Layout, alloc, dealloc, handle_alloc_error};
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering};

use parking_lot::RwLock;

use crate::types::EpochId;

/// Default capacity of each chunk: 1 MiB.
const DEFAULT_CHUNK_SIZE: usize = 1024 * 1024;

/// Alignment of every chunk's backing allocation, and therefore the largest
/// alignment `Chunk::try_alloc` can honor.
const CHUNK_ALIGN: usize = 16;

/// A fixed-capacity block of memory served by atomic bump allocation.
struct Chunk {
    /// Start of the backing allocation; aligned to `CHUNK_ALIGN`.
    ptr: NonNull<u8>,
    /// Size of the backing allocation in bytes.
    capacity: usize,
    /// Offset of the first free byte (the bump pointer).
    offset: AtomicUsize,
}

impl Chunk {
    fn new(capacity: usize) -> Self {
        let layout = Layout::from_size_align(capacity, CHUNK_ALIGN).expect("Invalid layout");
        let ptr = unsafe { alloc(layout) };
        // A null return means the system allocator failed; route it through
        // the standard allocation-error hook rather than a plain panic.
        let Some(ptr) = NonNull::new(ptr) else {
            handle_alloc_error(layout);
        };

        Self {
            ptr,
            capacity,
            offset: AtomicUsize::new(0),
        }
    }

    /// Tries to carve `size` bytes at `align` alignment out of this chunk,
    /// returning `None` if the chunk is full.
    fn try_alloc(&self, size: usize, align: usize) -> Option<NonNull<u8>> {
        debug_assert!(
            align.is_power_of_two() && align <= CHUNK_ALIGN,
            "alignments above CHUNK_ALIGN are not guaranteed by the base pointer"
        );
        loop {
            let current = self.offset.load(Ordering::Relaxed);

            // Round the current offset up to the requested alignment.
            let aligned = (current + align - 1) & !(align - 1);
            let new_offset = aligned + size;

            if new_offset > self.capacity {
                return None;
            }

            // Claim [aligned, new_offset) with a CAS; another thread may
            // have moved the offset, in which case reload and retry.
            match self.offset.compare_exchange_weak(
                current,
                new_offset,
                Ordering::AcqRel,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    let ptr = unsafe { self.ptr.as_ptr().add(aligned) };
                    return NonNull::new(ptr);
                }
                Err(_) => continue,
            }
        }
    }

    /// Bytes handed out so far, including alignment padding.
    fn used(&self) -> usize {
        self.offset.load(Ordering::Relaxed)
    }

    #[allow(dead_code)]
    fn remaining(&self) -> usize {
        self.capacity - self.used()
    }
}

impl Drop for Chunk {
    fn drop(&mut self) {
        // Must match the layout used in `Chunk::new`.
        let layout = Layout::from_size_align(self.capacity, CHUNK_ALIGN).expect("Invalid layout");
        unsafe { dealloc(self.ptr.as_ptr(), layout) };
    }
}

// SAFETY: the chunk exclusively owns its allocation, and the only shared
// mutable state, `offset`, is an atomic.
unsafe impl Send for Chunk {}
unsafe impl Sync for Chunk {}

/// A bump allocator pinned to a single epoch.
///
/// Individual values are never freed; all memory is released when the arena
/// is dropped, and destructors of allocated values are never run.
pub struct Arena {
    /// The epoch this arena belongs to.
    epoch: EpochId,
    /// Backing chunks; the most recently added chunk is last.
    chunks: RwLock<Vec<Chunk>>,
    /// Capacity of each newly created chunk.
    chunk_size: usize,
    /// Total bytes reserved across all chunks.
    total_allocated: AtomicUsize,
}

impl Arena {
    #[must_use]
    pub fn new(epoch: EpochId) -> Self {
        Self::with_chunk_size(epoch, DEFAULT_CHUNK_SIZE)
    }

    #[must_use]
    pub fn with_chunk_size(epoch: EpochId, chunk_size: usize) -> Self {
        let initial_chunk = Chunk::new(chunk_size);
        Self {
            epoch,
            chunks: RwLock::new(vec![initial_chunk]),
            chunk_size,
            total_allocated: AtomicUsize::new(chunk_size),
        }
    }

    #[must_use]
    pub fn epoch(&self) -> EpochId {
        self.epoch
    }

    /// Allocates `size` bytes at `align` alignment, growing the arena with
    /// a fresh chunk when no existing chunk has room.
    pub fn alloc(&self, size: usize, align: usize) -> NonNull<u8> {
        // Fast path: try existing chunks under the read lock, newest first.
        {
            let chunks = self.chunks.read();
            for chunk in chunks.iter().rev() {
                if let Some(ptr) = chunk.try_alloc(size, align) {
                    return ptr;
                }
            }
        }

        // Slow path: no chunk had room. Concurrent callers may each create
        // a chunk here; that wastes some space but stays correct.
        self.alloc_new_chunk(size, align)
    }

    /// Moves `value` into the arena and returns a reference that lives as
    /// long as the arena. The value's destructor will never run.
    pub fn alloc_value<T>(&self, value: T) -> &mut T {
        let ptr = self.alloc(std::mem::size_of::<T>(), std::mem::align_of::<T>());
        // SAFETY: `alloc` returned a block of the right size and alignment
        // that no other caller can receive.
        unsafe {
            let typed_ptr = ptr.as_ptr() as *mut T;
            typed_ptr.write(value);
            &mut *typed_ptr
        }
    }

    /// Copies `values` into the arena and returns the arena-backed slice.
    pub fn alloc_slice<T: Copy>(&self, values: &[T]) -> &mut [T] {
        if values.is_empty() {
            return &mut [];
        }

        let size = std::mem::size_of::<T>() * values.len();
        let align = std::mem::align_of::<T>();
        let ptr = self.alloc(size, align);

        // SAFETY: the block is correctly sized and aligned for
        // `values.len()` elements of `T`, and the source and destination
        // cannot overlap.
        unsafe {
            let typed_ptr = ptr.as_ptr() as *mut T;
            std::ptr::copy_nonoverlapping(values.as_ptr(), typed_ptr, values.len());
            std::slice::from_raw_parts_mut(typed_ptr, values.len())
        }
    }

    fn alloc_new_chunk(&self, size: usize, align: usize) -> NonNull<u8> {
        // Oversized requests get a dedicated chunk big enough to hold them
        // even after alignment padding.
        let chunk_size = self.chunk_size.max(size + align);
        let chunk = Chunk::new(chunk_size);

        self.total_allocated
            .fetch_add(chunk_size, Ordering::Relaxed);

        let ptr = chunk
            .try_alloc(size, align)
            .expect("Fresh chunk should have space");

        let mut chunks = self.chunks.write();
        chunks.push(chunk);

        ptr
    }

    /// Total bytes reserved from the system allocator.
    #[must_use]
    pub fn total_allocated(&self) -> usize {
        self.total_allocated.load(Ordering::Relaxed)
    }

    /// Total bytes handed out, including alignment padding.
    #[must_use]
    pub fn total_used(&self) -> usize {
        let chunks = self.chunks.read();
        chunks.iter().map(Chunk::used).sum()
    }

    /// A snapshot of the arena's memory accounting.
    #[must_use]
    pub fn stats(&self) -> ArenaStats {
        let chunks = self.chunks.read();
        ArenaStats {
            epoch: self.epoch,
            chunk_count: chunks.len(),
            total_allocated: self.total_allocated.load(Ordering::Relaxed),
            total_used: chunks.iter().map(Chunk::used).sum(),
        }
    }
}

/// Point-in-time memory accounting for an [`Arena`].
#[derive(Debug, Clone)]
pub struct ArenaStats {
    /// The epoch the arena belongs to.
    pub epoch: EpochId,
    /// Number of chunks backing the arena.
    pub chunk_count: usize,
    /// Bytes reserved from the system allocator.
    pub total_allocated: usize,
    /// Bytes handed out, including alignment padding.
    pub total_used: usize,
}

/// Manages one [`Arena`] per epoch so that a whole epoch's memory can be
/// released at once.
pub struct ArenaAllocator {
    /// Live arenas keyed by epoch.
    arenas: RwLock<hashbrown::HashMap<EpochId, Arena>>,
    /// Id of the epoch that `alloc` currently targets.
    current_epoch: AtomicUsize,
    /// Chunk capacity used for every arena.
    chunk_size: usize,
}

impl ArenaAllocator {
    #[must_use]
    pub fn new() -> Self {
        Self::with_chunk_size(DEFAULT_CHUNK_SIZE)
    }

    #[must_use]
    pub fn with_chunk_size(chunk_size: usize) -> Self {
        let allocator = Self {
            arenas: RwLock::new(hashbrown::HashMap::new()),
            current_epoch: AtomicUsize::new(0),
            chunk_size,
        };

        // The initial epoch exists from the start so `alloc` always has a
        // target arena.
        let epoch = EpochId::INITIAL;
        allocator
            .arenas
            .write()
            .insert(epoch, Arena::with_chunk_size(epoch, chunk_size));

        allocator
    }

    #[must_use]
    pub fn current_epoch(&self) -> EpochId {
        EpochId::new(self.current_epoch.load(Ordering::Acquire) as u64)
    }

    /// Starts a new epoch backed by a fresh arena and returns its id.
    pub fn new_epoch(&self) -> EpochId {
        // Hold the write lock across the counter bump and the map insert so
        // a concurrent `alloc` can never observe an epoch id whose arena is
        // not yet in the map.
        let mut arenas = self.arenas.write();
        let new_id = self.current_epoch.fetch_add(1, Ordering::AcqRel) as u64 + 1;
        let epoch = EpochId::new(new_id);
        arenas.insert(epoch, Arena::with_chunk_size(epoch, self.chunk_size));

        epoch
    }

    /// Returns a read guard over the arena for `epoch`.
    ///
    /// # Panics
    ///
    /// Panics if the epoch was never created or has already been dropped.
    pub fn arena(&self, epoch: EpochId) -> impl std::ops::Deref<Target = Arena> + '_ {
        parking_lot::RwLockReadGuard::map(self.arenas.read(), |arenas| {
            arenas.get(&epoch).expect("Epoch should exist")
        })
    }

    /// Allocates from the current epoch's arena.
    pub fn alloc(&self, size: usize, align: usize) -> NonNull<u8> {
        // Take the read lock before reading the epoch counter; paired with
        // the locking in `new_epoch`, this guarantees the current epoch's
        // arena is present in the map.
        let arenas = self.arenas.read();
        let epoch = self.current_epoch();
        arenas
            .get(&epoch)
            .expect("Current epoch exists")
            .alloc(size, align)
    }

    /// Releases all memory belonging to `epoch` at once.
    ///
    /// Dropping the current epoch leaves `alloc` with no arena to target,
    /// so only retired epochs should be dropped.
    pub fn drop_epoch(&self, epoch: EpochId) {
        self.arenas.write().remove(&epoch);
    }

    #[must_use]
    pub fn total_allocated(&self) -> usize {
        self.arenas
            .read()
            .values()
            .map(Arena::total_allocated)
            .sum()
    }
}

impl Default for ArenaAllocator {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_arena_basic_allocation() {
        let arena = Arena::new(EpochId::INITIAL);

        let ptr1 = arena.alloc(100, 8);
        let ptr2 = arena.alloc(200, 8);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());
    }

    #[test]
    fn test_arena_value_allocation() {
        let arena = Arena::new(EpochId::INITIAL);

        let value = arena.alloc_value(42u64);
        assert_eq!(*value, 42);

        *value = 100;
        assert_eq!(*value, 100);
    }

    #[test]
    fn test_arena_slice_allocation() {
        let arena = Arena::new(EpochId::INITIAL);

        let slice = arena.alloc_slice(&[1u32, 2, 3, 4, 5]);
        assert_eq!(slice, &[1, 2, 3, 4, 5]);

        slice[0] = 10;
        assert_eq!(slice[0], 10);
    }
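
    // Added check (not in the original suite): the bump-pointer math in
    // `Chunk::try_alloc` should return pointers satisfying the requested
    // alignment, even after an odd-sized allocation disturbs the offset.
    #[test]
    fn test_arena_alignment() {
        let arena = Arena::new(EpochId::INITIAL);

        // Disturb the bump pointer so later requests need padding.
        let _ = arena.alloc(3, 1);

        for align in [1usize, 2, 4, 8, 16] {
            let ptr = arena.alloc(32, align);
            assert_eq!(ptr.as_ptr() as usize % align, 0);
        }
    }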

    #[test]
    fn test_arena_large_allocation() {
        let arena = Arena::with_chunk_size(EpochId::INITIAL, 1024);

        let _ptr = arena.alloc(2048, 8);

        assert!(arena.stats().chunk_count >= 2);
    }

    #[test]
    fn test_arena_allocator_epochs() {
        let allocator = ArenaAllocator::new();

        let epoch0 = allocator.current_epoch();
        assert_eq!(epoch0, EpochId::INITIAL);

        let epoch1 = allocator.new_epoch();
        assert_eq!(epoch1, EpochId::new(1));

        let epoch2 = allocator.new_epoch();
        assert_eq!(epoch2, EpochId::new(2));

        assert_eq!(allocator.current_epoch(), epoch2);
    }

    #[test]
    fn test_arena_allocator_allocation() {
        let allocator = ArenaAllocator::new();

        let ptr1 = allocator.alloc(100, 8);
        let ptr2 = allocator.alloc(100, 8);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());
    }
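
    // Added check (not in the original suite): arenas for older epochs stay
    // usable after newer epochs are started; only `drop_epoch` retires them.
    #[test]
    fn test_arena_allocator_old_epoch_usable() {
        let allocator = ArenaAllocator::new();

        let epoch0 = allocator.current_epoch();
        let _epoch1 = allocator.new_epoch();

        // Allocating through the old epoch's arena must still succeed.
        let arena = allocator.arena(epoch0);
        let value = arena.alloc_value(7u32);
        assert_eq!(*value, 7);
    }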

    #[test]
    fn test_arena_drop_epoch() {
        let allocator = ArenaAllocator::new();

        let initial_mem = allocator.total_allocated();

        let epoch1 = allocator.new_epoch();
        {
            let arena = allocator.arena(epoch1);
            arena.alloc(10000, 8);
        }

        let after_alloc = allocator.total_allocated();
        assert!(after_alloc > initial_mem);

        allocator.drop_epoch(epoch1);

        let after_drop = allocator.total_allocated();
        assert!(after_drop < after_alloc);
    }

    #[test]
    fn test_arena_stats() {
        let arena = Arena::with_chunk_size(EpochId::new(5), 4096);

        let stats = arena.stats();
        assert_eq!(stats.epoch, EpochId::new(5));
        assert_eq!(stats.chunk_count, 1);
        assert_eq!(stats.total_allocated, 4096);
        assert_eq!(stats.total_used, 0);

        arena.alloc(100, 8);
        let stats = arena.stats();
        assert!(stats.total_used >= 100);
    }
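
    // Added smoke test (not in the original suite): `Arena` is meant to be
    // shared across threads, with lock-free allocation inside a chunk and
    // the write lock taken only to grow. Hammer one small-chunked arena from
    // several threads and verify each thread's writes survive, which would
    // catch grossly overlapping allocations.
    #[test]
    fn test_arena_concurrent_allocation() {
        use std::sync::Arc;

        let arena = Arc::new(Arena::with_chunk_size(EpochId::INITIAL, 4096));
        let mut handles = Vec::new();

        for t in 0..4u8 {
            let arena = Arc::clone(&arena);
            handles.push(std::thread::spawn(move || {
                for _ in 0..256 {
                    let slice = arena.alloc_slice(&[t; 16]);
                    assert!(slice.iter().all(|&b| b == t));
                }
            }));
        }

        for handle in handles {
            handle.join().unwrap();
        }
    }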
}