Skip to main content

oxigdal_core/memory/
arena.rs

1//! Arena Allocators for Batch Operations
2//!
3//! This module provides arena allocators optimized for temporary batch operations:
4//! - Bump allocator for fast sequential allocation
5//! - Arena pooling and reuse
6//! - Automatic arena reset
7//! - Per-thread arenas
8//! - Arena statistics
9
10// Arena allocators intentionally return mutable refs from shared self
11#![allow(clippy::mut_from_ref)]
12// Default impl uses expect() for configuration errors - acceptable here
13#![allow(clippy::expect_used)]
14// Unsafe code is necessary for arena allocators
15#![allow(unsafe_code)]
16
17use crate::error::{OxiGdalError, Result};
18use parking_lot::Mutex;
19use std::alloc::{Layout, alloc, dealloc};
20use std::cell::RefCell;
21use std::ptr::NonNull;
22use std::sync::Arc;
23use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
24
/// Default arena size (1 MiB), used by [`Arena::new`] and [`ArenaPool::with_defaults`].
pub const DEFAULT_ARENA_SIZE: usize = 1024 * 1024;
27
/// Arena statistics.
///
/// All counters use relaxed atomics: each field is individually accurate,
/// but a snapshot across several fields is not guaranteed to be consistent.
#[derive(Debug, Default)]
pub struct ArenaStats {
    /// Total number of successful allocations.
    pub total_allocations: AtomicU64,
    /// Bytes currently accounted as allocated (grows on allocation, shrinks on reset).
    pub bytes_allocated: AtomicUsize,
    /// Number of arena resets recorded.
    pub resets: AtomicU64,
    /// High-water mark of `bytes_allocated`; never decreases.
    pub peak_usage: AtomicUsize,
}

impl ArenaStats {
    /// Create a new, zeroed statistics block.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Record a successful allocation of `size` bytes and update the peak.
    pub fn record_allocation(&self, size: usize) {
        self.total_allocations.fetch_add(1, Ordering::Relaxed);
        let prev = self.bytes_allocated.fetch_add(size, Ordering::Relaxed);
        let new_allocated = prev.saturating_add(size);

        // `fetch_max` replaces the previous hand-rolled compare-exchange loop:
        // it atomically raises the peak only when the new value is larger.
        self.peak_usage.fetch_max(new_allocated, Ordering::Relaxed);
    }

    /// Record an arena reset that released `bytes_freed` bytes.
    ///
    /// Uses a saturating update so a `bytes_freed` larger than the current
    /// counter clamps to zero instead of wrapping around (the previous
    /// `fetch_sub` silently underflowed).
    pub fn record_reset(&self, bytes_freed: usize) {
        self.resets.fetch_add(1, Ordering::Relaxed);
        // `fetch_update` retries on contention; the closure always returns
        // Some, so this can never actually fail.
        let _ = self
            .bytes_allocated
            .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |cur| {
                Some(cur.saturating_sub(bytes_freed))
            });
    }

    /// Allocations per peak byte; 0.0 when nothing has been allocated yet.
    pub fn allocation_rate(&self) -> f64 {
        let total = self.total_allocations.load(Ordering::Relaxed);
        let peak = self.peak_usage.load(Ordering::Relaxed);
        if peak == 0 {
            0.0
        } else {
            total as f64 / peak as f64
        }
    }
}
87
/// Bump allocator arena.
///
/// Memory is handed out by advancing an atomic offset into a single heap
/// block; individual allocations are never freed, only the whole arena is
/// reset at once.
pub struct Arena {
    /// Base pointer of the backing heap allocation (never null).
    base: NonNull<u8>,
    /// Current bump offset from `base`, in bytes.
    offset: AtomicUsize,
    /// Total capacity of the backing allocation, in bytes.
    capacity: usize,
    /// Shared statistics, also handed out via `Arena::stats`.
    stats: Arc<ArenaStats>,
    /// Default alignment used by `Arena::allocate`; validated to be a
    /// power of two at construction.
    alignment: usize,
}
101
102impl Arena {
103    /// Create a new arena with default size
104    pub fn new() -> Result<Self> {
105        Self::with_capacity(DEFAULT_ARENA_SIZE)
106    }
107
108    /// Create a new arena with specified capacity
109    pub fn with_capacity(capacity: usize) -> Result<Self> {
110        Self::with_capacity_and_alignment(capacity, 16)
111    }
112
113    /// Create a new arena with specified capacity and alignment
114    pub fn with_capacity_and_alignment(capacity: usize, alignment: usize) -> Result<Self> {
115        if capacity == 0 {
116            return Err(OxiGdalError::invalid_parameter(
117                "parameter",
118                "Arena capacity must be non-zero".to_string(),
119            ));
120        }
121
122        if !alignment.is_power_of_two() {
123            return Err(OxiGdalError::invalid_parameter(
124                "parameter",
125                "Alignment must be a power of 2".to_string(),
126            ));
127        }
128
129        let layout = Layout::from_size_align(capacity, alignment)
130            .map_err(|e| OxiGdalError::allocation_error(e.to_string()))?;
131
132        // SAFETY: Layout is valid and we check for null before creating NonNull.
133        // The pointer is properly aligned according to the layout.
134        let base = unsafe {
135            let ptr = alloc(layout);
136            if ptr.is_null() {
137                return Err(OxiGdalError::allocation_error(
138                    "Failed to allocate arena".to_string(),
139                ));
140            }
141            NonNull::new_unchecked(ptr)
142        };
143
144        Ok(Self {
145            base,
146            offset: AtomicUsize::new(0),
147            capacity,
148            stats: Arc::new(ArenaStats::new()),
149            alignment,
150        })
151    }
152
153    /// Allocate memory from the arena
154    pub fn allocate(&self, size: usize) -> Result<NonNull<u8>> {
155        self.allocate_aligned(size, self.alignment)
156    }
157
158    /// Allocate aligned memory from the arena
159    pub fn allocate_aligned(&self, size: usize, alignment: usize) -> Result<NonNull<u8>> {
160        if size == 0 {
161            return Err(OxiGdalError::invalid_parameter(
162                "parameter",
163                "Allocation size must be non-zero".to_string(),
164            ));
165        }
166
167        // Align the current offset
168        let mut current = self.offset.load(Ordering::Relaxed);
169        loop {
170            let aligned = (current + alignment - 1) & !(alignment - 1);
171            let new_offset = aligned + size;
172
173            if new_offset > self.capacity {
174                return Err(OxiGdalError::allocation_error(format!(
175                    "Arena exhausted: requested {}, available {}",
176                    size,
177                    self.capacity - current
178                )));
179            }
180
181            match self.offset.compare_exchange_weak(
182                current,
183                new_offset,
184                Ordering::Relaxed,
185                Ordering::Relaxed,
186            ) {
187                Ok(_) => {
188                    self.stats.record_allocation(size);
189                    // SAFETY: Pointer arithmetic is within allocated bounds.
190                    // aligned is computed to be within capacity.
191                    let ptr = unsafe { NonNull::new_unchecked(self.base.as_ptr().add(aligned)) };
192                    return Ok(ptr);
193                }
194                Err(x) => current = x,
195            }
196        }
197    }
198
199    /// Reset the arena (invalidates all previous allocations)
200    pub fn reset(&self) {
201        let freed = self.offset.swap(0, Ordering::Relaxed);
202        self.stats.record_reset(freed);
203    }
204
205    /// Get current usage
206    pub fn usage(&self) -> usize {
207        self.offset.load(Ordering::Relaxed)
208    }
209
210    /// Get capacity
211    pub fn capacity(&self) -> usize {
212        self.capacity
213    }
214
215    /// Get available space
216    pub fn available(&self) -> usize {
217        self.capacity.saturating_sub(self.usage())
218    }
219
220    /// Check if arena is exhausted
221    pub fn is_exhausted(&self) -> bool {
222        self.available() == 0
223    }
224
225    /// Get statistics
226    pub fn stats(&self) -> Arc<ArenaStats> {
227        Arc::clone(&self.stats)
228    }
229
230    /// Allocate a slice of values
231    pub fn allocate_slice<T>(&self, count: usize) -> Result<&mut [T]> {
232        let size = count * std::mem::size_of::<T>();
233        let alignment = std::mem::align_of::<T>();
234        let ptr = self.allocate_aligned(size, alignment)?;
235
236        // SAFETY: ptr is valid and properly aligned. count has been validated
237        // to fit within the allocated arena space.
238        Ok(unsafe { std::slice::from_raw_parts_mut(ptr.as_ptr().cast::<T>(), count) })
239    }
240
241    /// Allocate and initialize a value
242    pub fn allocate_value<T>(&self, value: T) -> Result<&mut T> {
243        let size = std::mem::size_of::<T>();
244        let alignment = std::mem::align_of::<T>();
245        let ptr = self.allocate_aligned(size, alignment)?;
246
247        // SAFETY: ptr is valid, aligned, and we have exclusive access.
248        // T's Drop implementation will be called automatically.
249        unsafe {
250            let typed_ptr = ptr.as_ptr().cast::<T>();
251            std::ptr::write(typed_ptr, value);
252            Ok(&mut *typed_ptr)
253        }
254    }
255}
256
impl Drop for Arena {
    fn drop(&mut self) {
        // SAFETY: `capacity` and `alignment` are exactly the values used to
        // build the layout in `with_capacity_and_alignment` (both validated
        // there), and `base` came from `alloc` with that layout. `&mut self`
        // guarantees this is the final use of the pointer.
        unsafe {
            let layout = Layout::from_size_align_unchecked(self.capacity, self.alignment);
            dealloc(self.base.as_ptr(), layout);
        }
    }
}
267
// SAFETY: Arena exclusively owns its backing allocation, and the bump
// offset is an AtomicUsize, so moving the arena to another thread is sound.
unsafe impl Send for Arena {}
// SAFETY: concurrent `allocate*` calls synchronize through the atomic
// offset CAS, so distinct threads always receive disjoint byte ranges.
// NOTE(review): `reset` takes `&self` and is NOT protected against other
// threads still holding references into the arena — callers must ensure no
// allocation outlives a concurrent reset.
unsafe impl Sync for Arena {}
276
/// Arena pool for reusing arenas.
///
/// Holds up to `max_pool_size` idle arenas of a fixed `capacity` so batch
/// operations can avoid repeated large allocations.
pub struct ArenaPool {
    /// Idle arenas available for reuse (LIFO).
    available: Mutex<Vec<Arena>>,
    /// Capacity, in bytes, of each arena this pool creates.
    capacity: usize,
    /// Maximum number of idle arenas retained; excess arenas are dropped
    /// on release.
    max_pool_size: usize,
    /// Pool-level statistics handle.
    /// NOTE(review): currently never updated by pool operations — arenas
    /// carry their own independent stats. Confirm intended wiring.
    stats: Arc<ArenaStats>,
}
288
289impl ArenaPool {
290    /// Create a new arena pool
291    #[must_use]
292    pub fn new(capacity: usize, max_pool_size: usize) -> Self {
293        Self {
294            available: Mutex::new(Vec::new()),
295            capacity,
296            max_pool_size,
297            stats: Arc::new(ArenaStats::new()),
298        }
299    }
300
301    /// Create with default settings
302    #[must_use]
303    pub fn with_defaults() -> Self {
304        Self::new(DEFAULT_ARENA_SIZE, 16)
305    }
306
307    /// Acquire an arena from the pool
308    pub fn acquire(&self) -> Result<Arena> {
309        let mut available = self.available.lock();
310
311        if let Some(arena) = available.pop() {
312            arena.reset();
313            Ok(arena)
314        } else {
315            drop(available);
316            Arena::with_capacity(self.capacity)
317        }
318    }
319
320    /// Return an arena to the pool
321    pub fn release(&self, arena: Arena) {
322        let mut available = self.available.lock();
323
324        if available.len() < self.max_pool_size {
325            available.push(arena);
326        }
327        // Otherwise, arena is dropped
328    }
329
330    /// Get pool statistics
331    pub fn stats(&self) -> Arc<ArenaStats> {
332        Arc::clone(&self.stats)
333    }
334
335    /// Get current pool size
336    pub fn pool_size(&self) -> usize {
337        self.available.lock().len()
338    }
339
340    /// Clear the pool
341    pub fn clear(&self) {
342        self.available.lock().clear();
343    }
344}
345
impl Default for ArenaPool {
    /// Equivalent to [`ArenaPool::with_defaults`].
    fn default() -> Self {
        Self::with_defaults()
    }
}
351
// Thread-local arena slot, populated lazily on first use.
thread_local! {
    static THREAD_ARENA: RefCell<Option<Arena>> = const { RefCell::new(None) };
}
356
357/// Get or create a thread-local arena
358pub fn get_thread_arena() -> Result<Arena> {
359    THREAD_ARENA.with(|arena| {
360        let mut arena_ref = arena.borrow_mut();
361        if arena_ref.is_none() {
362            *arena_ref = Some(Arena::new()?);
363        }
364        Arena::new()
365    })
366}
367
368/// Reset thread-local arena
369pub fn reset_thread_arena() -> Result<()> {
370    THREAD_ARENA.with(|arena| {
371        if let Some(arena) = arena.borrow().as_ref() {
372            arena.reset();
373        }
374        Ok(())
375    })
376}
377
378/// RAII guard for automatic arena reset
379pub struct ArenaGuard<'a> {
380    arena: &'a Arena,
381    saved_offset: usize,
382}
383
384impl<'a> ArenaGuard<'a> {
385    /// Create a new arena guard
386    pub fn new(arena: &'a Arena) -> Self {
387        let saved_offset = arena.offset.load(Ordering::Relaxed);
388        Self {
389            arena,
390            saved_offset,
391        }
392    }
393
394    /// Get the arena
395    #[must_use]
396    pub fn arena(&self) -> &Arena {
397        self.arena
398    }
399}
400
401impl Drop for ArenaGuard<'_> {
402    fn drop(&mut self) {
403        // Restore the arena to the state it was in when the guard was created
404        self.arena
405            .offset
406            .store(self.saved_offset, Ordering::Relaxed);
407    }
408}
409
#[cfg(test)]
#[allow(useless_ptr_null_checks)]
mod tests {
    use super::*;

    /// Basic bump allocation: consecutive allocations, usage accounting.
    #[test]
    fn test_arena_basic() {
        // Use alignment of 1 to avoid padding in this test
        let arena = Arena::with_capacity_and_alignment(1024, 1)
            .expect("Test setup failed: arena creation with custom alignment");

        let ptr1 = arena
            .allocate(100)
            .expect("Arena allocation should succeed in test");
        let ptr2 = arena
            .allocate(200)
            .expect("Arena allocation should succeed in test");

        assert!(!ptr1.as_ptr().is_null());
        assert!(!ptr2.as_ptr().is_null());
        assert_ne!(ptr1, ptr2);

        // With alignment 1 the allocations are back-to-back: 100 + 200.
        assert_eq!(arena.usage(), 300);
        assert_eq!(arena.available(), 724);
    }

    /// Reset returns usage to zero and allows further allocation.
    #[test]
    fn test_arena_reset() {
        let arena = Arena::with_capacity(1024).expect("Test setup failed: arena creation");

        arena
            .allocate(500)
            .expect("Arena allocation should succeed in test");
        assert_eq!(arena.usage(), 500);

        arena.reset();
        assert_eq!(arena.usage(), 0);

        arena
            .allocate(300)
            .expect("Arena allocation should succeed after reset");
        assert_eq!(arena.usage(), 300);
    }

    /// Allocation past capacity must fail with an error, not panic.
    #[test]
    fn test_arena_exhaustion() {
        let arena = Arena::with_capacity(100).expect("Test setup failed: arena creation");

        arena.allocate(50).expect("First allocation should succeed");
        arena
            .allocate(30)
            .expect("Second allocation should succeed");

        let result = arena.allocate(50);
        assert!(result.is_err());
    }

    /// Released arenas are retained by the pool up to max_pool_size.
    #[test]
    fn test_arena_pool() {
        let pool = ArenaPool::new(1024, 4);

        let arena1 = pool.acquire().expect("Pool should acquire first arena");
        let arena2 = pool.acquire().expect("Pool should acquire second arena");

        assert_eq!(pool.pool_size(), 0);

        pool.release(arena1);
        pool.release(arena2);

        assert_eq!(pool.pool_size(), 2);
    }

    /// Typed slice allocation is writable and has the requested length.
    #[test]
    fn test_arena_slice() {
        let arena = Arena::with_capacity(1024).expect("Test setup failed: arena creation");

        let slice: &mut [u32] = arena
            .allocate_slice(10)
            .expect("Arena slice allocation should succeed");
        assert_eq!(slice.len(), 10);

        slice[0] = 42;
        assert_eq!(slice[0], 42);
    }

    /// Single-value allocation initializes and remains mutable.
    #[test]
    fn test_arena_value() {
        let arena = Arena::with_capacity(1024).expect("Test setup failed: arena creation");

        let value = arena
            .allocate_value(42u32)
            .expect("Arena value allocation should succeed");
        assert_eq!(*value, 42);

        *value = 100;
        assert_eq!(*value, 100);
    }

    /// Guard restores the pre-guard offset when dropped.
    #[test]
    fn test_arena_guard() {
        // Use alignment of 1 to avoid padding in this test
        let arena = Arena::with_capacity_and_alignment(1024, 1)
            .expect("Test setup failed: arena creation with custom alignment");
        arena
            .allocate(100)
            .expect("Initial allocation should succeed");
        assert_eq!(arena.usage(), 100);

        {
            let _guard = ArenaGuard::new(&arena);
            arena
                .allocate(200)
                .expect("Allocation within guard should succeed");
            assert_eq!(arena.usage(), 300);
        }

        // Guard dropped, arena reset to state before guard
        assert_eq!(arena.usage(), 100);
    }
529}