// ruvector_core/arena.rs

//! Arena allocator for batch operations
//!
//! This module provides arena-based memory allocation to reduce allocation
//! overhead in hot paths and improve memory locality.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::cell::RefCell;
use std::ptr;

/// Arena allocator for temporary allocations
///
/// Use this for batch operations where many temporary allocations
/// are needed and can be freed all at once.
pub struct Arena {
    // Growable list of fixed-size chunks. `RefCell` gives single-threaded
    // interior mutability so allocation works through a shared `&Arena`.
    chunks: RefCell<Vec<Chunk>>,
    // Minimum size in bytes of each newly created chunk.
    chunk_size: usize,
}

// One contiguous allocation that the arena bump-allocates from.
struct Chunk {
    // Base pointer obtained from the global allocator (layout uses 64-byte
    // alignment; see `alloc_raw` and `Drop for Arena`).
    data: *mut u8,
    // Total bytes available in this chunk.
    capacity: usize,
    // Bytes handed out so far; the next allocation starts at this offset.
    used: usize,
}

25impl Arena {
26    /// Create a new arena with the specified chunk size
27    pub fn new(chunk_size: usize) -> Self {
28        Self {
29            chunks: RefCell::new(Vec::new()),
30            chunk_size,
31        }
32    }
33
34    /// Create an arena with a default 1MB chunk size
35    pub fn with_default_chunk_size() -> Self {
36        Self::new(1024 * 1024) // 1MB
37    }
38
39    /// Allocate a buffer of the specified size
40    pub fn alloc_vec<T>(&self, count: usize) -> ArenaVec<T> {
41        let size = count * std::mem::size_of::<T>();
42        let align = std::mem::align_of::<T>();
43
44        let ptr = self.alloc_raw(size, align);
45
46        ArenaVec {
47            ptr: ptr as *mut T,
48            len: 0,
49            capacity: count,
50            _phantom: std::marker::PhantomData,
51        }
52    }
53
54    /// Allocate raw bytes with specified alignment
55    fn alloc_raw(&self, size: usize, align: usize) -> *mut u8 {
56        // SECURITY: Validate alignment is a power of 2 and size is reasonable
57        assert!(align > 0 && align.is_power_of_two(), "Alignment must be a power of 2");
58        assert!(size > 0, "Cannot allocate zero bytes");
59        assert!(size <= isize::MAX as usize, "Allocation size too large");
60
61        let mut chunks = self.chunks.borrow_mut();
62
63        // Try to allocate from the last chunk
64        if let Some(chunk) = chunks.last_mut() {
65            // Align the current position
66            let current = chunk.used;
67            let aligned = (current + align - 1) & !(align - 1);
68
69            // SECURITY: Check for overflow in alignment calculation
70            if aligned < current {
71                panic!("Alignment calculation overflow");
72            }
73
74            let needed = aligned.checked_add(size)
75                .expect("Arena allocation size overflow");
76
77            if needed <= chunk.capacity {
78                chunk.used = needed;
79                return unsafe {
80                    // SECURITY: Verify pointer arithmetic doesn't overflow
81                    let ptr = chunk.data.add(aligned);
82                    debug_assert!(ptr as usize >= chunk.data as usize, "Pointer underflow");
83                    ptr
84                };
85            }
86        }
87
88        // Need a new chunk
89        let chunk_size = self.chunk_size.max(size + align);
90        let layout = Layout::from_size_align(chunk_size, 64).unwrap();
91        let data = unsafe { alloc(layout) };
92
93        let aligned = align;
94        let chunk = Chunk {
95            data,
96            capacity: chunk_size,
97            used: aligned + size,
98        };
99
100        let ptr = unsafe { data.add(aligned) };
101        chunks.push(chunk);
102
103        ptr
104    }
105
106    /// Reset the arena, allowing reuse of allocated memory
107    pub fn reset(&self) {
108        let mut chunks = self.chunks.borrow_mut();
109        for chunk in chunks.iter_mut() {
110            chunk.used = 0;
111        }
112    }
113
114    /// Get total allocated bytes
115    pub fn allocated_bytes(&self) -> usize {
116        let chunks = self.chunks.borrow();
117        chunks.iter().map(|c| c.capacity).sum()
118    }
119
120    /// Get used bytes
121    pub fn used_bytes(&self) -> usize {
122        let chunks = self.chunks.borrow();
123        chunks.iter().map(|c| c.used).sum()
124    }
125}
126
127impl Drop for Arena {
128    fn drop(&mut self) {
129        let chunks = self.chunks.borrow();
130        for chunk in chunks.iter() {
131            let layout = Layout::from_size_align(chunk.capacity, 64).unwrap();
132            unsafe {
133                dealloc(chunk.data, layout);
134            }
135        }
136    }
137}
138
/// Vector allocated from an arena
///
/// A fixed-capacity, push-only buffer whose storage lives inside an `Arena`.
/// NOTE(review): no `Drop` impl for `ArenaVec` is visible in this file, so
/// element destructors appear never to run (typical arena semantics for
/// `Copy`-like payloads) — confirm before storing `T: Drop` values.
pub struct ArenaVec<T> {
    // Start of the arena-backed buffer (not owned; the arena frees it).
    ptr: *mut T,
    // Number of initialized elements (valid prefix of the buffer).
    len: usize,
    // Maximum number of elements the buffer can hold.
    capacity: usize,
    // Marks logical ownership of `T` for variance/drop-check purposes.
    _phantom: std::marker::PhantomData<T>,
}

147impl<T> ArenaVec<T> {
148    /// Push an element (panics if capacity exceeded)
149    pub fn push(&mut self, value: T) {
150        // SECURITY: Bounds check before pointer arithmetic
151        assert!(self.len < self.capacity, "ArenaVec capacity exceeded");
152        assert!(!self.ptr.is_null(), "ArenaVec pointer is null");
153
154        unsafe {
155            // Additional safety: verify the pointer offset is within bounds
156            let offset_ptr = self.ptr.add(self.len);
157            debug_assert!(
158                offset_ptr as usize >= self.ptr as usize,
159                "Pointer arithmetic overflow"
160            );
161            ptr::write(offset_ptr, value);
162        }
163        self.len += 1;
164    }
165
166    /// Get length
167    pub fn len(&self) -> usize {
168        self.len
169    }
170
171    /// Check if empty
172    pub fn is_empty(&self) -> bool {
173        self.len == 0
174    }
175
176    /// Get capacity
177    pub fn capacity(&self) -> usize {
178        self.capacity
179    }
180
181    /// Get as slice
182    pub fn as_slice(&self) -> &[T] {
183        // SECURITY: Bounds check before creating slice
184        assert!(self.len <= self.capacity, "Length exceeds capacity");
185        assert!(!self.ptr.is_null(), "Cannot create slice from null pointer");
186
187        unsafe { std::slice::from_raw_parts(self.ptr, self.len) }
188    }
189
190    /// Get as mutable slice
191    pub fn as_mut_slice(&mut self) -> &mut [T] {
192        // SECURITY: Bounds check before creating slice
193        assert!(self.len <= self.capacity, "Length exceeds capacity");
194        assert!(!self.ptr.is_null(), "Cannot create slice from null pointer");
195
196        unsafe { std::slice::from_raw_parts_mut(self.ptr, self.len) }
197    }
198}
199
200impl<T> std::ops::Deref for ArenaVec<T> {
201    type Target = [T];
202
203    fn deref(&self) -> &[T] {
204        self.as_slice()
205    }
206}
207
208impl<T> std::ops::DerefMut for ArenaVec<T> {
209    fn deref_mut(&mut self) -> &mut [T] {
210        self.as_mut_slice()
211    }
212}
213
/// Thread-local arena for per-thread allocations
// NOTE(review): nothing visible here resets this arena automatically, so
// per-thread memory grows until the thread exits — callers should invoke
// `reset()` between batches. Access via `THREAD_ARENA.with(|a| ...)`.
thread_local! {
    static THREAD_ARENA: RefCell<Arena> = RefCell::new(Arena::with_default_chunk_size());
}

// A `thread_arena()` accessor is intentionally omitted: `RefCell::borrow()`
// produces a guard that cannot escape the `LocalKey::with` closure, so a
// function returning such a guard does not compile.
// Use `THREAD_ARENA.with(|arena| { ... })` directly instead.
/*
pub fn thread_arena() -> impl std::ops::Deref<Target = Arena> {
    THREAD_ARENA.with(|arena| {
        arena.borrow()
    })
}
*/

#[cfg(test)]
mod tests {
    use super::*;

    /// Pushing within capacity stores values retrievable by index.
    #[test]
    fn test_arena_alloc() {
        let arena = Arena::new(1024);

        let mut vec1 = arena.alloc_vec::<f32>(10);
        vec1.push(1.0);
        vec1.push(2.0);
        vec1.push(3.0);

        assert_eq!(vec1.len(), 3);
        assert_eq!(vec1[0], 1.0);
        assert_eq!(vec1[1], 2.0);
        assert_eq!(vec1[2], 3.0);
    }

    /// Allocations of different element types coexist in one arena.
    #[test]
    fn test_arena_multiple_allocs() {
        let arena = Arena::new(1024);

        let vec1 = arena.alloc_vec::<u32>(100);
        let vec2 = arena.alloc_vec::<u64>(50);
        let vec3 = arena.alloc_vec::<f32>(200);

        assert_eq!(vec1.capacity(), 100);
        assert_eq!(vec2.capacity(), 50);
        assert_eq!(vec3.capacity(), 200);
    }

    /// Buffers are placed at addresses satisfying the element alignment,
    /// even when a preceding allocation leaves the bump offset unaligned.
    #[test]
    fn test_arena_alignment() {
        let arena = Arena::new(1024);

        let _bytes = arena.alloc_vec::<u8>(3); // leave offset unaligned for u64
        let wide = arena.alloc_vec::<u64>(4);

        let addr = wide.as_slice().as_ptr() as usize;
        assert_eq!(addr % std::mem::align_of::<u64>(), 0);
    }

    /// `reset` reclaims every byte while retaining chunk capacity for reuse.
    #[test]
    fn test_arena_reset() {
        let arena = Arena::new(1024);

        {
            let _vec1 = arena.alloc_vec::<f32>(100);
            let _vec2 = arena.alloc_vec::<f32>(100);
        }

        let used_before = arena.used_bytes();
        assert!(used_before > 0);

        arena.reset();

        // Stronger than the old `used_after < used_before` check: a full
        // reset leaves exactly zero bytes in use.
        assert_eq!(arena.used_bytes(), 0);
        // The chunks themselves stay allocated for reuse.
        assert!(arena.allocated_bytes() > 0);
    }
}