use crate::config::CACHE_LINE_ALIGNMENT;
use crate::error::ConcurrentAVLError;
/// Double-buffered bump allocator: one buffer is "active" (the read side)
/// while the other ("next") receives new allocations; `swap_buffers` flips
/// the two roles. Offsets are handed out in `CACHE_LINE_ALIGNMENT` multiples
/// (see `allocate`).
pub struct GenerationalArena<const CAPACITY: usize> {
// The two generations: each is a zero-initialized buffer of `CAPACITY` bytes.
buffers: [Vec<u8>; 2],
// Bump-pointer high-water mark (bytes consumed so far) per buffer.
allocation_offsets: [usize; 2],
// Index (0 or 1) of the buffer currently being read from.
active_index: usize,
// Index (0 or 1) of the buffer currently accepting allocations.
next_index: usize,
}
impl<const CAPACITY: usize> GenerationalArena<CAPACITY> {
/// Creates an arena with two zero-initialized buffers of `CAPACITY` bytes;
/// buffer 0 starts active and buffer 1 starts as the allocation target.
///
/// # Panics
/// Panics if `CAPACITY` is zero.
#[must_use]
pub fn new() -> Self {
assert!(CAPACITY > 0, "Arena capacity must be positive");
// NOTE(review): the rounding mask in `allocate` assumes
// CACHE_LINE_ALIGNMENT is a nonzero power of two — confirm at the
// constant's definition site. Also, `vec![0; CAPACITY]` does not itself
// guarantee the buffer base is cache-line aligned; offsets are aligned
// multiples relative to the buffer start only.
Self {
buffers: [vec![0; CAPACITY], vec![0; CAPACITY]],
allocation_offsets: [0, 0],
active_index: 0,
next_index: 1,
}
}
/// Bump-allocates `byte_count` bytes from the next (write) buffer, rounding
/// the request up to a multiple of `CACHE_LINE_ALIGNMENT`. Returns the byte
/// offset of the allocation within that buffer.
///
/// # Errors
/// - `InvalidOperation` if `byte_count` is zero.
/// - `AllocationExhausted` if the rounded request does not fit in the
///   remaining space, including when the rounding or offset arithmetic
///   would overflow `usize`.
pub fn allocate(&mut self, byte_count: usize) -> Result<usize, ConcurrentAVLError> {
if byte_count == 0 {
return Err(ConcurrentAVLError::InvalidOperation("byte_count must be positive".into()));
}
// Round up with checked arithmetic: a near-usize::MAX request must report
// exhaustion rather than overflow (panic in debug builds; silent wrap —
// corrupting the offset bookkeeping — in release builds).
let rounded_bytes = byte_count
.checked_add(CACHE_LINE_ALIGNMENT - 1)
.ok_or(ConcurrentAVLError::AllocationExhausted)?
& !(CACHE_LINE_ALIGNMENT - 1);
let current_offset = self.allocation_offsets[self.next_index];
// Same overflow hazard for the end-of-allocation offset.
let end_offset = current_offset
.checked_add(rounded_bytes)
.ok_or(ConcurrentAVLError::AllocationExhausted)?;
if end_offset > self.buffers[self.next_index].len() {
return Err(ConcurrentAVLError::AllocationExhausted);
}
self.allocation_offsets[self.next_index] = end_offset;
Ok(current_offset)
}
/// Discards all allocations in the next (write) buffer; the active buffer
/// is left untouched.
pub fn reset(&mut self) {
self.allocation_offsets[self.next_index] = 0;
}
/// Promotes the next buffer to active and clears the demoted buffer's
/// offset so it is ready for a fresh generation of allocations.
pub fn swap_buffers(&mut self) {
std::mem::swap(&mut self.active_index, &mut self.next_index);
self.allocation_offsets[self.next_index] = 0;
}
/// Number of bytes currently allocated in the active (read) buffer.
#[must_use]
pub fn active_buffer_len(&self) -> usize {
self.allocation_offsets[self.active_index]
}
}
impl<const CAPACITY: usize> Default for GenerationalArena<CAPACITY> {
fn default() -> Self {
Self::new()
}
}