concurrent_avl_tree 0.1.0

An AVL tree supporting lock-free reads, with epoch-based memory reclamation and background batch rebalancing.
Documentation
//! # Generational Memory Arena
//!
//! Double-buffered allocator for versioned tree nodes.
//!
//! ## License & Attribution
//! SPDX-License-Identifier: MIT | Author: Dzulkifli Anwar | Version: 0.1.0 | Date: 2026-04-30

use crate::config::CACHE_LINE_ALIGNMENT;
use crate::error::ConcurrentAVLError;

/// # Generational Arena Allocator
///
/// Fixed-capacity double-buffered allocator with a linear (bump-pointer) allocation strategy.
///
/// ## Description
/// Maintains two equally sized byte buffers: the *active* buffer (published for readers)
/// and the *next* buffer (receiving new allocations). Allocation proceeds linearly with
/// mandatory alignment rounding. `reset` clears the next buffer's offset; `swap_buffers`
/// exchanges the active/next roles. Peak memory usage remains within two times total
/// capacity, since both buffers are pre-allocated at `CAPACITY` bytes each.
///
/// ## Invariant
/// `active_index ∈ {0, 1} && next_index = 1 - active_index`
/// `allocation_offsets[i] <= CAPACITY` for both buffers
///
/// ## Thread Safety
/// `allocate`, `reset`, and `swap_buffers` take `&mut self` and require external
/// synchronization. `active_buffer_len` takes `&self` and is safe for concurrent reads.
///
/// ## Performance
/// Zero dynamic allocation after construction. Offsets round up to the alignment
/// boundary to guarantee deterministic size invariants.
///
/// ## See Also
/// `crate::builder::BalancedBuilder`
pub struct GenerationalArena<const CAPACITY: usize> {
    // Two fixed-size byte buffers; each is pre-allocated to CAPACITY bytes in `new`.
    buffers: [Vec<u8>; 2],
    // Bump-pointer offset per buffer; allocation_offsets[i] never exceeds CAPACITY.
    allocation_offsets: [usize; 2],
    // Index (0 or 1) of the buffer currently published to readers.
    active_index: usize,
    // Index of the buffer receiving new allocations; always 1 - active_index.
    next_index: usize,
}

impl<const CAPACITY: usize> GenerationalArena<CAPACITY> {
    /// # Construct Arena
    ///
    /// Pre-allocates contiguous storage for both buffers.
    ///
    /// ## Panics
    /// Panics if `CAPACITY == 0`. In debug builds, also panics if
    /// `CACHE_LINE_ALIGNMENT` is not a power of two (the alignment rounding in
    /// [`GenerationalArena::allocate`] relies on a power-of-two mask).
    ///
    /// ## Complexity
    /// Time: O(CAPACITY), Space: O(CAPACITY)
    #[must_use]
    pub fn new() -> Self {
        assert!(CAPACITY > 0, "Arena capacity must be positive");
        // The mask trick in `allocate` (`& !(A - 1)`) is only correct for
        // power-of-two alignments; catch a misconfigured constant early.
        debug_assert!(
            CACHE_LINE_ALIGNMENT.is_power_of_two(),
            "CACHE_LINE_ALIGNMENT must be a power of two"
        );
        Self {
            buffers: [vec![0; CAPACITY], vec![0; CAPACITY]],
            allocation_offsets: [0, 0],
            active_index: 0,
            next_index: 1,
        }
    }

    /// # Allocate Contiguous Byte Sequence
    ///
    /// Advances the linear allocation pointer in the next buffer.
    ///
    /// ## Parameters
    /// - `byte_count`: Requested allocation size. Must be > 0 and ≤ available space.
    ///
    /// ## Returns
    /// Offset index aligned to `CACHE_LINE_ALIGNMENT`, or `Err` if capacity exhausted.
    ///
    /// ## Errors
    /// Returns `ConcurrentAVLError::AllocationExhausted` if the next buffer lacks
    /// sufficient contiguous space for `byte_count` rounded to the alignment boundary,
    /// or if the rounded size would overflow `usize`.
    /// Returns `ConcurrentAVLError::InvalidOperation` if `byte_count == 0`.
    ///
    /// ## Thread Safety
    /// Not thread-safe. Caller must serialize invocations.
    ///
    /// ## Complexity
    /// Time: O(1), Space: O(1)
    pub fn allocate(&mut self, byte_count: usize) -> Result<usize, ConcurrentAVLError> {
        if byte_count == 0 {
            return Err(ConcurrentAVLError::InvalidOperation("byte_count must be positive".into()));
        }
        // Round up to the alignment boundary. `checked_add` prevents a huge
        // `byte_count` from wrapping past the capacity check below; treating
        // overflow as exhaustion is sound because such a request can never fit.
        let rounded_bytes = byte_count
            .checked_add(CACHE_LINE_ALIGNMENT - 1)
            .ok_or(ConcurrentAVLError::AllocationExhausted)?
            & !(CACHE_LINE_ALIGNMENT - 1);

        let current_offset = self.allocation_offsets[self.next_index];
        // Guard the end-of-allocation computation against overflow as well,
        // so the offset invariant (`offset <= CAPACITY`) can never be violated.
        let end_offset = current_offset
            .checked_add(rounded_bytes)
            .ok_or(ConcurrentAVLError::AllocationExhausted)?;
        if end_offset > self.buffers[self.next_index].len() {
            return Err(ConcurrentAVLError::AllocationExhausted);
        }

        self.allocation_offsets[self.next_index] = end_offset;
        Ok(current_offset)
    }

    /// # Clear Next Buffer
    ///
    /// Resets the allocation pointer for the next buffer.
    ///
    /// ## Thread Safety
    /// Not thread-safe. Caller must serialize invocations.
    pub fn reset(&mut self) {
        self.allocation_offsets[self.next_index] = 0;
    }

    /// # Exchange Buffer Roles
    ///
    /// Makes the next buffer active and clears the previously active buffer's
    /// allocation offset (it becomes the new next buffer).
    ///
    /// ## Thread Safety
    /// Not thread-safe. Caller must serialize invocations.
    pub fn swap_buffers(&mut self) {
        std::mem::swap(&mut self.active_index, &mut self.next_index);
        // After the swap, `next_index` names the old active buffer; reset it
        // so the next generation starts allocating from offset zero.
        self.allocation_offsets[self.next_index] = 0;
    }

    /// # Retrieve Committed Allocation Byte Count
    ///
    /// Returns the total number of bytes allocated in the active buffer.
    ///
    /// ## Description
    /// Reads the linear allocation offset pointer for the currently active buffer
    /// region. Represents the committed memory footprint available for concurrent
    /// traversal. Because `allocate` rounds each request up to
    /// `CACHE_LINE_ALIGNMENT`, the value is always a multiple of the alignment
    /// and may exceed the sum of raw requested sizes.
    ///
    /// ## Examples
    /// ```
    /// use concurrent_avl_tree::memory::arena::GenerationalArena;
    ///
    /// let mut arena = GenerationalArena::<4096>::new();
    /// assert_eq!(arena.active_buffer_len(), 0);
    /// let _ = arena.allocate(64);
    /// arena.swap_buffers();
    /// // The committed length is the request rounded up to the alignment boundary.
    /// assert!(arena.active_buffer_len() >= 64);
    /// ```
    ///
    /// ## Returns
    /// Unsigned integer representing allocated bytes in the active buffer.
    /// Range: `[0, CAPACITY]`.
    ///
    /// ## Panics
    /// Never. Returns the stored offset directly from the state array.
    ///
    /// ## Complexity
    /// Time: O(1), Space: O(1)
    ///
    /// ## Thread Safety
    /// `&self` method. Thread-safe for concurrent reads after publication.
    ///
    /// ## Warning
    /// Value reflects state at invocation time. Concurrent swaps may invalidate
    /// correlation with previously allocated offsets if the caller does not
    /// synchronize access.
    ///
    /// ## See Also
    /// `GenerationalArena::swap_buffers`, `GenerationalArena::allocate`
    pub fn active_buffer_len(&self) -> usize {
        self.allocation_offsets[self.active_index]
    }
}

impl<const CAPACITY: usize> Default for GenerationalArena<CAPACITY> {
    fn default() -> Self {
        Self::new()
    }
}