use crate::backend::native::NativeBackendError;
use crate::backend::native::NativeResult;
use crate::backend::native::v3::constants::{DEFAULT_PAGE_SIZE, V3_HEADER_SIZE};
#[cfg(feature = "v3-forensics")]
use crate::backend::native::v3::forensics::FORENSIC_COUNTERS;
use crate::backend::native::v3::header::PersistentHeaderV3;
/// Size in bytes of every data page (re-exported from the v3 constants).
pub const PAGE_SIZE: u64 = DEFAULT_PAGE_SIZE;
/// Initial capacity hint (in pages) for the allocation bitmap.
const INITIAL_BITMAP_PAGES: usize = 1024;
/// Allocation state of a single page as reported by `PageAllocator`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PageState {
    /// Page is available for allocation.
    Free,
    /// Page is currently in use.
    Allocated,
    /// Page is pinned.
    /// NOTE(review): no code path in this file ever produces this variant —
    /// `pin_page` validates but records no pin state; confirm intended.
    Pinned,
}
/// In-memory page allocation tracker: a bitmap of allocated pages plus a
/// LIFO free list of reusable page ids.
#[derive(Clone)]
pub struct PageAllocator {
    /// `bitmap[i]` is true when page `i` is allocated. May be shorter than
    /// `total_pages`; pages past its end are treated as free.
    bitmap: Vec<bool>,
    /// Page ids returned by `deallocate`, reused LIFO by `allocate`.
    free_list: Vec<u64>,
    /// One past the highest page id ever handed out (file size in pages).
    total_pages: u64,
}
impl PageAllocator {
    /// Builds an allocator from a persisted v3 header.
    ///
    /// Pages 0 (file header) and 1 (first reserved data page) are always
    /// marked allocated in the bitmap. `total_pages` is copied verbatim from
    /// the header and may be 0 for a freshly created file; `allocate` treats
    /// anything below 2 as "next new page is 2".
    pub fn new(header: &PersistentHeaderV3) -> Self {
        let mut bitmap = Vec::with_capacity(INITIAL_BITMAP_PAGES);
        bitmap.push(true); // page 0: header page, never freeable
        bitmap.push(true); // page 1: reserved data page
        Self {
            bitmap,
            free_list: Vec::new(),
            total_pages: header.total_pages,
        }
    }

    /// Hands out a page id, preferring LIFO reuse of freed pages over
    /// growing the file.
    pub fn allocate(&mut self) -> NativeResult<u64> {
        #[cfg(feature = "v3-forensics")]
        FORENSIC_COUNTERS
            .page_allocate_count
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        // Fast path: reuse the most recently freed page (LIFO).
        if let Some(page_id) = self.free_list.pop() {
            let page_idx = page_id as usize;
            // BUGFIX: previously a reused page whose index lay beyond the
            // bitmap's current length was handed out without being marked
            // allocated, so a later `deallocate` of that page was
            // misreported as a double-free. Grow the bitmap and mark the
            // page unconditionally.
            if page_idx >= self.bitmap.len() {
                self.bitmap.resize(page_idx + 1, false);
            }
            self.bitmap[page_idx] = true;
            return Ok(page_id);
        }
        // Slow path: extend the file. Pages 0 and 1 are reserved, so the
        // first page ever handed out is page 2.
        let new_page_id = self.total_pages.max(2);
        if new_page_id as usize >= self.bitmap.len() {
            // Grow in 1024-page chunks to amortize reallocations.
            self.bitmap.resize((new_page_id as usize) + 1024, false);
        }
        self.bitmap[new_page_id as usize] = true;
        self.total_pages = new_page_id + 1;
        Ok(new_page_id)
    }

    /// Returns `page_id` to the free list so `allocate` can reuse it.
    ///
    /// # Errors
    /// - `InvalidHeader` when asked to free page 0 (the header page).
    /// - `CorruptionDetected` when the page is not currently marked
    ///   allocated (double-free, or a page this allocator never tracked).
    pub fn deallocate(&mut self, page_id: u64) -> NativeResult<()> {
        if page_id == 0 {
            return Err(NativeBackendError::InvalidHeader {
                field: "page_id".to_string(),
                reason: "Cannot free header page (page 0)".to_string(),
            });
        }
        let page_idx = page_id as usize;
        // Pages past the end of the bitmap were never marked allocated, so
        // freeing one is reported exactly like a double-free; no need to
        // grow the bitmap just to read back `false`.
        if page_idx >= self.bitmap.len() || !self.bitmap[page_idx] {
            return Err(NativeBackendError::CorruptionDetected {
                context: format!("Double-free detected for page {}", page_id),
                source: None,
            });
        }
        self.bitmap[page_idx] = false;
        self.free_list.push(page_id);
        Ok(())
    }

    /// Reports the allocation state of `page_id`.
    ///
    /// Page 0 is always `Allocated`. Pages beyond the bitmap's length (but
    /// below `total_pages`) are implicitly `Free`.
    ///
    /// # Errors
    /// `InvalidHeader` when `page_id` is at or beyond `total_pages`.
    pub fn get_page_state(&self, page_id: u64) -> NativeResult<PageState> {
        if page_id == 0 {
            return Ok(PageState::Allocated);
        }
        let page_idx = page_id as usize;
        if page_idx >= self.total_pages as usize {
            return Err(NativeBackendError::InvalidHeader {
                field: "page_id".to_string(),
                reason: format!("Page {} exceeds max pages {}", page_id, self.total_pages),
            });
        }
        if page_idx >= self.bitmap.len() {
            return Ok(PageState::Free);
        }
        let state = if self.bitmap[page_idx] {
            PageState::Allocated
        } else {
            PageState::Free
        };
        Ok(state)
    }

    /// Validates that `page_id` can be pinned.
    ///
    /// NOTE(review): this only checks the page is allocated — no pin count
    /// or `PageState::Pinned` transition is recorded, so `get_page_state`
    /// still reports `Allocated` afterwards. Confirm whether pinning is
    /// meant to be tracked.
    ///
    /// # Errors
    /// `InvalidHeader` when the page is free or out of range.
    pub fn pin_page(&mut self, page_id: u64) -> NativeResult<()> {
        let state = self.get_page_state(page_id)?;
        if state == PageState::Free {
            return Err(NativeBackendError::InvalidHeader {
                field: "page_state".to_string(),
                reason: format!("Cannot pin free page {}", page_id),
            });
        }
        Ok(())
    }

    /// Counterpart of `pin_page`. Since no pin state is tracked, this is
    /// effectively just a range check on `page_id`.
    pub fn unpin_page(&mut self, page_id: u64) -> NativeResult<()> {
        let _state = self.get_page_state(page_id)?;
        Ok(())
    }

    /// Returns `(allocated, free, total)` page counts.
    ///
    /// `allocated` counts set bits in the bitmap (including reserved pages
    /// 0 and 1); `free` is the current free-list length; `total` is the
    /// tracked `total_pages`, which can lag behind `allocated` for a fresh
    /// header.
    pub fn stats(&self) -> (u64, u64, u64) {
        let allocated = self.bitmap.iter().filter(|&&x| x).count() as u64;
        (allocated, self.free_list.len() as u64, self.total_pages)
    }

    /// Page id the next `allocate` would reuse, or 0 when the free list is
    /// empty (page 0 itself can never be on the free list).
    pub fn free_list_head(&self) -> u64 {
        self.free_list.last().copied().unwrap_or(0)
    }

    /// One past the highest page id ever handed out.
    pub fn total_pages(&self) -> u64 {
        self.total_pages
    }

    /// Computes the byte offset of `page_id` within the file.
    ///
    /// Page 0 is the header at offset 0; page N (N >= 1) lives at
    /// `V3_HEADER_SIZE + (N - 1) * PAGE_SIZE`.
    ///
    /// # Errors
    /// `InvalidHeader` when the offset arithmetic would overflow `u64`
    /// (previously this panicked in debug builds and wrapped in release).
    pub fn page_offset(page_id: u64) -> NativeResult<u64> {
        if page_id == 0 {
            return Ok(0);
        }
        (page_id - 1)
            .checked_mul(PAGE_SIZE)
            .and_then(|body| V3_HEADER_SIZE.checked_add(body))
            .ok_or_else(|| NativeBackendError::InvalidHeader {
                field: "page_id".to_string(),
                reason: format!("Offset for page {} overflows u64", page_id),
            })
    }

    /// Verifies `page_data` against `stored_checksum` using `xor_checksum`.
    ///
    /// # Errors
    /// `InvalidChecksum` carrying both values when they disagree.
    pub fn validate_checksum(page_data: &[u8], stored_checksum: u64) -> NativeResult<()> {
        let calculated = xor_checksum(page_data);
        if calculated != stored_checksum {
            return Err(NativeBackendError::InvalidChecksum {
                expected: stored_checksum,
                found: calculated,
            });
        }
        Ok(())
    }
}
/// Position-aware XOR checksum over `data`, folded into a fixed seed.
///
/// Each byte is XORed together with its index, so many (not all)
/// reorderings change the result; an empty slice yields the seed itself.
pub fn xor_checksum(data: &[u8]) -> u64 {
    const SEED: u64 = 0x5A5A5A5A5A5A5A5A;
    data.iter()
        .enumerate()
        .fold(SEED, |acc, (pos, &b)| acc ^ (b as u64) ^ (pos as u64))
}
/// On-disk header written at the start of a free page, chaining free pages
/// into an intrusive singly linked list.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct FreePageHeader {
    /// Page id of the next free page in the chain (0 when there is no next
    /// free page — see `PageAllocator::free_list_head`).
    pub next_free: u64,
    /// Checksum over the serialized `next_free` field; starts at 0 until
    /// `calculate_checksum` is applied by the caller.
    pub checksum: u64,
}
impl FreePageHeader {
    /// Serialized size: two little-endian u64 fields.
    pub const SIZE: usize = 16;

    /// Builds a header chaining to `next_free`; the checksum starts at 0
    /// and is filled in separately (see `calculate_checksum`).
    pub fn new(next_free: u64) -> Self {
        Self {
            next_free,
            checksum: 0,
        }
    }

    /// Serializes as `next_free` then `checksum`, both little-endian.
    pub fn to_bytes(&self) -> [u8; Self::SIZE] {
        let mut buf = [0u8; Self::SIZE];
        let (next_part, sum_part) = buf.split_at_mut(8);
        next_part.copy_from_slice(&self.next_free.to_le_bytes());
        sum_part.copy_from_slice(&self.checksum.to_le_bytes());
        buf
    }

    /// Deserializes from the first `SIZE` bytes of `bytes`; returns `None`
    /// when the slice is too short.
    pub fn from_bytes(bytes: &[u8]) -> Option<Self> {
        let next_bytes = bytes.get(0..8)?;
        let sum_bytes = bytes.get(8..16)?;
        Some(Self {
            next_free: u64::from_le_bytes(next_bytes.try_into().ok()?),
            checksum: u64::from_le_bytes(sum_bytes.try_into().ok()?),
        })
    }

    /// Checksum over the serialized `next_free` field only (the stored
    /// `checksum` field does not feed into itself).
    pub fn calculate_checksum(&self) -> u64 {
        xor_checksum(&self.next_free.to_le_bytes())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // A fresh header reports 0 total pages, yet pages 0 and 1 are already
    // reserved in the bitmap.
    #[test]
    fn test_allocator_initialization() {
        let header = PersistentHeaderV3::new_v3();
        let allocator = PageAllocator::new(&header);
        let (allocated, free, total) = allocator.stats();
        assert_eq!(
            total, 0,
            "New allocator should have 0 total pages from header"
        );
        assert_eq!(
            allocated, 2,
            "Pages 0 (header) and 1 (data) should be reserved"
        );
        assert_eq!(free, 0, "Should have 0 free pages initially (all reserved)");
    }

    // Offsets: page 0 at 0, page N at V3_HEADER_SIZE + (N-1)*PAGE_SIZE.
    #[test]
    fn test_page_offset_calculation() {
        assert_eq!(
            PageAllocator::page_offset(0).unwrap(),
            0,
            "Page 0 should be at offset 0"
        );
        let first_data = V3_HEADER_SIZE;
        assert_eq!(
            PageAllocator::page_offset(1).unwrap(),
            first_data,
            "Page 1 should start after header"
        );
        let second_data = V3_HEADER_SIZE + PAGE_SIZE;
        assert_eq!(
            PageAllocator::page_offset(2).unwrap(),
            second_data,
            "Page 2 should start after page 1"
        );
    }

    // With an empty free list, new pages are handed out sequentially
    // starting at 2 (0 and 1 are reserved).
    #[test]
    fn test_allocate_new_pages() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        let page1 = allocator.allocate().unwrap();
        assert_eq!(
            page1, 2,
            "First allocation should be page 2 (pages 0,1 reserved)"
        );
        let state1 = allocator.get_page_state(page1).unwrap();
        assert_eq!(state1, PageState::Allocated);
        let page2 = allocator.allocate().unwrap();
        assert_eq!(page2, 3, "Second allocation should be page 3");
    }

    // A deallocated page reads back as Free.
    #[test]
    fn test_deallocate_pages() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        let page1 = allocator.allocate().unwrap();
        assert_eq!(page1, 2, "First allocation returns page 2");
        let page2 = allocator.allocate().unwrap();
        assert_eq!(page2, 3, "Second allocation returns page 3");
        allocator.deallocate(page2).unwrap();
        let state = allocator.get_page_state(page2).unwrap();
        assert_eq!(state, PageState::Free);
    }

    // Freeing the same page twice must be rejected.
    #[test]
    fn test_double_free_detection() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        let page0 = allocator.allocate().unwrap();
        assert_eq!(page0, 2);
        let page1 = allocator.allocate().unwrap();
        assert_eq!(page1, 3);
        allocator.deallocate(page1).unwrap();
        let result = allocator.deallocate(page1);
        assert!(result.is_err(), "Double-free should return error");
    }

    // pin_page succeeds on an allocated page; state stays Allocated
    // (no Pinned transition is tracked by the allocator).
    #[test]
    fn test_pin_page() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        let page = allocator.allocate().unwrap();
        allocator.pin_page(page).unwrap();
        let state = allocator.get_page_state(page).unwrap();
        assert_eq!(state, PageState::Allocated);
    }

    // Round-trip checksum validation, plus rejection of a perturbed value.
    #[test]
    fn test_checksum_validation() {
        let data = b"test page data";
        let checksum = xor_checksum(data);
        assert!(PageAllocator::validate_checksum(data, checksum).is_ok());
        let result = PageAllocator::validate_checksum(data, checksum + 1);
        assert!(result.is_err(), "Invalid checksum should fail validation");
    }

    // FreePageHeader serializes as little-endian next_free then checksum,
    // and round-trips through from_bytes.
    #[test]
    fn test_free_page_header_serialization() {
        let header = FreePageHeader::new(42);
        let bytes = header.to_bytes();
        assert_eq!(bytes[0..8], 42u64.to_le_bytes());
        assert_eq!(bytes[8..16], 0u64.to_le_bytes());
        let deserialized = FreePageHeader::from_bytes(&bytes).unwrap();
        assert_eq!(deserialized.next_free, 42);
    }

    // Freed pages are reused in LIFO order; once the free list is drained,
    // allocation falls back to extending the file.
    #[test]
    fn test_free_list_chain_reuse() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        let pages: Vec<u64> = (0..5).map(|_| allocator.allocate().unwrap()).collect();
        assert_eq!(pages, vec![2, 3, 4, 5, 6]);
        allocator.deallocate(3).unwrap();
        allocator.deallocate(4).unwrap();
        allocator.deallocate(5).unwrap();
        let reused1 = allocator.allocate().unwrap();
        assert_eq!(reused1, 5, "First reuse should be last freed (LIFO)");
        let reused2 = allocator.allocate().unwrap();
        assert_eq!(reused2, 4, "Second reuse should be middle freed");
        let reused3 = allocator.allocate().unwrap();
        assert_eq!(reused3, 3, "Third reuse should be first freed");
        let new_page = allocator.allocate().unwrap();
        assert_eq!(
            new_page, 7,
            "After exhausting free list, should allocate new page 7"
        );
    }

    // stats() tracks allocated/free/total consistently across a mix of
    // allocations and deallocations.
    #[test]
    fn test_stats_accuracy_after_alloc_dealloc() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        allocator.allocate().unwrap();
        allocator.allocate().unwrap();
        allocator.allocate().unwrap();
        let (allocated, free, total) = allocator.stats();
        assert_eq!(
            allocated, 5,
            "Should have 5 allocated pages (2 reserved + 3 new)"
        );
        assert_eq!(free, 0, "No free pages yet");
        assert_eq!(total, 5, "Total pages should be 5");
        allocator.deallocate(3).unwrap();
        let (allocated, free, total) = allocator.stats();
        assert_eq!(allocated, 4, "Should have 4 allocated after freeing one");
        assert_eq!(free, 1, "Should have 1 free page");
        assert_eq!(total, 5, "Total pages unchanged");
        allocator.deallocate(4).unwrap();
        let (allocated, free, _total) = allocator.stats();
        assert_eq!(allocated, 3, "Should have 3 allocated after freeing two");
        assert_eq!(free, 2, "Should have 2 free pages");
    }

    // Double-free detection must still hold when total_pages is pushed far
    // beyond the bitmap's current length (simulating an external resize).
    #[test]
    fn test_double_free_beyond_bitmap() {
        let header = PersistentHeaderV3::new_v3();
        let mut allocator = PageAllocator::new(&header);
        let page = allocator.allocate().unwrap();
        assert_eq!(page, 2);
        allocator.total_pages = 5000;
        allocator.deallocate(page).unwrap();
        let result = allocator.deallocate(page);
        assert!(
            result.is_err(),
            "Double-free of freed page should be detected"
        );
    }
}