use crate::PVM_PAGE_SIZE;
use std::collections::BTreeMap;
/// Access permissions attached to a mapped page.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PageAccess {
    /// Mapped, but any read or write faults.
    Inaccessible,
    /// Reads succeed; writes fault.
    ReadOnly,
    /// Both reads and writes succeed.
    ReadWrite,
}
/// Sparse 32-bit paged address space: only mapped pages consume storage.
#[derive(Clone, Debug)]
pub struct Memory {
    // Mapped pages keyed by page index (`addr / PVM_PAGE_SIZE`). A BTreeMap
    // keeps keys sorted, which `first_unmapped_page_from` relies on for its
    // ordered range scan.
    pages: BTreeMap<u32, PageData>,
}
/// Permissions plus backing storage for a single mapped page.
#[derive(Clone, Debug)]
struct PageData {
    // Current access mode of the page.
    access: PageAccess,
    // Page contents. Usually exactly PVM_PAGE_SIZE bytes; pages created by
    // `map_page_meta` leave this empty (access tracked without backing
    // storage) — NOTE(review): byte accessors index into this, so confirm
    // meta-mapped pages are never read/written directly.
    data: Vec<u8>,
}
/// Outcome of a guest memory write: success, or a page fault carrying the
/// faulting byte address.
///
/// `#[must_use]` because silently dropping a fault result is almost always a
/// caller bug; `Copy`/`PartialEq` let callers compare outcomes directly
/// instead of reaching for `matches!`.
#[must_use]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MemoryAccess {
    /// The access completed successfully.
    Ok,
    /// The access faulted at the contained byte address.
    PageFault(u32),
}
impl Memory {
    /// Creates an empty address space with no pages mapped.
    pub fn new() -> Self {
        Self {
            pages: BTreeMap::new(),
        }
    }

    /// Page index containing `addr`.
    fn page_index(addr: u32) -> u32 {
        addr / PVM_PAGE_SIZE
    }

    /// Byte offset of `addr` within its page (always `< PVM_PAGE_SIZE`).
    fn page_offset(addr: u32) -> usize {
        (addr % PVM_PAGE_SIZE) as usize
    }

    /// Maps `page` with zero-filled backing storage, replacing any existing
    /// mapping (previous contents included).
    pub fn map_page(&mut self, page: u32, access: PageAccess) {
        self.pages.insert(
            page,
            PageData {
                access,
                data: vec![0u8; PVM_PAGE_SIZE as usize],
            },
        );
    }

    /// Maps `page` without allocating backing storage. Until first written,
    /// the page reads as all zeroes; storage is materialized lazily by
    /// [`Memory::write_u8`].
    pub fn map_page_meta(&mut self, page: u32, access: PageAccess) {
        self.pages.insert(
            page,
            PageData {
                access,
                data: Vec::new(),
            },
        );
    }

    /// Maps `page` initialized with `data`, zero-padded to a full page and
    /// truncated if longer than one page.
    pub fn map_page_with_data(&mut self, page: u32, access: PageAccess, data: &[u8]) {
        let mut page_data = vec![0u8; PVM_PAGE_SIZE as usize];
        let copy_len = data.len().min(PVM_PAGE_SIZE as usize);
        page_data[..copy_len].copy_from_slice(&data[..copy_len]);
        self.pages.insert(
            page,
            PageData {
                access,
                data: page_data,
            },
        );
    }

    /// Indices of all mapped pages, in ascending order.
    pub fn page_indices(&self) -> Vec<u32> {
        self.pages.keys().copied().collect()
    }

    /// Whether `page` is mapped at all (regardless of access mode).
    pub fn is_page_mapped(&self, page: u32) -> bool {
        self.pages.contains_key(&page)
    }

    /// First unmapped page index at or after `start_page`, or `None` if every
    /// page from `start_page` through `u32::MAX` is mapped.
    pub fn first_unmapped_page_from(&self, start_page: u32) -> Option<u32> {
        let mut page = start_page;
        // Keys come back sorted, so the first key that skips ahead of the
        // running counter marks the start of a gap.
        for (&mapped_page, _) in self.pages.range(start_page..) {
            if mapped_page != page {
                return Some(page);
            }
            page = page.checked_add(1)?;
        }
        Some(page)
    }

    /// Raw contents of `page`, if mapped. A page mapped via
    /// [`Memory::map_page_meta`] that was never written returns an empty slice.
    pub fn read_page(&self, page: u32) -> Option<&[u8]> {
        self.pages.get(&page).map(|pd| pd.data.as_slice())
    }

    /// Shared permission walk for `is_readable` / `is_writable`: every page
    /// overlapping `[addr, addr + len)` must be mapped and satisfy `allowed`.
    fn range_has_access(&self, addr: u32, len: u32, allowed: impl Fn(PageAccess) -> bool) -> bool {
        if len == 0 {
            return true;
        }
        // A range wrapping past the top of the address space is invalid.
        let end = match addr.checked_add(len) {
            Some(e) => e,
            None => return false,
        };
        let start_page = Self::page_index(addr);
        // `len > 0` guarantees `end >= 1`, so `end - 1` cannot underflow.
        let end_page = Self::page_index(end - 1);
        (start_page..=end_page)
            .all(|page| self.pages.get(&page).map_or(false, |pd| allowed(pd.access)))
    }

    /// Whether every byte of `[addr, addr + len)` is mapped readable
    /// (`ReadOnly` or `ReadWrite`). Zero-length ranges are always readable.
    pub fn is_readable(&self, addr: u32, len: u32) -> bool {
        self.range_has_access(addr, len, |a| a != PageAccess::Inaccessible)
    }

    /// Whether every byte of `[addr, addr + len)` is mapped `ReadWrite`.
    /// Zero-length ranges are always writable.
    pub fn is_writable(&self, addr: u32, len: u32) -> bool {
        self.range_has_access(addr, len, |a| a == PageAccess::ReadWrite)
    }

    /// Reads one byte; `None` if the page is unmapped or `Inaccessible`.
    pub fn read_u8(&self, addr: u32) -> Option<u8> {
        let page = Self::page_index(addr);
        let offset = Self::page_offset(addr);
        match self.pages.get(&page) {
            // Checked `get` (rather than indexing) makes meta-mapped pages —
            // which have no backing storage yet — read as zero instead of
            // panicking on an out-of-bounds index.
            Some(pd) if pd.access != PageAccess::Inaccessible => {
                Some(pd.data.get(offset).copied().unwrap_or(0))
            }
            _ => None,
        }
    }

    /// Reads `len` bytes starting at `addr` (the address wraps per byte,
    /// matching repeated `read_u8` calls); `None` if any byte faults.
    pub fn read_bytes(&self, addr: u32, len: u32) -> Option<Vec<u8>> {
        let mut result = Vec::with_capacity(len as usize);
        for i in 0..len {
            result.push(self.read_u8(addr.wrapping_add(i))?);
        }
        Some(result)
    }

    /// Reads `N` consecutive bytes into a fixed array; shared helper for the
    /// typed little-endian readers (avoids a `Vec` allocation per read).
    fn read_array<const N: usize>(&self, addr: u32) -> Option<[u8; N]> {
        let mut out = [0u8; N];
        for (i, byte) in out.iter_mut().enumerate() {
            *byte = self.read_u8(addr.wrapping_add(i as u32))?;
        }
        Some(out)
    }

    /// Reads a little-endian `u16`; `None` if any byte faults.
    pub fn read_u16_le(&self, addr: u32) -> Option<u16> {
        self.read_array(addr).map(u16::from_le_bytes)
    }

    /// Reads a little-endian `u32`; `None` if any byte faults.
    pub fn read_u32_le(&self, addr: u32) -> Option<u32> {
        self.read_array(addr).map(u32::from_le_bytes)
    }

    /// Reads a little-endian `u64`; `None` if any byte faults.
    pub fn read_u64_le(&self, addr: u32) -> Option<u64> {
        self.read_array(addr).map(u64::from_le_bytes)
    }

    /// Writes one byte. Faults (reporting `addr`) unless the page is mapped
    /// `ReadWrite`.
    pub fn write_u8(&mut self, addr: u32, value: u8) -> MemoryAccess {
        let page = Self::page_index(addr);
        let offset = Self::page_offset(addr);
        match self.pages.get_mut(&page) {
            Some(pd) if pd.access == PageAccess::ReadWrite => {
                // Meta-mapped pages carry no backing storage until first
                // written; materialize a zero page lazily here instead of
                // panicking on the index below.
                if pd.data.is_empty() {
                    pd.data = vec![0u8; PVM_PAGE_SIZE as usize];
                }
                pd.data[offset] = value;
                MemoryAccess::Ok
            }
            _ => MemoryAccess::PageFault(addr),
        }
    }

    /// Writes `data` starting at `addr` (the address wraps per byte). Stops
    /// at the first fault and returns it; bytes written before the fault
    /// remain written.
    pub fn write_bytes(&mut self, addr: u32, data: &[u8]) -> MemoryAccess {
        for (i, &byte) in data.iter().enumerate() {
            match self.write_u8(addr.wrapping_add(i as u32), byte) {
                MemoryAccess::Ok => {}
                fault => return fault,
            }
        }
        MemoryAccess::Ok
    }

    /// Writes a little-endian `u16`; see [`Memory::write_bytes`] for fault behavior.
    pub fn write_u16_le(&mut self, addr: u32, value: u16) -> MemoryAccess {
        self.write_bytes(addr, &value.to_le_bytes())
    }

    /// Writes a little-endian `u32`; see [`Memory::write_bytes`] for fault behavior.
    pub fn write_u32_le(&mut self, addr: u32, value: u32) -> MemoryAccess {
        self.write_bytes(addr, &value.to_le_bytes())
    }

    /// Writes a little-endian `u64`; see [`Memory::write_bytes`] for fault behavior.
    pub fn write_u64_le(&mut self, addr: u32, value: u64) -> MemoryAccess {
        self.write_bytes(addr, &value.to_le_bytes())
    }

    /// Iterates all mapped pages as `(index, access, contents)`, in ascending
    /// index order.
    pub fn pages_iter(&self) -> impl Iterator<Item = (u32, PageAccess, &[u8])> {
        self.pages
            .iter()
            .map(|(&idx, pd)| (idx, pd.access, pd.data.as_slice()))
    }

    /// Mutable host-side view of a page's contents, bypassing access checks;
    /// `None` if the page is unmapped.
    pub fn page_data_mut(&mut self, page: u32) -> Option<&mut [u8]> {
        self.pages.get_mut(&page).map(|pd| pd.data.as_mut_slice())
    }

    /// Access mode of `page`; unmapped pages report `Inaccessible`.
    pub fn page_access(&self, page: u32) -> PageAccess {
        self.pages
            .get(&page)
            .map_or(PageAccess::Inaccessible, |pd| pd.access)
    }
}
impl Default for Memory {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Mapping and remapping a page updates its access rights.
    #[test]
    fn test_page_mapping() {
        let mut mem = Memory::new();
        assert!(!mem.is_readable(0, 1));
        mem.map_page(0, PageAccess::ReadOnly);
        assert!(mem.is_readable(0, 1));
        assert!(!mem.is_writable(0, 1));
        mem.map_page(0, PageAccess::ReadWrite);
        assert!(mem.is_readable(0, 1));
        assert!(mem.is_writable(0, 1));
    }

    /// A written byte reads back unchanged.
    #[test]
    fn test_read_write_u8() {
        let mut mem = Memory::new();
        mem.map_page(0, PageAccess::ReadWrite);
        assert!(matches!(mem.write_u8(0, 42), MemoryAccess::Ok));
        assert_eq!(mem.read_u8(0), Some(42));
    }

    /// A little-endian u64 round-trips through memory.
    #[test]
    fn test_read_write_u64() {
        let mut mem = Memory::new();
        mem.map_page(0, PageAccess::ReadWrite);
        let value: u64 = 0x0123456789ABCDEF;
        assert!(matches!(mem.write_u64_le(0, value), MemoryAccess::Ok));
        assert_eq!(mem.read_u64_le(0), Some(value));
    }

    /// Reading an unmapped address yields None.
    #[test]
    fn test_page_fault_on_unmapped() {
        let mem = Memory::new();
        assert_eq!(mem.read_u8(0), None);
    }

    /// Writing a read-only page faults.
    #[test]
    fn test_page_fault_on_readonly_write() {
        let mut mem = Memory::new();
        mem.map_page(0, PageAccess::ReadOnly);
        assert!(matches!(mem.write_u8(0, 42), MemoryAccess::PageFault(_)));
    }

    /// Multi-byte accesses spanning a page boundary touch both pages, and a
    /// range only partially mapped fails to read.
    #[test]
    fn test_cross_page_access() {
        let mut mem = Memory::new();
        mem.map_page(0, PageAccess::ReadWrite);
        mem.map_page(1, PageAccess::ReadWrite);
        // Last two bytes of page 0 + first two bytes of page 1.
        let addr = PVM_PAGE_SIZE - 2;
        assert!(matches!(mem.write_u32_le(addr, 0xDEADBEEF), MemoryAccess::Ok));
        assert_eq!(mem.read_u32_le(addr), Some(0xDEADBEEF));
        // Page 2 is unmapped, so a read straddling into it fails.
        assert_eq!(mem.read_u32_le(2 * PVM_PAGE_SIZE - 2), None);
    }

    /// Gap search skips contiguous mapped runs and lands on the first hole.
    #[test]
    fn test_first_unmapped_page() {
        let mut mem = Memory::new();
        mem.map_page(0, PageAccess::ReadWrite);
        mem.map_page(1, PageAccess::ReadOnly);
        mem.map_page(3, PageAccess::ReadWrite);
        assert_eq!(mem.first_unmapped_page_from(0), Some(2));
        assert_eq!(mem.first_unmapped_page_from(3), Some(4));
        assert_eq!(mem.first_unmapped_page_from(10), Some(10));
    }
}