use std::collections::HashMap;
use crate::{emulation::engine::EmulationError, Result};
/// An opaque handle to an allocation in [`UnmanagedMemory`], wrapping the
/// allocation's base address.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct UnmanagedRef(u64);

impl UnmanagedRef {
    /// Wraps a raw base address in a typed handle.
    #[must_use]
    pub fn new(address: u64) -> Self {
        UnmanagedRef(address)
    }

    /// Returns the raw address this handle refers to.
    ///
    /// Takes `self` by value: the type is `Copy`, so pass-by-value is the
    /// idiomatic form and the `trivially_copy_pass_by_ref` allow becomes
    /// unnecessary. Method-call sites (`ptr.address()`) are unaffected.
    #[must_use]
    pub fn address(self) -> u64 {
        self.0
    }
}
/// Backing storage for a single heap allocation.
///
/// An entry stays in the region map after being freed, with `valid` set to
/// `false`, so that a second `free` of the same pointer can be reported as
/// an error rather than silently succeeding.
#[derive(Clone, Debug)]
struct InternalRegion {
    /// Raw bytes of the allocation.
    data: Vec<u8>,
    /// `false` once the region has been freed.
    valid: bool,
}

impl InternalRegion {
    /// Creates a live, zero-filled region of `size` bytes.
    fn new(size: usize) -> Self {
        Self {
            valid: true,
            data: vec![0u8; size],
        }
    }

    /// Number of bytes backing this region.
    #[inline]
    fn size(&self) -> usize {
        self.data.len()
    }
}
/// An emulated unmanaged heap: a bump-pointer allocator handing out
/// disjoint regions, each addressable by base address or interior pointer.
#[derive(Clone, Debug)]
pub struct UnmanagedMemory {
    // Base address -> region. Freed regions remain as entries with
    // `valid == false` (used to detect double frees).
    regions: HashMap<u64, InternalRegion>,
    // Next address `alloc` will hand out; only ever grows.
    next_address: u64,
    // Total bytes held by live (non-freed) regions.
    current_size: usize,
    // Hard cap on `current_size`; allocations beyond it fail.
    max_size: usize,
}
impl UnmanagedMemory {
#[must_use]
pub fn new(max_size: usize) -> Self {
UnmanagedMemory {
regions: HashMap::new(),
next_address: 0x7FFF_0000_0000,
current_size: 0,
max_size,
}
}
pub fn alloc(&mut self, size: usize) -> Result<UnmanagedRef> {
if self.current_size + size > self.max_size {
return Err(EmulationError::HeapMemoryLimitExceeded {
current: self.current_size,
limit: self.max_size,
}
.into());
}
let address = self.next_address;
self.next_address += size as u64;
self.next_address = (self.next_address + 15) & !15;
self.regions.insert(address, InternalRegion::new(size));
self.current_size += size;
Ok(UnmanagedRef::new(address))
}
pub fn free(&mut self, ptr: UnmanagedRef) -> Result<()> {
if let Some(region) = self.regions.get_mut(&ptr.address()) {
if region.valid {
region.valid = false;
self.current_size = self.current_size.saturating_sub(region.size());
return Ok(());
}
}
Err(EmulationError::InvalidPointer {
address: ptr.address(),
reason: "not a valid allocation or already freed",
}
.into())
}
fn find_region(&self, address: u64) -> Option<(&InternalRegion, usize)> {
if let Some(region) = self.regions.get(&address) {
if region.valid {
return Some((region, 0));
}
}
for (&base, region) in &self.regions {
if region.valid && address >= base && address < base + region.size() as u64 {
#[allow(clippy::cast_possible_truncation)] let offset = (address - base) as usize;
return Some((region, offset));
}
}
None
}
fn find_region_mut(&mut self, address: u64) -> Option<(&mut InternalRegion, usize)> {
let mut found_base = None;
if let Some(region) = self.regions.get(&address) {
if region.valid {
found_base = Some(address);
}
}
if found_base.is_none() {
for (&base, region) in &self.regions {
if region.valid && address >= base && address < base + region.size() as u64 {
found_base = Some(base);
break;
}
}
}
if let Some(base) = found_base {
if let Some(region) = self.regions.get_mut(&base) {
#[allow(clippy::cast_possible_truncation)] let offset = (address - base) as usize;
return Some((region, offset));
}
}
None
}
pub fn read(&self, address: u64, size: usize) -> Result<Vec<u8>> {
let (region, offset) = self
.find_region(address)
.ok_or(EmulationError::InvalidPointer {
address,
reason: "address not in any allocated region",
})?;
if offset + size > region.size() {
return Err(EmulationError::InvalidPointer {
address,
reason: "read would exceed region bounds",
}
.into());
}
Ok(region.data[offset..offset + size].to_vec())
}
pub fn write(&mut self, address: u64, data: &[u8]) -> Result<()> {
let (region, offset) =
self.find_region_mut(address)
.ok_or(EmulationError::InvalidPointer {
address,
reason: "address not in any allocated region",
})?;
if offset + data.len() > region.size() {
return Err(EmulationError::InvalidPointer {
address,
reason: "write would exceed region bounds",
}
.into());
}
region.data[offset..offset + data.len()].copy_from_slice(data);
Ok(())
}
pub fn memcpy(&mut self, dest: u64, src: u64, size: usize) -> Result<()> {
if size == 0 {
return Ok(());
}
let data = self.read(src, size)?;
self.write(dest, &data)
}
pub fn memset(&mut self, address: u64, value: u8, size: usize) -> Result<()> {
if size == 0 {
return Ok(());
}
let (region, offset) =
self.find_region_mut(address)
.ok_or(EmulationError::InvalidPointer {
address,
reason: "address not in any allocated region",
})?;
if offset + size > region.size() {
return Err(EmulationError::InvalidPointer {
address,
reason: "memset would exceed region bounds",
}
.into());
}
region.data[offset..offset + size].fill(value);
Ok(())
}
#[must_use]
pub fn is_valid(&self, address: u64) -> bool {
self.find_region(address).is_some()
}
#[must_use]
pub fn current_size(&self) -> usize {
self.current_size
}
#[must_use]
pub fn max_size(&self) -> usize {
self.max_size
}
pub fn alloc_with_data(&mut self, data: &[u8]) -> Result<UnmanagedRef> {
let ptr = self.alloc(data.len())?;
self.write(ptr.address(), data)?;
Ok(ptr)
}
pub fn alloc_at(&mut self, address: u64, data: &[u8]) -> Result<UnmanagedRef> {
if self.regions.contains_key(&address) {
return Err(EmulationError::InvalidPointer {
address,
reason: "address already allocated",
}
.into());
}
let size = data.len();
if self.current_size + size > self.max_size {
return Err(EmulationError::HeapMemoryLimitExceeded {
current: self.current_size,
limit: self.max_size,
}
.into());
}
let mut region = InternalRegion::new(size);
region.data.copy_from_slice(data);
self.regions.insert(address, region);
self.current_size += size;
let end_address = address + size as u64;
if end_address > self.next_address {
self.next_address = (end_address + 15) & !15;
}
Ok(UnmanagedRef::new(address))
}
pub fn regions(&self) -> impl Iterator<Item = (u64, &[u8])> {
self.regions
.iter()
.filter(|(_, r)| r.valid)
.map(|(&addr, r)| (addr, r.data.as_slice()))
}
#[must_use]
pub fn get_region_data(&self, base_address: u64) -> Option<&[u8]> {
self.regions
.get(&base_address)
.filter(|r| r.valid)
.map(|r| r.data.as_slice())
}
}
impl Default for UnmanagedMemory {
    /// Creates a heap with a 16 MiB limit.
    fn default() -> Self {
        Self::new(16 << 20)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_alloc_and_free() {
        let mut heap = UnmanagedMemory::new(1024);
        let handle = heap.alloc(100).unwrap();
        assert!(heap.is_valid(handle.address()));
        heap.free(handle).unwrap();
        // Freed pointers must stop resolving.
        assert!(!heap.is_valid(handle.address()));
    }

    #[test]
    fn test_read_write() {
        let mut heap = UnmanagedMemory::new(1024);
        let handle = heap.alloc(16).unwrap();
        let payload = [1, 2, 3, 4, 5, 6, 7, 8];
        heap.write(handle.address(), &payload).unwrap();
        assert_eq!(heap.read(handle.address(), 8).unwrap(), payload);
    }

    #[test]
    fn test_memset() {
        let mut heap = UnmanagedMemory::new(1024);
        let handle = heap.alloc(16).unwrap();
        heap.memset(handle.address(), 0xFF, 16).unwrap();
        let bytes = heap.read(handle.address(), 16).unwrap();
        assert!(bytes.iter().all(|&b| b == 0xFF));
    }

    #[test]
    fn test_memcpy() {
        let mut heap = UnmanagedMemory::new(1024);
        let from = heap.alloc(16).unwrap();
        let to = heap.alloc(16).unwrap();
        heap.write(from.address(), &[1, 2, 3, 4, 5, 6, 7, 8]).unwrap();
        heap.memcpy(to.address(), from.address(), 8).unwrap();
        assert_eq!(heap.read(to.address(), 8).unwrap(), [1, 2, 3, 4, 5, 6, 7, 8]);
    }

    #[test]
    fn test_offset_access() {
        let mut heap = UnmanagedMemory::new(1024);
        let handle = heap.alloc(32).unwrap();
        // Interior pointers, not just region bases, must be addressable.
        let interior = handle.address() + 8;
        heap.write(interior, &[0xAB, 0xCD]).unwrap();
        assert_eq!(heap.read(interior, 2).unwrap(), [0xAB, 0xCD]);
    }

    #[test]
    fn test_out_of_bounds() {
        let mut heap = UnmanagedMemory::new(1024);
        let handle = heap.alloc(8).unwrap();
        assert!(heap.read(handle.address(), 16).is_err());
        assert!(heap.write(handle.address(), &[0; 16]).is_err());
    }

    #[test]
    fn test_memory_limit() {
        let mut heap = UnmanagedMemory::new(100);
        let _kept = heap.alloc(50).unwrap();
        // 50 + 60 exceeds the 100-byte cap.
        assert!(heap.alloc(60).is_err());
    }
}