use std::sync::{Arc, RwLock};
use bitflags::bitflags;
use crate::emulation::{
engine::EmulationError,
memory::page::{Page, PAGE_SIZE},
};
/// Identifier for an emulated thread.
///
/// Newtype over `u32` so thread ids cannot be confused with other integers.
/// `#[derive(Default)]` yields `ThreadId(0)`, which is exactly `ThreadId::MAIN`,
/// replacing the previous hand-written `Default` impl (clippy: derivable_impls).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
pub struct ThreadId(pub u32);

impl ThreadId {
    /// The initial (main) thread, always id 0.
    pub const MAIN: ThreadId = ThreadId(0);

    /// Creates a thread id from a raw value.
    #[must_use]
    pub fn new(id: u32) -> Self {
        Self(id)
    }

    /// Returns the raw `u32` value.
    #[must_use]
    pub fn value(&self) -> u32 {
        self.0
    }
}

impl std::fmt::Display for ThreadId {
    /// Formats as `Thread(<id>)` for diagnostics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Thread({})", self.0)
    }
}
bitflags! {
    /// Page-level access permissions for emulated memory.
    ///
    /// The low bits mirror read/write/execute; `GUARD` mirrors the Windows
    /// PAGE_GUARD modifier bit (presumably one-shot guard-page semantics —
    /// confirm against the fault-handling code).
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct MemoryProtection: u32 {
        const READ = 0x01;
        const WRITE = 0x02;
        const EXECUTE = 0x04;
        // Kept apart from the basic R/W/X bits, matching Windows' 0x100 value.
        const GUARD = 0x100;
        // Common combinations, precomputed for convenience.
        const READ_WRITE = Self::READ.bits() | Self::WRITE.bits();
        const READ_WRITE_EXECUTE = Self::READ.bits() | Self::WRITE.bits() | Self::EXECUTE.bits();
        const READ_EXECUTE = Self::READ.bits() | Self::EXECUTE.bits();
    }
}
impl Default for MemoryProtection {
fn default() -> Self {
Self::READ_WRITE
}
}
impl MemoryProtection {
    // Windows PAGE_* protection constants (low byte of the protect word).
    const PAGE_NOACCESS: u32 = 0x01;
    const PAGE_READONLY: u32 = 0x02;
    const PAGE_READWRITE: u32 = 0x04;
    const PAGE_WRITECOPY: u32 = 0x08;
    const PAGE_EXECUTE: u32 = 0x10;
    const PAGE_EXECUTE_READ: u32 = 0x20;
    const PAGE_EXECUTE_READWRITE: u32 = 0x40;
    const PAGE_EXECUTE_WRITECOPY: u32 = 0x80;

    /// Translates a Windows `PAGE_*` protection value into flag form.
    ///
    /// Only the low byte is inspected, so modifier bits (e.g. PAGE_GUARD,
    /// 0x100) are ignored here. Write-copy variants map to their plain
    /// read/write equivalents, and any unrecognized value falls back to
    /// read/write.
    #[must_use]
    pub fn from_windows(page_protect: u32) -> Self {
        match page_protect & 0xFF {
            Self::PAGE_NOACCESS => Self::empty(),
            Self::PAGE_READONLY => Self::READ,
            Self::PAGE_READWRITE | Self::PAGE_WRITECOPY => Self::READ_WRITE,
            Self::PAGE_EXECUTE => Self::EXECUTE,
            Self::PAGE_EXECUTE_READ => Self::READ_EXECUTE,
            Self::PAGE_EXECUTE_READWRITE | Self::PAGE_EXECUTE_WRITECOPY => Self::READ_WRITE_EXECUTE,
            _ => Self::READ_WRITE,
        }
    }

    /// Translates these flags back into the closest Windows `PAGE_*` value.
    ///
    /// A writable page is always reported as readable too, because Windows
    /// has no write-only protection constant.
    #[must_use]
    pub fn to_windows(self) -> u32 {
        let readable = self.contains(Self::READ);
        let writable = self.contains(Self::WRITE);
        let executable = self.contains(Self::EXECUTE);
        match (executable, writable) {
            (false, true) => Self::PAGE_READWRITE,
            (true, true) => Self::PAGE_EXECUTE_READWRITE,
            (false, false) if readable => Self::PAGE_READONLY,
            (false, false) => Self::PAGE_NOACCESS,
            (true, false) if readable => Self::PAGE_EXECUTE_READ,
            (true, false) => Self::PAGE_EXECUTE,
        }
    }
}
/// Metadata for one PE section, as taken from the section table.
#[derive(Clone, Debug)]
pub struct SectionInfo {
    // Section name (e.g. ".text", ".data").
    pub name: String,
    // RVA of the section relative to the image base (compared against
    // `address - region.base` in `MemoryRegion::protection_at`).
    pub virtual_address: u32,
    // Size of the section when mapped into memory.
    pub virtual_size: u32,
    // Presumably the file offset of the raw section data — not consumed
    // anywhere in this module; verify against the PE loader.
    pub raw_data_offset: u32,
    // Size of the raw section data; likewise unused here.
    pub raw_data_size: u32,
    // Raw IMAGE_SCN_* characteristics bits from the section header.
    pub characteristics: u32,
    // Protection derived from `characteristics` in `SectionInfo::new`.
    pub protection: MemoryProtection,
}
impl SectionInfo {
    // IMAGE_SCN_MEM_* characteristic bits from the PE specification,
    // named instead of the previous inline magic numbers.
    /// IMAGE_SCN_MEM_EXECUTE: section is executable.
    const SCN_MEM_EXECUTE: u32 = 0x2000_0000;
    /// IMAGE_SCN_MEM_READ: section is readable.
    const SCN_MEM_READ: u32 = 0x4000_0000;
    /// IMAGE_SCN_MEM_WRITE: section is writable.
    const SCN_MEM_WRITE: u32 = 0x8000_0000;

    /// Builds a `SectionInfo`, deriving page protection from the
    /// IMAGE_SCN_MEM_* bits in `characteristics`.
    #[must_use]
    pub fn new(
        name: String,
        virtual_address: u32,
        virtual_size: u32,
        raw_data_offset: u32,
        raw_data_size: u32,
        characteristics: u32,
    ) -> Self {
        let mut protection = MemoryProtection::empty();
        if characteristics & Self::SCN_MEM_READ != 0 {
            protection |= MemoryProtection::READ;
        }
        if characteristics & Self::SCN_MEM_WRITE != 0 {
            protection |= MemoryProtection::WRITE;
        }
        if characteristics & Self::SCN_MEM_EXECUTE != 0 {
            protection |= MemoryProtection::EXECUTE;
        }
        Self {
            name,
            virtual_address,
            virtual_size,
            raw_data_offset,
            raw_data_size,
            characteristics,
            protection,
        }
    }
}
/// A contiguous span of emulated guest memory, backed by pages that
/// support cheap forking (see `fork` / `Page::fork`).
#[derive(Debug)]
pub struct MemoryRegion {
    // Guest base address of the region.
    base: u64,
    // Region size in bytes (the final page may be only partially covered).
    size: usize,
    // Backing pages; page i covers bytes [i*PAGE_SIZE, (i+1)*PAGE_SIZE).
    pages: Vec<Page>,
    // PE section table, present only for PE image regions; shared cheaply
    // across forks via Arc.
    sections: Option<Arc<[SectionInfo]>>,
    // Human-readable name for diagnostics.
    label: String,
    // Region-wide protection; RwLock so it can change on a shared region.
    // PE images resolve protection per section instead (see protection_at).
    protection: RwLock<MemoryProtection>,
    // What kind of allocation created this region.
    kind: RegionKind,
}
/// Discriminates how a region was created (see the `MemoryRegion`
/// constructors of the same names).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum RegionKind {
    // Loaded PE image with a section table.
    PeImage,
    // Arbitrary data mapped at a fixed address.
    MappedData,
    // Zero-initialized allocation labelled "unmanaged".
    UnmanagedAlloc,
}
impl MemoryRegion {
/// Splits `data` into PAGE_SIZE-sized pages; the final page is built
/// from a shorter chunk when `data.len()` is not a page multiple.
fn pages_from_data(data: &[u8]) -> Vec<Page> {
    data.chunks(PAGE_SIZE).map(Page::from_slice).collect()
}
/// Creates a region holding a loaded PE image.
///
/// The section table drives per-address protection (`protection_at`);
/// the region-wide protection is fixed at read+execute.
#[must_use]
pub fn pe_image(
    base: u64,
    data: &[u8],
    sections: Vec<SectionInfo>,
    name: impl Into<String>,
) -> Self {
    Self {
        base,
        size: data.len(),
        pages: Self::pages_from_data(data),
        sections: Some(Arc::from(sections.into_boxed_slice())),
        label: name.into(),
        protection: RwLock::new(MemoryProtection::READ_EXECUTE),
        kind: RegionKind::PeImage,
    }
}
/// Creates a region from a caller-supplied byte buffer with the given
/// region-wide protection.
#[must_use]
pub fn mapped_data(
    base: u64,
    data: &[u8],
    label: impl Into<String>,
    protection: MemoryProtection,
) -> Self {
    Self {
        base,
        size: data.len(),
        pages: Self::pages_from_data(data),
        sections: None,
        label: label.into(),
        protection: RwLock::new(protection),
        kind: RegionKind::MappedData,
    }
}
/// Creates a zero-filled region of `size` bytes labelled "unmanaged",
/// defaulting to read/write protection.
#[must_use]
pub fn unmanaged_alloc(base: u64, size: usize) -> Self {
    // Enough zeroed pages to cover `size`, rounding the last one up.
    let pages = std::iter::repeat_with(Page::zeroed)
        .take(size.div_ceil(PAGE_SIZE))
        .collect();
    Self {
        base,
        size,
        pages,
        sections: None,
        label: String::from("unmanaged"),
        protection: RwLock::new(MemoryProtection::READ_WRITE),
        kind: RegionKind::UnmanagedAlloc,
    }
}
/// Guest base address of the region.
#[must_use]
pub fn base(&self) -> u64 {
    self.base
}

/// Region size in bytes.
#[must_use]
pub fn size(&self) -> usize {
    self.size
}

/// One-past-the-end guest address.
///
/// NOTE(review): assumes `base + size` does not overflow `u64`; regions
/// are built by this module's constructors, so the invariant is expected
/// to hold — confirm at allocation sites.
#[must_use]
pub fn end(&self) -> u64 {
    self.base + self.size as u64
}

/// Returns true when `address` lies in `[base, end)`.
#[must_use]
pub fn contains(&self, address: u64) -> bool {
    address >= self.base && address < self.end()
}

/// Returns true when the whole `len`-byte range starting at `address`
/// fits inside the region.
///
/// Uses `checked_add` instead of the previous `address + len as u64`,
/// which overflowed for addresses near `u64::MAX` (panicking in debug
/// builds and wrapping — falsely reporting containment — in release).
#[must_use]
pub fn contains_range(&self, address: u64, len: usize) -> bool {
    if address < self.base {
        return false;
    }
    address
        .checked_add(len as u64)
        .is_some_and(|range_end| range_end <= self.end())
}
/// Returns the region-wide protection flags.
///
/// For PE images prefer `protection_at`, which consults the section table.
#[must_use]
pub fn protection(&self) -> MemoryProtection {
    let guard = self.protection.read().expect("protection lock poisoned");
    *guard
}

/// Replaces the region-wide protection flags.
pub fn set_protection(&self, protection: MemoryProtection) {
    let mut guard = self.protection.write().expect("protection lock poisoned");
    *guard = protection;
}
/// Returns the effective protection for a single address.
///
/// PE images resolve protection from the section whose RVA range contains
/// the address; addresses inside the image but outside every section
/// (e.g. the PE headers) fall back to read-only. Non-image regions use
/// the region-wide protection.
///
/// NOTE(review): assumes `address >= base` — a smaller address underflows
/// the RVA computation (panic in debug builds, wrap in release). Callers
/// should check `contains` first; confirm call sites do.
#[must_use]
pub fn protection_at(&self, address: u64) -> MemoryProtection {
    if let Some(ref sections) = self.sections {
        // RVAs are 32-bit in the PE section table, hence the truncation.
        #[allow(clippy::cast_possible_truncation)]
        let rva = (address - self.base) as u32;
        for section in sections.iter() {
            if rva >= section.virtual_address
                && rva < section.virtual_address + section.virtual_size
            {
                return section.protection;
            }
        }
        // No section covers this RVA: treat as read-only.
        MemoryProtection::READ
    } else {
        self.protection()
    }
}
/// Reads `len` bytes starting at guest address `address`.
///
/// Returns `None` when the range is not fully contained in the region
/// or a backing-page read fails; `len == 0` yields an empty buffer.
#[must_use]
pub fn read(&self, address: u64, len: usize) -> Option<Vec<u8>> {
    if len == 0 {
        return Some(Vec::new());
    }
    if !self.contains_range(address, len) {
        return None;
    }
    #[allow(clippy::cast_possible_truncation)]
    let start = (address - self.base) as usize;
    let mut buf = vec![0u8; len];
    let mut done = 0;
    // Copy page by page, splitting at page boundaries.
    while done < len {
        let pos = start + done;
        let within = pos % PAGE_SIZE;
        let page = self.pages.get(pos / PAGE_SIZE)?;
        let step = (PAGE_SIZE - within).min(len - done);
        page.read(within, &mut buf[done..done + step]).ok()?;
        done += step;
    }
    Some(buf)
}
/// Writes `bytes` starting at guest address `address`.
///
/// Returns `false` when the range is not fully contained in the region
/// or a backing-page write fails; an empty slice is a no-op success.
pub fn write(&self, address: u64, bytes: &[u8]) -> bool {
    if bytes.is_empty() {
        return true;
    }
    if !self.contains_range(address, bytes.len()) {
        return false;
    }
    #[allow(clippy::cast_possible_truncation)]
    let start = (address - self.base) as usize;
    let mut done = 0;
    // Copy page by page, splitting at page boundaries.
    while done < bytes.len() {
        let pos = start + done;
        let Some(page) = self.pages.get(pos / PAGE_SIZE) else {
            return false;
        };
        let within = pos % PAGE_SIZE;
        let step = (PAGE_SIZE - within).min(bytes.len() - done);
        if page.write(within, &bytes[done..done + step]).is_err() {
            return false;
        }
        done += step;
    }
    true
}
/// Human-readable label for diagnostics.
#[must_use]
pub fn label(&self) -> &str {
    self.label.as_str()
}
/// Creates a duplicate of this region by forking each backing page via
/// `Page::fork`; metadata and the current protection are copied.
///
/// # Errors
/// Propagates the first error returned by `Page::fork`.
pub fn fork(&self) -> Result<Self, EmulationError> {
    let mut pages = Vec::with_capacity(self.pages.len());
    for page in &self.pages {
        pages.push(page.fork()?);
    }
    Ok(Self {
        base: self.base,
        size: self.size,
        pages,
        sections: self.sections.clone(),
        label: self.label.clone(),
        protection: RwLock::new(self.protection()),
        kind: self.kind,
    })
}
/// Number of backing pages.
#[must_use]
pub fn page_count(&self) -> usize {
    self.pages.len()
}

/// Number of pages reporting themselves as modified; a page whose state
/// cannot be read counts as unmodified.
#[must_use]
pub fn modified_page_count(&self) -> usize {
    let mut modified = 0;
    for page in &self.pages {
        if page.is_modified().unwrap_or(false) {
            modified += 1;
        }
    }
    modified
}
/// Consumes the region and returns it rebased at `base`; every other
/// field is carried over unchanged.
#[must_use]
pub fn with_base(self, base: u64) -> Self {
    // Struct-update syntax moves the remaining fields out of `self`.
    Self { base, ..self }
}
/// True when this region was created by `pe_image`.
#[must_use]
pub fn is_pe_image(&self) -> bool {
    matches!(self.kind, RegionKind::PeImage)
}

/// True when this region was created by `unmanaged_alloc`.
#[must_use]
pub fn is_unmanaged_alloc(&self) -> bool {
    matches!(self.kind, RegionKind::UnmanagedAlloc)
}

/// True when this region was created by `mapped_data`.
#[must_use]
pub fn is_mapped_data(&self) -> bool {
    matches!(self.kind, RegionKind::MappedData)
}
}
impl Clone for MemoryRegion {
    /// Clones by delegating to `fork`.
    ///
    /// # Panics
    /// Panics if `fork` fails (per the message, a poisoned page lock),
    /// since `Clone::clone` cannot return an error.
    fn clone(&self) -> Self {
        self.fork().expect("page lock poisoned during clone")
    }
}
#[cfg(test)]
mod tests {
    use crate::emulation::memory::{
        page::PAGE_SIZE,
        region::{MemoryProtection, MemoryRegion, SectionInfo, ThreadId},
    };

    // Region bounds: inclusive at base, exclusive at end.
    #[test]
    fn test_memory_region_contains() {
        let region = MemoryRegion::mapped_data(
            0x1000,
            &vec![0u8; 0x100],
            "test",
            MemoryProtection::READ_WRITE,
        );
        assert!(region.contains(0x1000));
        assert!(region.contains(0x10FF));
        assert!(!region.contains(0x1100));
        assert!(!region.contains(0x0FFF));
    }

    // A write within one page reads back identically.
    #[test]
    fn test_memory_region_read_write() {
        let region = MemoryRegion::mapped_data(
            0x1000,
            &vec![0u8; 0x100],
            "test",
            MemoryProtection::READ_WRITE,
        );
        assert!(region.write(0x1010, &[0xDE, 0xAD, 0xBE, 0xEF]));
        let data = region.read(0x1010, 4).unwrap();
        assert_eq!(data, vec![0xDE, 0xAD, 0xBE, 0xEF]);
    }

    // Reads/writes straddling a page boundary are split correctly.
    #[test]
    fn test_memory_region_cross_page_read_write() {
        let region = MemoryRegion::mapped_data(
            0x1000,
            &vec![0u8; PAGE_SIZE * 3],
            "test",
            MemoryProtection::READ_WRITE,
        );
        // Starts 2 bytes before the first page boundary.
        let write_addr = 0x1000 + PAGE_SIZE as u64 - 2;
        let data = [1, 2, 3, 4, 5, 6];
        assert!(region.write(write_addr, &data));
        let read_data = region.read(write_addr, 6).unwrap();
        assert_eq!(read_data, data);
    }

    // A fork sees pre-fork writes, but post-fork writes do not leak back.
    #[test]
    fn test_memory_region_fork() {
        let region = MemoryRegion::mapped_data(
            0x1000,
            &vec![42u8; 0x100],
            "test",
            MemoryProtection::READ_WRITE,
        );
        region.write(0x1000, &[100]);
        let forked = region.fork().unwrap();
        assert_eq!(forked.read(0x1000, 1).unwrap(), vec![100]);
        forked.write(0x1000, &[200]);
        assert_eq!(region.read(0x1000, 1).unwrap(), vec![100]);
        assert_eq!(forked.read(0x1000, 1).unwrap(), vec![200]);
    }

    // Forked pages start unmodified; each side tracks modifications independently.
    #[test]
    fn test_memory_region_fork_shares_unmodified() {
        let region = MemoryRegion::mapped_data(
            0x1000,
            &vec![0u8; PAGE_SIZE * 4],
            "test",
            MemoryProtection::READ_WRITE,
        );
        region.write(0x1000, &[1, 2, 3, 4]);
        assert_eq!(region.modified_page_count(), 1);
        let forked = region.fork().unwrap();
        assert_eq!(forked.modified_page_count(), 0);
        let page2_addr = 0x1000 + (PAGE_SIZE * 2) as u64;
        forked.write(page2_addr, &[5, 6, 7, 8]);
        assert_eq!(forked.modified_page_count(), 1);
        assert_eq!(region.modified_page_count(), 1);
    }

    // READ_WRITE is exactly READ | WRITE, without EXECUTE.
    #[test]
    fn test_memory_protection() {
        let prot = MemoryProtection::READ_WRITE;
        assert!(prot.contains(MemoryProtection::READ));
        assert!(prot.contains(MemoryProtection::WRITE));
        assert!(!prot.contains(MemoryProtection::EXECUTE));
    }

    // 0x6000_0020 = IMAGE_SCN_MEM_READ | IMAGE_SCN_MEM_EXECUTE (+ CNT_CODE).
    #[test]
    fn test_section_info() {
        let section = SectionInfo::new(
            ".text".to_string(),
            0x1000,
            0x2000,
            0x400,
            0x1800,
            0x6000_0020, );
        assert!(section.protection.contains(MemoryProtection::READ));
        assert!(section.protection.contains(MemoryProtection::EXECUTE));
        assert!(!section.protection.contains(MemoryProtection::WRITE));
    }

    // Per-address protection follows the PE section containing the RVA.
    #[test]
    fn test_pe_image_protection_at() {
        let sections = vec![
            SectionInfo::new(".text".to_string(), 0x1000, 0x1000, 0, 0, 0x6000_0020),
            SectionInfo::new(".data".to_string(), 0x2000, 0x1000, 0, 0, 0xC000_0040),
        ];
        let region = MemoryRegion::pe_image(0x10000, &vec![0u8; 0x4000], sections, "test.exe");
        let text_prot = region.protection_at(0x11000);
        assert!(text_prot.contains(MemoryProtection::READ));
        assert!(text_prot.contains(MemoryProtection::EXECUTE));
        assert!(!text_prot.contains(MemoryProtection::WRITE));
        let data_prot = region.protection_at(0x12000);
        assert!(data_prot.contains(MemoryProtection::READ));
        assert!(data_prot.contains(MemoryProtection::WRITE));
    }

    // ThreadId::MAIN is id 0; new/value round-trip.
    #[test]
    fn test_thread_id() {
        assert_eq!(ThreadId::MAIN, ThreadId(0));
        let t1 = ThreadId::new(1);
        assert_eq!(t1.value(), 1);
    }

    // Unmanaged allocations are zero-filled, read/write, and writable.
    #[test]
    fn test_unmanaged_alloc() {
        let region = MemoryRegion::unmanaged_alloc(0x5000, 0x2000);
        assert_eq!(region.base(), 0x5000);
        assert_eq!(region.size(), 0x2000);
        assert_eq!(region.protection(), MemoryProtection::READ_WRITE);
        let data = region.read(0x5000, 16).unwrap();
        assert!(data.iter().all(|&b| b == 0));
        assert!(region.write(0x5000, &[1, 2, 3, 4]));
        assert_eq!(region.read(0x5000, 4).unwrap(), vec![1, 2, 3, 4]);
    }
}