use pkbuffer::{Buffer, PtrBuffer};
use bitflags::bitflags;
use winapi::shared::minwindef::LPVOID;
use winapi::um::memoryapi::{VirtualAlloc, VirtualFree, VirtualQuery, VirtualProtect};
use winapi::um::errhandlingapi::GetLastError;
use winapi::um::winnt::MEMORY_BASIC_INFORMATION;
use crate::{align, Error};
use crate::headers::*;
use crate::pe::*;
use crate::types::*;
bitflags! {
/// Allocation flags for the `flAllocationType` argument of `VirtualAlloc`.
/// Values mirror the Win32 `MEM_*` constants from `winnt.h`.
pub struct AllocationType: u32 {
/// Commit physical storage (memory or paging file) for the pages.
const MEM_COMMIT = 0x1000;
/// Reserve a range of the process's virtual address space without committing it.
const MEM_RESERVE = 0x2000;
/// Indicate that the data in the range is no longer of interest.
const MEM_RESET = 0x80000;
/// Undo the effect of an earlier `MEM_RESET`.
const MEM_RESET_UNDO = 0x1000000;
/// Allocate using large page support.
const MEM_LARGE_PAGES = 0x20000000;
/// Reserve a range for Address Windowing Extensions (AWE) pages.
const MEM_PHYSICAL = 0x00400000;
/// Allocate at the highest possible address.
const MEM_TOP_DOWN = 0x00100000;
/// Track pages written to in the region (write-watch).
const MEM_WRITE_WATCH = 0x00200000;
}
}
bitflags! {
/// Page-protection flags for `VirtualAlloc`/`VirtualProtect`.
/// Values mirror the Win32 `PAGE_*` constants from `winnt.h`; the base
/// protections (execute/read/write/copy variants) are mutually exclusive
/// single-bit values, while `PAGE_GUARD`/`PAGE_NOCACHE`/`PAGE_WRITECOMBINE`
/// are modifiers that can be combined with them.
pub struct Protect: u32 {
const PAGE_EXECUTE = 0x10;
const PAGE_EXECUTE_READ = 0x20;
const PAGE_EXECUTE_READWRITE = 0x40;
const PAGE_EXECUTE_WRITECOPY = 0x80;
const PAGE_NOACCESS = 0x01;
const PAGE_READONLY = 0x02;
const PAGE_READWRITE = 0x04;
const PAGE_WRITECOPY = 0x08;
/// CFG: calls to targets in these pages are not validated.
const PAGE_TARGETS_INVALID = 0x40000000;
const PAGE_GUARD = 0x100;
const PAGE_NOCACHE = 0x200;
const PAGE_WRITECOMBINE = 0x400;
}
}
#[derive(Clone, Eq, PartialEq, Debug)]
/// A byte buffer backed by memory obtained from `VirtualAlloc`
/// (or described by `VirtualQuery` when created via `from_query`).
///
/// NOTE(review): `Clone` copies the raw pointer and the `owned` flag, so two
/// clones of an owned buffer would both attempt `VirtualFree` on drop —
/// confirm this double-free hazard is intended or guarded elsewhere.
pub struct VallocBuffer {
// Pointer/length view over the underlying region.
buffer: PtrBuffer,
// Allocation flags requested at creation (or region state from `VirtualQuery`).
allocation: AllocationType,
// Protection requested at creation; the live protection can differ (see `get_protection`).
protection: Protect,
// True when this object allocated the region itself and must free it on drop.
owned: bool,
}
impl VallocBuffer {
pub fn new(address: *const u8, size: usize, allocation: AllocationType, protection: Protect) -> Result<Self, Error> {
let buffer = unsafe { VirtualAlloc(address as LPVOID, size, allocation.bits(), protection.bits()) };
if buffer == std::ptr::null_mut() { return Err(Error::Win32Error(unsafe { GetLastError() })); }
Ok(Self {
buffer: PtrBuffer::new(buffer as *const u8, size),
allocation,
protection,
owned: true,
})
}
fn query_internal(address: *const u8) -> Result<MEMORY_BASIC_INFORMATION, Error> {
let mut info = MEMORY_BASIC_INFORMATION::default();
if unsafe { VirtualQuery(address as LPVOID, &mut info as *mut MEMORY_BASIC_INFORMATION, std::mem::size_of::<MEMORY_BASIC_INFORMATION>()) } == 0 {
Err(Error::Win32Error(unsafe { GetLastError() }))
}
else {
Ok(info)
}
}
pub fn from_query(address: *const u8) -> Result<Option<Self>, Error> {
let info = Self::query_internal(address)?;
if info.BaseAddress == std::ptr::null_mut() || ((address as usize) < (info.BaseAddress as usize) || (address as usize) > (info.BaseAddress as usize + info.RegionSize)) {
Ok(None)
}
else {
Ok(Some(Self {
buffer: PtrBuffer::new(info.BaseAddress as *const u8, info.RegionSize),
allocation: AllocationType::from_bits_truncate(info.State),
protection: Protect::from_bits_truncate(info.Protect),
owned: false,
}))
}
}
pub fn is_available(&self) -> Result<bool, Error> {
let address = self.as_ptr();
let info = Self::query_internal(address)?;
if info.BaseAddress == std::ptr::null_mut() || ((address as usize) < (info.BaseAddress as usize) || (address as usize) > (info.BaseAddress as usize + info.RegionSize)) {
Ok(false)
}
else {
Ok(true)
}
}
pub fn get_default_allocation(&self) -> AllocationType {
self.allocation
}
pub fn get_allocation(&self) -> Result<AllocationType, Error> {
let available = self.is_available()?;
if !available { return Err(Error::BufferNotAvailable) }
let info = Self::query_internal(self.as_ptr())?;
Ok(AllocationType::from_bits_truncate(info.State))
}
pub fn get_default_protection(&self) -> Protect {
self.protection
}
pub fn get_protection(&self) -> Result<Protect, Error> {
let available = self.is_available()?;
if !available { return Err(Error::BufferNotAvailable) }
let info = Self::query_internal(self.as_ptr())?;
Ok(Protect::from_bits_truncate(info.Protect))
}
pub fn is_readable(&self) -> Result<bool, Error> {
let protect = self.get_protection()?;
Ok(protect.contains(Protect::PAGE_READONLY)
|| protect.contains(Protect::PAGE_READWRITE)
|| protect.contains(Protect::PAGE_EXECUTE_READ)
|| protect.contains(Protect::PAGE_EXECUTE_READWRITE))
}
pub fn is_writable(&self) -> Result<bool, Error> {
let protect = self.get_protection()?;
Ok(protect.contains(Protect::PAGE_READWRITE)
|| protect.contains(Protect::PAGE_EXECUTE_READWRITE))
}
pub fn is_executable(&self) -> Result<bool, Error> {
let protect = self.get_protection()?;
Ok(protect.contains(Protect::PAGE_EXECUTE)
|| protect.contains(Protect::PAGE_EXECUTE_READ)
|| protect.contains(Protect::PAGE_EXECUTE_READWRITE))
}
pub fn protect(&mut self, protect: Option<Protect>) -> Result<Protect, Error> {
let new_protect;
if protect.is_none() { new_protect = self.protection; }
else { new_protect = protect.unwrap(); }
let mut old_protect = new_protect.bits();
if unsafe { VirtualProtect(self.as_ptr() as LPVOID, self.len(), new_protect.bits(), &mut old_protect as *mut u32) } == 0 {
Err(Error::Win32Error(unsafe { GetLastError() }))
}
else {
Ok(Protect::from_bits_truncate(old_protect))
}
}
}
impl Buffer for VallocBuffer {
    /// Size of the underlying region in bytes.
    fn len(&self) -> usize {
        self.buffer.len()
    }
    /// Immutable base pointer of the region.
    fn as_ptr(&self) -> *const u8 {
        self.buffer.as_ptr()
    }
    /// Mutable base pointer of the region.
    fn as_mut_ptr(&mut self) -> *mut u8 {
        self.buffer.as_mut_ptr()
    }
    /// View the region as an immutable byte slice.
    fn as_slice(&self) -> &[u8] {
        self.buffer.as_slice()
    }
    /// View the region as a mutable byte slice.
    fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer.as_mut_slice()
    }
}
// Allow `buffer[i]` / `buffer[a..b]` by delegating to slice indexing.
impl<Idx> std::ops::Index<Idx> for VallocBuffer
where
    Idx: std::slice::SliceIndex<[u8]>,
{
    type Output = Idx::Output;

    fn index(&self, index: Idx) -> &Self::Output {
        let bytes = self.as_slice();
        bytes.index(index)
    }
}
// Mutable counterpart of the `Index` impl above: delegates to slice indexing.
impl<Idx> std::ops::IndexMut<Idx> for VallocBuffer
where
    Idx: std::slice::SliceIndex<[u8]>,
{
    fn index_mut(&mut self, index: Idx) -> &mut Self::Output {
        let bytes = self.as_mut_slice();
        bytes.index_mut(index)
    }
}
impl Drop for VallocBuffer {
    /// Release the region if we own it (see `new` vs. `from_query`) and it is
    /// still mapped.
    fn drop(&mut self) {
        if self.owned && matches!(self.is_available(), Ok(true)) {
            // SAFETY: the pointer came from VirtualAlloc and the region is
            // still available. Per the VirtualFree documentation, dwSize MUST
            // be 0 when dwFreeType is MEM_RELEASE (0x8000); the previous code
            // passed `self.len()`, which makes the call fail with
            // ERROR_INVALID_PARAMETER and silently leak the allocation.
            //
            // NOTE(review): buffers committed inside a larger reservation
            // (see `VallocPE::from_pe`) are not the base of a VirtualAlloc
            // reservation, so MEM_RELEASE on them will still fail;
            // MEM_DECOMMIT may be needed for those — confirm.
            unsafe { VirtualFree(self.as_mut_ptr() as LPVOID, 0, 0x8000) };
        }
    }
}
#[derive(Clone, Eq, PartialEq, Debug)]
/// A PE image mapped into virtual memory, with each section committed in its
/// own [`VallocBuffer`].
pub struct VallocPE {
// PE view over the whole mapped image (headers + sections).
sum: PtrPE,
// Element 0 is the header region (with a default section header);
// subsequent elements are the image's sections in virtual-address order.
sections: Vec<(ImageSectionHeader, VallocBuffer)>,
// The MEM_RESERVE allocation covering the entire image, when built by
// `from_pe`; `None` when wrapping a single pre-existing buffer.
reservation: Option<VallocBuffer>,
}
impl VallocPE {
pub fn new(address: *const u8, size: usize, allocation: AllocationType, protect: Protect) -> Result<Self, Error> {
let buffer = VallocBuffer::new(
address,
size,
allocation,
protect,
)?;
Ok(Self::from_valloc_buffer(buffer))
}
pub fn from_valloc_buffer(buffer: VallocBuffer) -> Self {
let sum = PtrPE::new_memory(buffer.as_ptr(), buffer.len());
let sections = vec![(ImageSectionHeader::default(), buffer)];
Self { sum, sections, reservation: None }
}
pub fn from_pe<P: PE + Buffer>(pe: &P) -> Result<Self, Error> {
let headers = pe.get_valid_nt_headers()?;
let pe_size = pe.calculate_memory_size()?;
let (image_base, section_alignment) = match headers {
NTHeaders::NTHeaders32(h32) => (h32.optional_header.image_base as usize, h32.optional_header.section_alignment),
NTHeaders::NTHeaders64(h64) => (h64.optional_header.image_base as usize, h64.optional_header.section_alignment),
};
let aslr = match headers {
NTHeaders::NTHeaders32(h32) => !(h32.optional_header.dll_characteristics & DLLCharacteristics::DYNAMIC_BASE).is_empty(),
NTHeaders::NTHeaders64(h64) => !(h64.optional_header.dll_characteristics & DLLCharacteristics::DYNAMIC_BASE).is_empty(),
};
let mut alloc_address;
if aslr {
alloc_address = std::ptr::null() as *const u8;
}
else {
alloc_address = image_base as *const u8;
}
let reservation = VallocBuffer::new(
alloc_address,
pe_size,
AllocationType::MEM_RESERVE,
Protect::PAGE_READWRITE,
)?;
alloc_address = reservation.as_ptr();
if !aslr && alloc_address != image_base as *const u8 {
if !pe.has_data_directory(ImageDirectoryEntry::BaseReloc) {
return Err(Error::ImageBaseNotAvailable);
}
}
let alloc_base = alloc_address;
let section_table = pe.get_section_table()?;
let mut section_table = section_table.to_vec();
section_table.sort_by(|a,b| a.virtual_address.0.cmp(&b.virtual_address.0));
let mut sections = Vec::<(ImageSectionHeader, VallocBuffer)>::new();
let header_size = pe.calculate_header_size()?;
let header_data = pe.read(0, header_size)?;
let first_section = §ion_table[0];
let mut section_size = align(first_section.virtual_address.0 as usize, section_alignment as usize);
let mut previous_size = section_size;
let mut total_size = section_size;
let mut header_buffer = VallocBuffer::new(
alloc_address,
section_size,
AllocationType::MEM_COMMIT,
Protect::PAGE_READWRITE,
)?;
header_buffer.write(0, header_data)?;
header_buffer.protect(Some(Protect::PAGE_READONLY))?;
sections.push((ImageSectionHeader::default(), header_buffer));
for scn_header in section_table {
let checked_address = unsafe { alloc_base.add(align(scn_header.virtual_address.into(), section_alignment) as usize) };
alloc_address = unsafe { alloc_address.add(previous_size) };
if alloc_address != checked_address {
return Err(Error::SectionsNotContiguous);
}
section_size = align(scn_header.virtual_size as usize, section_alignment as usize);
previous_size = section_size;
total_size += section_size;
let protect;
if scn_header.characteristics.contains(SectionCharacteristics::MEM_READ)
&& scn_header.characteristics.contains(SectionCharacteristics::MEM_WRITE)
&& scn_header.characteristics.contains(SectionCharacteristics::MEM_EXECUTE) {
protect = Protect::PAGE_EXECUTE_READWRITE;
}
else if scn_header.characteristics.contains(SectionCharacteristics::MEM_READ)
&& scn_header.characteristics.contains(SectionCharacteristics::MEM_WRITE) {
protect = Protect::PAGE_READWRITE;
}
else if scn_header.characteristics.contains(SectionCharacteristics::MEM_READ)
&& scn_header.characteristics.contains(SectionCharacteristics::MEM_EXECUTE) {
protect = Protect::PAGE_EXECUTE_READ;
}
else if scn_header.characteristics.contains(SectionCharacteristics::MEM_READ) {
protect = Protect::PAGE_READONLY;
}
else if scn_header.characteristics.contains(SectionCharacteristics::MEM_EXECUTE) {
protect = Protect::PAGE_EXECUTE;
}
else {
protect = Protect::PAGE_NOACCESS;
}
let section_data = scn_header.read(pe)?;
let mut section_buffer = VallocBuffer::new(
alloc_address,
section_size,
AllocationType::MEM_COMMIT,
Protect::PAGE_READWRITE,
)?;
section_buffer.write(0, section_data)?;
section_buffer.protect(Some(protect))?;
sections.push((scn_header.clone(), section_buffer));
}
let sum = PtrPE::new_memory(reservation.as_ptr(), total_size);
Ok(Self { sum, sections, reservation: Some(reservation) })
}
pub fn load_image(&mut self) -> Result<(), Error> {
match self.get_arch() {
Ok(a) => match a {
Arch::X86 => { if std::mem::size_of::<usize>() == 8 { return Err(Error::ArchMismatch(Arch::X86, a)); } },
Arch::X64 => { if std::mem::size_of::<usize>() == 4 { return Err(Error::ArchMismatch(Arch::X64, a)); } },
},
Err(e) => return Err(e),
}
self.mark_read_write()?;
let self_ro = PtrPE::new_memory(self.as_ptr(), self.len());
if self_ro.has_data_directory(ImageDirectoryEntry::BaseReloc) {
let reloc_dir = match RelocationDirectory::parse(&self_ro) {
Ok(r) => r,
Err(e) => return Err(e),
};
reloc_dir.relocate(self, self_ro.as_ptr() as u64)?;
}
if self_ro.has_data_directory(ImageDirectoryEntry::Import) {
let import_dir = ImportDirectory::parse(&self_ro)?;
match import_dir.resolve_iat(self) {
Ok(()) => (),
Err(e) => return Err(e),
}
}
self.protect()?;
Ok(())
}
pub fn get_header(&self) -> &VallocBuffer {
&self.sections[0].1
}
pub fn get_mut_header(&mut self) -> &mut VallocBuffer {
&mut self.sections[0].1
}
pub fn get_section(&self, index: usize) -> Result<&VallocBuffer, Error> {
let sections = &self.sections[1..];
if index > sections.len() { return Err(Error::OutOfBounds(sections.len(), index)); }
Ok(§ions[index].1)
}
pub fn get_mut_section(&mut self, index: usize) -> Result<&mut VallocBuffer, Error> {
let sections = &mut self.sections[1..];
if index > sections.len() { return Err(Error::OutOfBounds(sections.len(), index)); }
Ok(&mut sections[index].1)
}
pub fn get_section_by_name<S: AsRef<str>>(&self, name: S) -> Result<&VallocBuffer, Error> {
let sections = &self.sections[1..];
let name = name.as_ref();
for (header, section) in sections {
let s = header.name.as_str()?;
if name == s { return Ok(section); }
}
Err(Error::SectionNotFound)
}
pub fn get_mut_section_by_name<S: AsRef<str>>(&mut self, name: S) -> Result<&mut VallocBuffer, Error> {
let sections = &mut self.sections[1..];
let name = name.as_ref();
for (header, scn) in sections.iter_mut() {
let s = header.name.as_str()?;
if name == s { return Ok(scn); }
}
Err(Error::SectionNotFound)
}
pub fn mark_read_write(&mut self) -> Result<(), Error> {
for (_, scn) in &mut self.sections[1..] {
scn.protect(Some(Protect::PAGE_READWRITE))?;
}
Ok(())
}
pub fn protect(&mut self) -> Result<(), Error> {
for (header, scn) in &mut self.sections[1..] {
let protect;
if header.characteristics.contains(SectionCharacteristics::MEM_READ)
&& header.characteristics.contains(SectionCharacteristics::MEM_WRITE)
&& header.characteristics.contains(SectionCharacteristics::MEM_EXECUTE) {
protect = Protect::PAGE_EXECUTE_READWRITE;
}
else if header.characteristics.contains(SectionCharacteristics::MEM_READ)
&& header.characteristics.contains(SectionCharacteristics::MEM_WRITE) {
protect = Protect::PAGE_READWRITE;
}
else if header.characteristics.contains(SectionCharacteristics::MEM_READ)
&& header.characteristics.contains(SectionCharacteristics::MEM_EXECUTE) {
protect = Protect::PAGE_EXECUTE_READ;
}
else if header.characteristics.contains(SectionCharacteristics::MEM_READ) {
protect = Protect::PAGE_READONLY;
}
else if header.characteristics.contains(SectionCharacteristics::MEM_EXECUTE) {
protect = Protect::PAGE_EXECUTE;
}
else {
protect = Protect::PAGE_NOACCESS;
}
scn.protect(Some(protect))?;
}
Ok(())
}
}
impl PE for VallocPE {
    /// A mapped image always uses memory (virtual-address) layout.
    fn get_type(&self) -> PEType {
        PEType::Memory
    }
    /// A `VallocPE` is by construction backed by a live allocation.
    fn is_allocated(&self) -> bool {
        true
    }
}
impl Buffer for VallocPE {
    /// Size in bytes of the whole mapped image.
    fn len(&self) -> usize {
        self.sum.len()
    }
    /// Immutable base pointer of the mapped image.
    fn as_ptr(&self) -> *const u8 {
        self.sum.as_ptr()
    }
    /// Mutable base pointer of the mapped image.
    fn as_mut_ptr(&mut self) -> *mut u8 {
        self.sum.as_mut_ptr()
    }
    /// View the whole image as an immutable byte slice.
    fn as_slice(&self) -> &[u8] {
        self.sum.as_slice()
    }
    /// View the whole image as a mutable byte slice.
    fn as_mut_slice(&mut self) -> &mut [u8] {
        self.sum.as_mut_slice()
    }
}
// Allow `pe[i]` / `pe[a..b]` by delegating to slice indexing over the image.
impl<Idx> std::ops::Index<Idx> for VallocPE
where
    Idx: std::slice::SliceIndex<[u8]>,
{
    type Output = Idx::Output;

    fn index(&self, index: Idx) -> &Self::Output {
        let bytes = self.as_slice();
        bytes.index(index)
    }
}
// Mutable counterpart of the `Index` impl above.
impl<Idx> std::ops::IndexMut<Idx> for VallocPE
where
    Idx: std::slice::SliceIndex<[u8]>,
{
    fn index_mut(&mut self, index: Idx) -> &mut Self::Output {
        let bytes = self.as_mut_slice();
        bytes.index_mut(index)
    }
}