use byteorder::{LittleEndian, ReadBytesExt};
use pkbuffer::{PtrBuffer, VecBuffer, Error as PKError};
pub use pkbuffer::Buffer;
use std::clone::Clone;
use std::cmp;
use std::convert::AsRef;
use std::io::Cursor;
use std::mem;
use std::ops::{Index, IndexMut};
use std::path::Path;
use std::slice;
use crate::{align, Error, HashData};
use crate::headers::*;
use crate::imphash::*;
use crate::types::*;
/// Whether a PE image is laid out as it appears on disk or as mapped in memory.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum PEType {
    /// File layout: section data is located by file offsets.
    Disk,
    /// Loaded-image layout: section data is located by RVAs.
    Memory,
}
/// An address paired with its addressing scheme, used to translate into a raw
/// buffer index regardless of the image's layout.
pub enum PETranslation {
    /// A file offset.
    Disk(Offset),
    /// A relative virtual address.
    Memory(RVA),
}
impl Address for PETranslation {
    /// Resolves to a file offset; `Memory` values are converted through the image.
    fn as_offset<P: PE>(&self, pe: &P) -> Result<Offset, Error> {
        match *self {
            Self::Disk(offset) => Ok(offset),
            Self::Memory(rva) => rva.as_offset(pe),
        }
    }
    /// Resolves to an RVA; `Disk` values are converted through the image.
    fn as_rva<P: PE>(&self, pe: &P) -> Result<RVA, Error> {
        match *self {
            Self::Disk(offset) => offset.as_rva(pe),
            Self::Memory(rva) => Ok(rva),
        }
    }
    /// Resolves to a virtual address via the image's base.
    fn as_va<P: PE>(&self, pe: &P) -> Result<VA, Error> {
        match *self {
            Self::Disk(offset) => offset.as_va(pe),
            Self::Memory(rva) => rva.as_va(pe),
        }
    }
    /// Resolves to a raw pointer into the image buffer.
    fn as_ptr<P: PE>(&self, pe: &P) -> Result<*const u8, Error> {
        self.as_offset(pe)?.as_ptr(pe)
    }
}
/// Allows a plain [`Offset`] to be used where a [`PETranslation`] is expected.
impl From<Offset> for PETranslation {
    fn from(offset: Offset) -> Self {
        Self::Disk(offset)
    }
}
/// Allows a plain [`RVA`] to be used where a [`PETranslation`] is expected.
impl From<RVA> for PETranslation {
    fn from(rva: RVA) -> Self {
        Self::Memory(rva)
    }
}
/// Core trait providing PE (Portable Executable) parsing and manipulation on
/// top of a raw [`Buffer`] of bytes.
pub trait PE: Buffer + Sized {
    /// The layout of this image: [`PEType::Disk`] or [`PEType::Memory`].
    fn get_type(&self) -> PEType;
    /// Whether this image was allocated by the OS loader (Windows only).
    #[cfg(feature="win32")]
    fn is_allocated(&self) -> bool;
    /// Measures the size of a NUL-terminated C string starting at `offset`.
    ///
    /// The returned size includes the NUL terminator. With `thunk` set, the
    /// size is padded to an even byte count (import thunk convention).
    /// `max_size` caps the scan; otherwise it runs to the end of the buffer.
    fn get_cstring_size(&self, offset: usize, thunk: bool, max_size: Option<usize>) -> Result<usize, Error> {
        let end = match max_size {
            None => self.len(),
            Some(s) => offset + s,
        };
        if end > self.len() {
            return Err(Error::OutOfBounds(self.len(), end));
        }
        let mut cursor = Cursor::new(self.as_slice());
        let mut index = offset;
        for i in index..end {
            cursor.set_position(i as u64);
            let val = cursor.read_u8();
            match val {
                Err(e) => return Err(Error::from(e)),
                Ok(v) => match v {
                    // stop at the NUL terminator
                    0 => { index = i; break; },
                    _ => ()
                }
            }
        }
        // NOTE(review): if no NUL is found before `end`, `index` is still
        // `offset` and the reported size is 1 — confirm this fallback is
        // intentional rather than returning the full scanned length.
        index += 1; let mut size = index - offset;
        if thunk {
            // pad thunk strings to a 16-bit boundary
            size += size % 2;
        }
        Ok(size)
    }
    /// Measures the size, in `u16` units, of a NUL-terminated UTF-16 string at
    /// `offset`, including the terminator. `max_size` is a cap in characters,
    /// not bytes.
    fn get_widestring_size(&self, offset: usize, max_size: Option<usize>) -> Result<usize, Error> {
        let end = match max_size {
            None => self.len(),
            Some(s) => offset + (s * 2),
        };
        if end > self.len() {
            return Err(Error::OutOfBounds(self.len(), end));
        }
        let mut cursor = Cursor::new(self.as_slice());
        let mut index = offset;
        for i in (index..end).step_by(2) {
            cursor.set_position(i as u64);
            let val = cursor.read_u16::<LittleEndian>();
            match val {
                Err(e) => return Err(Error::from(e)),
                Ok(v) => match v {
                    // stop at the NUL terminator
                    0 => { index = i; break; },
                    _ => ()
                }
            }
        }
        // character count, including the terminator
        Ok( ( (index+2) - offset) / 2 )
    }
    /// Reads a NUL-terminated C string at `offset`; see
    /// [`PE::get_cstring_size`] for the meaning of `thunk` and `max_size`.
    fn get_cstring(&self, offset: usize, thunk: bool, max_size: Option<usize>) -> Result<&[CChar], Error> {
        let found_size = self.get_cstring_size(offset, thunk, max_size)?;
        let result = self.get_slice_ref::<CChar>(offset, found_size)?;
        Ok(result)
    }
    /// Mutable variant of [`PE::get_cstring`].
    fn get_mut_cstring(&mut self, offset: usize, thunk: bool, max_size: Option<usize>) -> Result<&mut [CChar], Error> {
        let found_size = self.get_cstring_size(offset, thunk, max_size)?;
        let result = self.get_mut_slice_ref::<CChar>(offset, found_size)?;
        Ok(result)
    }
    /// Reads a NUL-terminated UTF-16 string at `offset`.
    fn get_widestring(&self, offset: usize, max_size: Option<usize>) -> Result<&[WChar], Error> {
        let found_size = self.get_widestring_size(offset, max_size)?;
        let result = self.get_slice_ref::<WChar>(offset, found_size)?;
        Ok(result)
    }
    /// Mutable variant of [`PE::get_widestring`].
    fn get_mut_widestring(&mut self, offset: usize, max_size: Option<usize>) -> Result<&mut [WChar], Error> {
        let found_size = self.get_widestring_size(offset, max_size)?;
        let result = self.get_mut_slice_ref::<WChar>(offset, found_size)?;
        Ok(result)
    }
    /// Converts the given [`PETranslation`] into a raw buffer index matching
    /// this image's layout, converting between offsets and RVAs as needed.
    fn translate(&self, addr: PETranslation) -> Result<usize, Error> {
        match self.get_type() {
            PEType::Disk => match addr {
                PETranslation::Disk(o) => Ok(o.into()),
                PETranslation::Memory(r) => {
                    let result = r.as_offset(self)?;
                    Ok(result.into())
                },
            }
            PEType::Memory => match addr {
                PETranslation::Disk(o) => {
                    let result = o.as_rva(self)?;
                    Ok(result.into())
                },
                PETranslation::Memory(r) => Ok(r.into()),
            }
        }
    }
    /// Reads a copy of the DOS header at the start of the image.
    fn get_dos_header(&self) -> Result<ImageDOSHeader, Error> {
        self.read_val::<ImageDOSHeader>(0).map_err(Error::from)
    }
    /// Same as [`PE::get_dos_header`]; note it returns a copy, not a mutable view.
    fn get_mut_dos_header(&mut self) -> Result<ImageDOSHeader, Error> {
        self.read_val::<ImageDOSHeader>(0).map_err(Error::from)
    }
    /// Reads the DOS header and verifies its `MZ` signature.
    fn get_valid_dos_header(&self) -> Result<ImageDOSHeader, Error> {
        let dos_header = self.get_dos_header()?;
        if dos_header.e_magic != DOS_SIGNATURE {
            return Err(Error::InvalidDOSSignature(dos_header.e_magic));
        }
        Ok(dos_header)
    }
    /// Borrows the DOS header in place (unvalidated).
    fn get_dos_header_ref(&self) -> Result<&ImageDOSHeader, Error> {
        self.get_aligned_ref::<ImageDOSHeader>(0).map_err(Error::from)
    }
    /// Mutably borrows the DOS header in place (unvalidated).
    fn get_mut_dos_header_ref(&mut self) -> Result<&mut ImageDOSHeader, Error> {
        self.get_aligned_mut::<ImageDOSHeader>(0).map_err(Error::from)
    }
    /// Borrows the DOS header in place, verifying its `MZ` signature.
    fn get_valid_dos_header_ref(&self) -> Result<&ImageDOSHeader, Error> {
        let dos_ref = self.get_aligned_ref::<ImageDOSHeader>(0).map_err(Error::from)?;
        if dos_ref.e_magic != DOS_SIGNATURE {
            return Err(Error::InvalidDOSSignature(dos_ref.e_magic));
        };
        Ok(dos_ref)
    }
    /// Mutably borrows the DOS header in place, verifying its `MZ` signature.
    fn get_valid_mut_dos_header_ref(&mut self) -> Result<&mut ImageDOSHeader, Error> {
        let dos_ref = self.get_aligned_mut::<ImageDOSHeader>(0).map_err(Error::from)?;
        if dos_ref.e_magic != DOS_SIGNATURE {
            return Err(Error::InvalidDOSSignature(dos_ref.e_magic));
        };
        Ok(dos_ref)
    }
    /// Returns `e_lfanew` — the file offset of the NT headers — after
    /// validating the DOS signature.
    fn e_lfanew(&self) -> Result<Offset, Error> {
        let header = self.get_valid_dos_header()?;
        Ok(header.e_lfanew)
    }
    /// Returns the DOS stub: the bytes between the DOS header and `e_lfanew`.
    /// Returns an empty slice when `e_lfanew` points inside the DOS header.
    fn get_dos_stub(&self) -> Result<&[u8], Error> {
        let e_lfanew = self.e_lfanew()?;
        let dos_header_end = Offset(mem::size_of::<ImageDOSHeader>() as u32);
        if e_lfanew.0 < dos_header_end.0 {
            let result = self.read(dos_header_end.into(), 0usize)?;
            return Ok(result);
        }
        let result = self.read(dos_header_end.into(), (e_lfanew.0 - dos_header_end.0) as usize)?;
        Ok(result)
    }
    /// Reads a copy of the 32-bit NT headers at `e_lfanew` (unvalidated).
    fn get_nt_headers_32(&self) -> Result<ImageNTHeaders32, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.read_val::<ImageNTHeaders32>(e_lfanew.into()).map_err(Error::from)
    }
    /// Same as [`PE::get_nt_headers_32`]; note it returns a copy.
    fn get_mut_nt_headers_32(&mut self) -> Result<ImageNTHeaders32, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.read_val::<ImageNTHeaders32>(e_lfanew.into()).map_err(Error::from)
    }
    /// Reads the 32-bit NT headers, checking `e_lfanew` 4-byte alignment, the
    /// PE signature and the 32-bit optional header magic.
    fn get_valid_nt_headers_32(&self) -> Result<ImageNTHeaders32, Error> {
        let e_lfanew = self.e_lfanew()?;
        if e_lfanew.0 % 4 != 0 {
            return Err(Error::BadAlignment);
        }
        let nt_headers = self.get_nt_headers_32()?;
        if nt_headers.signature != NT_SIGNATURE {
            return Err(Error::InvalidPESignature(nt_headers.signature));
        }
        if nt_headers.optional_header.magic != HDR32_MAGIC {
            return Err(Error::InvalidNTSignature(nt_headers.optional_header.magic));
        }
        Ok(nt_headers)
    }
    /// Same validation as [`PE::get_valid_nt_headers_32`]; note it returns a copy.
    fn get_valid_mut_nt_headers_32(&mut self) -> Result<ImageNTHeaders32, Error> {
        let e_lfanew = self.e_lfanew()?;
        if e_lfanew.0 % 4 != 0 {
            return Err(Error::BadAlignment);
        }
        let nt_headers = self.get_mut_nt_headers_32()?;
        if nt_headers.signature != NT_SIGNATURE {
            return Err(Error::InvalidPESignature(nt_headers.signature));
        }
        if nt_headers.optional_header.magic != HDR32_MAGIC {
            return Err(Error::InvalidNTSignature(nt_headers.optional_header.magic));
        }
        Ok(nt_headers)
    }
    /// Reads a copy of the 64-bit NT headers at `e_lfanew` (unvalidated).
    fn get_nt_headers_64(&self) -> Result<ImageNTHeaders64, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.read_val::<ImageNTHeaders64>(e_lfanew.into()).map_err(Error::from)
    }
    /// Same as [`PE::get_nt_headers_64`]; note it returns a copy.
    fn get_mut_nt_headers_64(&mut self) -> Result<ImageNTHeaders64, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.read_val::<ImageNTHeaders64>(e_lfanew.into()).map_err(Error::from)
    }
    /// Borrows the 32-bit NT headers in place (unvalidated).
    fn get_nt_headers_32_ref(&self) -> Result<&ImageNTHeaders32, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.get_aligned_ref::<ImageNTHeaders32>(e_lfanew.into()).map_err(Error::from)
    }
    /// Mutably borrows the 32-bit NT headers in place (unvalidated).
    fn get_mut_nt_headers_32_ref(&mut self) -> Result<&mut ImageNTHeaders32, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.get_aligned_mut::<ImageNTHeaders32>(e_lfanew.into()).map_err(Error::from)
    }
    /// Borrows the 64-bit NT headers in place (unvalidated).
    fn get_nt_headers_64_ref(&self) -> Result<&ImageNTHeaders64, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.get_aligned_ref::<ImageNTHeaders64>(e_lfanew.into()).map_err(Error::from)
    }
    /// Mutably borrows the 64-bit NT headers in place (unvalidated).
    fn get_mut_nt_headers_64_ref(&mut self) -> Result<&mut ImageNTHeaders64, Error> {
        let e_lfanew = self.e_lfanew()?;
        self.get_aligned_mut::<ImageNTHeaders64>(e_lfanew.into()).map_err(Error::from)
    }
    /// Reads the 64-bit NT headers, checking `e_lfanew` 4-byte alignment, the
    /// PE signature and the 64-bit optional header magic.
    fn get_valid_nt_headers_64(&self) -> Result<ImageNTHeaders64, Error> {
        let e_lfanew = self.e_lfanew()?;
        if e_lfanew.0 % 4 != 0 {
            return Err(Error::BadAlignment);
        }
        let nt_headers = self.get_nt_headers_64()?;
        if nt_headers.signature != NT_SIGNATURE {
            return Err(Error::InvalidPESignature(nt_headers.signature));
        }
        if nt_headers.optional_header.magic != HDR64_MAGIC {
            return Err(Error::InvalidNTSignature(nt_headers.optional_header.magic));
        }
        Ok(nt_headers)
    }
    /// Same validation as [`PE::get_valid_nt_headers_64`]; note it returns a copy.
    fn get_valid_mut_nt_headers_64(&mut self) -> Result<ImageNTHeaders64, Error> {
        let e_lfanew = self.e_lfanew()?;
        if e_lfanew.0 % 4 != 0 {
            return Err(Error::BadAlignment);
        }
        let nt_headers = self.get_mut_nt_headers_64()?;
        if nt_headers.signature != NT_SIGNATURE {
            return Err(Error::InvalidPESignature(nt_headers.signature));
        }
        if nt_headers.optional_header.magic != HDR64_MAGIC {
            return Err(Error::InvalidNTSignature(nt_headers.optional_header.magic));
        }
        Ok(nt_headers)
    }
fn get_nt_magic(&self) -> Result<u16, Error> {
let header = self.get_nt_headers_32()?;
Ok(header.optional_header.magic)
}
fn get_arch(&self) -> Result<Arch, Error> {
let magic = self.get_nt_magic()?;
match magic {
HDR32_MAGIC => Ok(Arch::X86),
HDR64_MAGIC => Ok(Arch::X64),
_ => return Err(Error::InvalidNTSignature(magic)),
}
}
fn get_valid_nt_headers(&self) -> Result<NTHeaders, Error> {
let magic = self.get_nt_magic()?;
if magic == HDR32_MAGIC {
match self.get_valid_nt_headers_32() {
Ok(h) => Ok(NTHeaders::NTHeaders32(h)),
Err(e) => Err(e)
}
}
else if magic == HDR64_MAGIC {
match self.get_valid_nt_headers_64() {
Ok(h) => Ok(NTHeaders::NTHeaders64(h)),
Err(e) => Err(e),
}
}
else {
Err(Error::InvalidNTSignature(magic))
}
}
fn get_valid_mut_nt_headers(&mut self) -> Result<NTHeadersMut, Error> {
let magic = self.get_nt_magic()?;
if magic == HDR32_MAGIC {
match self.get_valid_mut_nt_headers_32() {
Ok(h) => Ok(NTHeadersMut::NTHeaders32(h)),
Err(e) => Err(e)
}
}
else if magic == HDR64_MAGIC {
match self.get_valid_mut_nt_headers_64() {
Ok(h) => Ok(NTHeadersMut::NTHeaders64(h)),
Err(e) => Err(e),
}
}
else {
Err(Error::InvalidNTSignature(magic))
}
}
    /// Checks whether the checksum recorded in the optional header matches the
    /// checksum computed from the image contents.
    fn validate_checksum(&self) -> Result<bool, Error> {
        let checksum = match self.get_valid_nt_headers() {
            Ok(h) => match h {
                NTHeaders::NTHeaders32(h32) => h32.optional_header.checksum,
                NTHeaders::NTHeaders64(h64) => h64.optional_header.checksum,
            },
            Err(e) => return Err(e),
        };
        match self.calculate_checksum() {
            Ok(c) => Ok(c == checksum),
            Err(e) => Err(e),
        }
    }
    /// Computes the PE checksum of the buffer: a 16-bit folded sum over the
    /// image taken in 32-bit little-endian words, skipping the stored checksum
    /// field, with the total file size added at the end.
    fn calculate_checksum(&self) -> Result<u32, Error> {
        let magic = self.get_nt_magic()?;
        let e_lfanew = self.e_lfanew()?;
        // file offset of the optional header's checksum field; it differs
        // between the 32-bit and 64-bit optional header layouts
        let checksum_offset = if magic == HDR32_MAGIC {
            e_lfanew.0 as usize + 0x58
        } else {
            e_lfanew.0 as usize + 0x64
        };
        let eof = self.len();
        let mut checksum = 0u64;
        for offset in (0..eof).step_by(4) {
            // the stored checksum itself is excluded from the sum
            if offset == checksum_offset { continue; }
            let data: Vec<u8> = match self.read(offset, 4) {
                Ok(d) => d.iter().cloned().collect(),
                Err(e) => {
                    // a short read at the end of the buffer is zero-padded up
                    // to a full 32-bit word; any other error is propagated
                    let pk_err = Error::from(e);
                    if let Error::PKBufferError(PKError::OutOfBounds(_,_)) = pk_err { () }
                    else { return Err(pk_err); }
                    let real_size = eof - offset;
                    let real_output = self.read(offset, real_size)?;
                    let mut padded_output = Vec::<u8>::new();
                    padded_output.extend_from_slice(real_output);
                    padded_output.append(&mut vec![0u8; 4 - padded_output.len()]);
                    padded_output
                },
            };
            let int_val = data.as_slice().read_u32::<LittleEndian>().unwrap();
            // accumulate with end-around carry folding into 32 bits
            checksum = (checksum & 0xFFFFFFFF) + (int_val as u64) + (checksum >> 32);
            if checksum > (u32::MAX as u64) {
                checksum = (checksum & 0xFFFFFFFF) + (checksum >> 32);
            }
        }
        // fold the running sum down to 16 bits, then add the file size
        checksum = (checksum & 0xFFFF) + (checksum >> 16);
        checksum = checksum + (checksum >> 16);
        checksum = checksum & 0xFFFF;
        checksum += eof as u64;
        Ok(checksum as u32)
    }
    /// Returns the entry point RVA from the optional header.
    fn get_entrypoint(&self) -> Result<RVA, Error> {
        let nt_headers = self.get_valid_nt_headers()?;
        match nt_headers {
            NTHeaders::NTHeaders32(h32) => Ok(h32.optional_header.address_of_entry_point),
            NTHeaders::NTHeaders64(h64) => Ok(h64.optional_header.address_of_entry_point),
        }
    }
    /// Returns the image base address.
    ///
    /// With the `win32` feature, a loader-allocated buffer reports its own
    /// address as the effective base; otherwise the optional header's
    /// `image_base` field is returned.
    fn get_image_base(&self) -> Result<u64, Error> {
        #[cfg(feature="win32")] {
            if self.is_allocated() {
                return Ok(self.as_ptr() as u64);
            }
        }
        match self.get_valid_nt_headers() {
            Ok(h) => match h {
                NTHeaders::NTHeaders32(h32) => Ok(h32.optional_header.image_base as u64),
                NTHeaders::NTHeaders64(h64) => Ok(h64.optional_header.image_base),
            },
            Err(e) => return Err(e),
        }
    }
    /// Computes the file offset of the data directory array, which directly
    /// follows the NT headers.
    fn get_data_directory_offset(&self) -> Result<Offset, Error> {
        let e_lfanew = self.e_lfanew()?;
        let nt_header = self.get_valid_nt_headers()?;
        let header_size = match nt_header {
            NTHeaders::NTHeaders32(_) => mem::size_of::<ImageNTHeaders32>(),
            NTHeaders::NTHeaders64(_) => mem::size_of::<ImageNTHeaders64>(),
        };
        let offset = Offset(e_lfanew.0 + (header_size as u32));
        if !self.validate_offset(offset) {
            return Err(Error::InvalidOffset(offset));
        }
        Ok(offset)
    }
    /// Returns the number of data directory entries, clamped to the 16 entries
    /// the PE format defines.
    fn get_data_directory_size(&self) -> Result<usize, Error> {
        let nt_header = self.get_valid_nt_headers()?;
        let sizes = match nt_header {
            NTHeaders::NTHeaders32(h32) => h32.optional_header.number_of_rva_and_sizes,
            NTHeaders::NTHeaders64(h64) => h64.optional_header.number_of_rva_and_sizes,
        };
        if sizes > 16 {
            Ok(16)
        }
        else {
            Ok(sizes as usize)
        }
    }
    /// Borrows the data directory array in place.
    fn get_data_directory_table(&self) -> Result<&[ImageDataDirectory], Error> {
        let offset = self.get_data_directory_offset()?;
        let size = self.get_data_directory_size()?;
        let result = self.get_slice_ref::<ImageDataDirectory>(offset.into(), size).map_err(Error::from)?;
        Ok(result)
    }
    /// Mutably borrows the data directory array in place.
    fn get_mut_data_directory_table(&mut self) -> Result<&mut [ImageDataDirectory], Error> {
        let offset = self.get_data_directory_offset()?;
        let size = self.get_data_directory_size()?;
        let result = self.get_mut_slice_ref::<ImageDataDirectory>(offset.into(), size).map_err(Error::from)?;
        Ok(result)
    }
    /// Returns a copy of the data directory entry for `dir`, or
    /// [`Error::BadDirectory`] if the table doesn't reach that index.
    fn get_data_directory(&self, dir: ImageDirectoryEntry) -> Result<ImageDataDirectory, Error> {
        let directory_table = self.get_data_directory_table()?;
        let index = dir as usize;
        if index >= directory_table.len() {
            return Err(Error::BadDirectory(dir));
        }
        Ok(directory_table[index])
    }
    /// Same as [`PE::get_data_directory`]; note it returns a copy.
    fn get_mut_data_directory(&mut self, dir: ImageDirectoryEntry) -> Result<ImageDataDirectory, Error> {
        let directory_table = self.get_mut_data_directory_table()?;
        let index = dir as usize;
        if index >= directory_table.len() {
            return Err(Error::BadDirectory(dir));
        }
        Ok(directory_table[index])
    }
    /// Returns `true` if the directory entry exists, is non-null, and its RVA
    /// falls inside the image.
    fn has_data_directory(&self, dir: ImageDirectoryEntry) -> bool {
        let dir_obj = match self.get_data_directory(dir) {
            Ok(d) => d,
            Err(_) => return false,
        };
        if dir_obj.virtual_address.0 == 0 { return false; }
        self.validate_rva(dir_obj.virtual_address)
    }
    /// Casts the contents of the given data directory to `T`.
    fn cast_directory<T: Copy>(&self, dir: ImageDirectoryEntry) -> Result<T, Error> {
        let directory = self.get_data_directory(dir)?;
        directory.cast::<T,Self>(self)
    }
    /// Mutable variant of [`PE::cast_directory`].
    fn cast_directory_mut<T: Copy>(&mut self, dir: ImageDirectoryEntry) -> Result<T, Error> {
        // NOTE(review): this manufactures a second mutable alias of `self` so
        // the directory read below can coexist with the mutable cast; relies
        // on the two uses never overlapping — verify soundness.
        let bypass = unsafe { &mut *(self as *mut Self) };
        let directory = self.get_data_directory(dir)?;
        directory.cast_mut::<T,Self>(bypass)
    }
    /// Computes the file offset of the section table: the PE signature plus
    /// file header plus `size_of_optional_header` past `e_lfanew`.
    fn get_section_table_offset(&self) -> Result<Offset, Error> {
        let e_lfanew = self.e_lfanew()?;
        let nt_header = self.get_valid_nt_headers()?;
        let size_of_optional = match nt_header {
            NTHeaders::NTHeaders32(h) => h.file_header.size_of_optional_header,
            NTHeaders::NTHeaders64(h) => h.file_header.size_of_optional_header,
        };
        let Offset(mut offset) = e_lfanew;
        // PE signature (u32) + file header + declared optional header size
        offset += mem::size_of::<u32>() as u32;
        offset += mem::size_of::<ImageFileHeader>() as u32;
        offset += size_of_optional as u32;
        if !self.validate_offset(Offset(offset)) {
            return Err(Error::InvalidOffset(Offset(offset)));
        }
        Ok(Offset(offset))
    }
    /// Borrows the section table in place, sized by `number_of_sections`.
    fn get_section_table(&self) -> Result<&[ImageSectionHeader], Error> {
        let offset = self.get_section_table_offset()?;
        let nt_headers = self.get_valid_nt_headers()?;
        let sections = match nt_headers {
            NTHeaders::NTHeaders32(h) => h.file_header.number_of_sections,
            NTHeaders::NTHeaders64(h) => h.file_header.number_of_sections,
        };
        let result = self.get_slice_ref::<ImageSectionHeader>(offset.into(), sections as usize).map_err(Error::from)?;
        Ok(result)
    }
    /// Mutably borrows the section table in place.
    fn get_mut_section_table(&mut self) -> Result<&mut [ImageSectionHeader], Error> {
        let offset = self.get_section_table_offset()?;
        let nt_headers = self.get_valid_nt_headers()?;
        let sections = match nt_headers {
            NTHeaders::NTHeaders32(h) => h.file_header.number_of_sections,
            NTHeaders::NTHeaders64(h) => h.file_header.number_of_sections,
        };
        let result = self.get_mut_slice_ref::<ImageSectionHeader>(offset.into(), sections as usize).map_err(Error::from)?;
        Ok(result)
    }
fn get_section_by_offset(&self, offset: Offset) -> Result<ImageSectionHeader, Error> {
let section_table = self.get_section_table()?;
for section in section_table {
if section.has_offset(offset) {
return Ok(section.clone());
}
}
Err(Error::SectionNotFound)
}
fn get_mut_section_by_offset(&mut self, offset: Offset) -> Result<ImageSectionHeader, Error> {
let section_table = self.get_mut_section_table()?;
for section in section_table {
if section.has_offset(offset) {
return Ok(section.clone());
}
}
Err(Error::SectionNotFound)
}
fn get_section_by_rva(&self, rva: RVA) -> Result<ImageSectionHeader, Error> {
let section_table = self.get_section_table()?;
for section in section_table {
if section.has_rva(rva) {
return Ok(section.clone());
}
}
Err(Error::SectionNotFound)
}
fn get_mut_section_by_rva(&mut self, rva: RVA) -> Result<ImageSectionHeader, Error> {
let section_table = self.get_mut_section_table()?;
for section in section_table {
if section.has_rva(rva) {
return Ok(section.clone());
}
}
Err(Error::SectionNotFound)
}
fn get_section_by_name<S: AsRef<str>>(&self, name: S) -> Result<ImageSectionHeader, Error> {
let sections = self.get_section_table()?;
let s = name.as_ref();
for section in sections {
let name = section.name.as_str()?;
if name == s {
return Ok(section.clone());
}
}
Err(Error::SectionNotFound)
}
fn get_mut_section_by_name(&mut self, name: String) -> Result<ImageSectionHeader, Error> {
let sections = self.get_mut_section_table()?;
let s = name.as_str();
for section in sections {
let name = section.name.as_str()?;
if name == s {
return Ok(section.clone());
}
}
Err(Error::SectionNotFound)
}
    /// Appends `section` to the section table: bumps `number_of_sections`,
    /// writes the updated NT headers back into the buffer, then writes the new
    /// section header at the end of the table. Returns a copy of the header as
    /// stored.
    fn add_section(&mut self, section: &ImageSectionHeader) -> Result<ImageSectionHeader, Error> {
        let e_lfanew = self.e_lfanew()?;
        let sections_before = match self.get_valid_nt_headers() {
            Ok(ref h) => match h {
                NTHeaders::NTHeaders32(h32) => h32.file_header.number_of_sections,
                NTHeaders::NTHeaders64(h64) => h64.file_header.number_of_sections,
            },
            Err(e) => return Err(e),
        };
        let mut nt_headers = match self.get_valid_mut_nt_headers() {
            Ok(h) => h,
            Err(e) => return Err(e),
        };
        match nt_headers {
            NTHeadersMut::NTHeaders32(ref mut h32) => h32.file_header.number_of_sections += 1,
            NTHeadersMut::NTHeaders64(ref mut h64) => h64.file_header.number_of_sections += 1,
        }
        // persist the modified headers back into the buffer at e_lfanew
        match nt_headers {
            NTHeadersMut::NTHeaders32(h32) => self.write_val(e_lfanew.0 as usize, &h32)?,
            NTHeadersMut::NTHeaders64(h64) => self.write_val(e_lfanew.0 as usize, &h64)?,
        }
        let section_table_offset = self.get_section_table_offset()?;
        let section_size = std::mem::size_of::<ImageSectionHeader>();
        // new entry goes one slot past the previously-last section header
        let write_offset = section_table_offset.0 as usize + (sections_before as usize) * section_size;
        self.write_val(write_offset, section)?;
        let section_table = self.get_mut_section_table()?;
        Ok(section_table[sections_before as usize])
    }
    /// Adds `section` after the last existing section, deriving its file offset
    /// and RVA from the previous section's extents (or from the header size
    /// when the table is empty), rounded up to file/section alignment.
    fn append_section(&mut self, section: &ImageSectionHeader) -> Result<ImageSectionHeader, Error> {
        let section_table_ro = self.get_section_table()?;
        let last_section_file_size;
        let last_section_virtual_size;
        let last_offset;
        let last_rva;
        if section_table_ro.len() == 0 {
            // no sections yet: the new section starts right after the headers
            last_section_file_size = match self.calculate_header_size() {
                Ok(s) => s as u32,
                Err(e) => return Err(e),
            };
            last_section_virtual_size = last_section_file_size;
            last_offset = Offset(0);
            last_rva = RVA(0);
        }
        else {
            let last_section = section_table_ro[section_table_ro.len()-1].clone();
            last_section_file_size = last_section.size_of_raw_data;
            last_section_virtual_size = last_section.virtual_size;
            last_offset = last_section.pointer_to_raw_data.clone();
            last_rva = last_section.virtual_address.clone();
        }
        let next_offset = self.align_to_file(Offset(last_offset.0 + last_section_file_size))?;
        let next_rva = self.align_to_section(RVA(last_rva.0 + last_section_virtual_size))?;
        let mut modified_section = *section;
        modified_section.pointer_to_raw_data = next_offset;
        modified_section.virtual_address = next_rva;
        self.add_section(&modified_section)
    }
fn validate_offset(&self, offset: Offset) -> bool {
(offset.0 as usize) < self.len()
}
fn validate_rva(&self, rva: RVA) -> bool {
let headers = match self.get_valid_nt_headers() {
Ok(h) => h,
Err(_) => return false,
};
let image_size = match headers {
NTHeaders::NTHeaders32(h32) => h32.optional_header.size_of_image,
NTHeaders::NTHeaders64(h64) => h64.optional_header.size_of_image,
};
rva.0 < image_size
}
fn validate_va(&self, va: VA) -> bool {
let headers = match self.get_valid_nt_headers() {
Ok(h) => h,
Err(_) => return false,
};
let image_base = match self.get_image_base() {
Ok(i) => i,
Err(_) => return false,
};
let image_size = match headers {
NTHeaders::NTHeaders32(h32) => h32.optional_header.size_of_image as u64,
NTHeaders::NTHeaders64(h64) => h64.optional_header.size_of_image as u64,
};
let start = image_base;
let end = start + image_size;
match va {
VA::VA32(v32) => start <= (v32.0 as u64) && (v32.0 as u64) < end,
VA::VA64(v64) => start <= v64.0 && v64.0 < end,
}
}
fn is_aligned_to_file(&self, offset: Offset) -> bool {
let alignment = match self.get_valid_nt_headers() {
Ok(h) => match h {
NTHeaders::NTHeaders32(h32) => h32.optional_header.file_alignment,
NTHeaders::NTHeaders64(h64) => h64.optional_header.file_alignment,
},
Err(_) => return false,
};
offset.0 % alignment == 0
}
fn is_aligned_to_section(&self, rva: RVA) -> bool {
let alignment = match self.get_valid_nt_headers() {
Ok(h) => match h {
NTHeaders::NTHeaders32(h32) => h32.optional_header.section_alignment,
NTHeaders::NTHeaders64(h64) => h64.optional_header.section_alignment,
},
Err(_) => return false,
};
rva.0 % alignment == 0
}
fn align_to_file(&self, offset: Offset) -> Result<Offset, Error> {
let alignment = match self.get_valid_nt_headers() {
Ok(h) => match h {
NTHeaders::NTHeaders32(h32) => h32.optional_header.file_alignment,
NTHeaders::NTHeaders64(h64) => h64.optional_header.file_alignment,
},
Err(e) => return Err(e),
};
Ok(Offset(align(offset.0, alignment)))
}
fn align_to_section(&self, rva: RVA) -> Result<RVA, Error> {
let alignment = match self.get_valid_nt_headers() {
Ok(h) => match h {
NTHeaders::NTHeaders32(h32) => h32.optional_header.section_alignment,
NTHeaders::NTHeaders64(h64) => h64.optional_header.section_alignment,
},
Err(e) => return Err(e),
};
Ok(RVA(align(rva.0, alignment)))
}
    /// Converts a file offset into an RVA using the section that contains it.
    ///
    /// Addresses not covered by any section (e.g. the headers) translate
    /// one-to-one, provided the resulting RVA is still inside the image.
    fn offset_to_rva(&self, offset: Offset) -> Result<RVA, Error> {
        if !self.validate_offset(offset) {
            return Err(Error::InvalidOffset(offset));
        }
        let section = match self.get_section_by_offset(offset) {
            Ok(s) => s,
            Err(e) => {
                // only SectionNotFound falls through to the identity mapping
                if let Error::SectionNotFound = e { () } else { return Err(Error::from(e)); }
                if !self.validate_rva(RVA(offset.0)) {
                    return Err(Error::InvalidRVA(RVA(offset.0)));
                }
                return Ok(RVA(offset.0));
            }
        };
        // rebase from the section's raw-data offset onto its virtual address
        let mut rva = offset.0;
        rva -= section.pointer_to_raw_data.0;
        rva += section.virtual_address.0;
        let final_rva = RVA(rva);
        if !self.validate_rva(final_rva) || !section.has_rva(final_rva) {
            return Err(Error::InvalidRVA(final_rva));
        }
        Ok(RVA(rva))
    }
    /// Converts a file offset into a VA (offset → RVA → VA).
    fn offset_to_va(&self, offset: Offset) -> Result<VA, Error> {
        if !self.validate_offset(offset) {
            return Err(Error::InvalidOffset(offset));
        }
        let rva = match self.offset_to_rva(offset) {
            Ok(r) => r,
            Err(e) => return Err(e),
        };
        self.rva_to_va(rva)
    }
    /// Converts an RVA into a file offset using the section that contains it.
    ///
    /// RVAs not covered by any section (e.g. the headers) translate
    /// one-to-one, provided the resulting offset is still inside the buffer.
    fn rva_to_offset(&self, rva: RVA) -> Result<Offset, Error> {
        if !self.validate_rva(rva) {
            return Err(Error::InvalidRVA(rva));
        }
        let section = match self.get_section_by_rva(rva) {
            Ok(s) => s,
            Err(e) => {
                // only SectionNotFound falls through to the identity mapping
                if let Error::SectionNotFound = e { () } else { return Err(Error::from(e)); }
                if !self.validate_offset(Offset(rva.0)) {
                    return Err(Error::InvalidOffset(Offset(rva.0)));
                }
                return Ok(Offset(rva.0));
            }
        };
        // rebase from the section's virtual address onto its raw-data offset
        let mut offset = rva.0;
        offset -= section.virtual_address.0;
        offset += section.pointer_to_raw_data.0;
        let final_offset = Offset(offset);
        if !self.validate_offset(final_offset) || !section.has_offset(final_offset) {
            return Err(Error::InvalidOffset(final_offset));
        }
        Ok(Offset(offset))
    }
    /// Converts an RVA into a VA by adding the image base, sized per the
    /// image's architecture.
    fn rva_to_va(&self, rva: RVA) -> Result<VA, Error> {
        if !self.validate_rva(rva) {
            return Err(Error::InvalidRVA(rva));
        }
        let image_base = match self.get_image_base() {
            Ok(i) => i,
            Err(e) => return Err(e),
        };
        let arch = match self.get_arch() {
            Ok(a) => a,
            Err(e) => return Err(e),
        };
        let va = match arch {
            Arch::X86 => VA::VA32(VA32(rva.0 + (image_base as u32))),
            Arch::X64 => VA::VA64(VA64((rva.0 as u64) + image_base)),
        };
        if !self.validate_va(va) {
            return Err(Error::InvalidVA(va));
        }
        Ok(va)
    }
    /// Converts a VA into an RVA by subtracting the image base.
    fn va_to_rva(&self, va: VA) -> Result<RVA, Error> {
        if !self.validate_va(va) {
            return Err(Error::InvalidVA(va));
        }
        let image_base = self.get_image_base()?;
        let rva = match va {
            VA::VA32(v32) => RVA(( (v32.0 as u64) - image_base ) as u32),
            VA::VA64(v64) => RVA(( v64.0 - image_base ) as u32),
        };
        if !self.validate_rva(rva) {
            return Err(Error::InvalidRVA(rva));
        }
        Ok(rva)
    }
    /// Converts a VA into a file offset (VA → RVA → offset).
    fn va_to_offset(&self, va: VA) -> Result<Offset, Error> {
        if !self.validate_va(va) {
            return Err(Error::InvalidVA(va));
        }
        let rva = self.va_to_rva(va)?;
        self.rva_to_offset(rva)
    }
    /// Converts a [`ResourceOffset`] (relative to the resource directory) into
    /// an RVA, bounds-checking it against the directory's declared size.
    fn get_resource_address(&self, offset: ResourceOffset) -> Result<RVA, Error> {
        let dir = self.get_data_directory(ImageDirectoryEntry::Resource)?;
        if offset.0 > dir.size {
            return Err(Error::OutOfBounds(dir.size as usize, offset.0 as usize));
        }
        if dir.virtual_address.0 == 0 || !self.validate_rva(dir.virtual_address) {
            return Err(Error::InvalidRVA(dir.virtual_address));
        }
        Ok(RVA(dir.virtual_address.0 + offset.0))
    }
    /// Computes the total header size: everything up to and including the data
    /// directory array and the section table.
    fn calculate_header_size(&self) -> Result<usize, Error> {
        let mut header_size = 0usize;
        let e_lfanew = self.e_lfanew()?;
        header_size = cmp::max(e_lfanew.into(), header_size);
        let data_dir_offset = self.get_data_directory_offset()?;
        header_size = cmp::max(data_dir_offset.into(), header_size);
        let data_dir_size = self.get_data_directory_size()?;
        header_size += data_dir_size * mem::size_of::<ImageDataDirectory>();
        let section_offset = self.get_section_table_offset()?;
        header_size = cmp::max(section_offset.into(), header_size);
        let section_table = self.get_section_table()?;
        header_size += section_table.len() * mem::size_of::<ImageSectionHeader>();
        Ok(header_size)
    }
    /// Computes the image's size in disk layout: the furthest end of any
    /// section's raw data, or the header size if larger.
    fn calculate_disk_size(&self) -> Result<usize, Error> {
        let mut disk_size = self.calculate_header_size()?;
        let section_table = self.get_section_table()?;
        for section in section_table {
            let section_end = (section.pointer_to_raw_data.0 as usize) + (section.size_of_raw_data as usize);
            disk_size = cmp::max(section_end, disk_size);
        }
        Ok(disk_size)
    }
    /// Computes the image's size in memory layout: the furthest virtual end of
    /// any section, rounded up to the section alignment.
    fn calculate_memory_size(&self) -> Result<usize, Error> {
        let mut memory_size = self.calculate_header_size()?;
        let section_table = self.get_section_table()?;
        for section in section_table {
            let section_end = (section.virtual_address.0 as usize) + (section.virtual_size as usize);
            memory_size = cmp::max(section_end, memory_size);
        }
        let alignment = match self.get_valid_nt_headers() {
            Ok(h) => match h {
                NTHeaders::NTHeaders32(h32) => h32.optional_header.section_alignment as usize,
                NTHeaders::NTHeaders64(h64) => h64.optional_header.section_alignment as usize,
            }
            Err(e) => return Err(e),
        };
        // round up to the next section-alignment boundary
        if memory_size % alignment != 0 {
            memory_size += alignment - (memory_size % alignment);
        }
        Ok(memory_size)
    }
    /// Computes the image's imphash: the MD5 of the comma-joined, lowercased
    /// `dll.import` entries from the import table, with `ocx`/`sys`/`dll`
    /// extensions stripped from the DLL name and ordinal imports resolved to
    /// names where known.
    fn calculate_imphash(&self) -> Result<Vec<u8>, Error> {
        let import_directory = ImportDirectory::parse(self)?;
        let mut imphash_results = Vec::<String>::new();
        for import in import_directory.descriptors {
            let dll_name = match import.get_name(self) {
                Ok(n) => match n.as_str() {
                    Ok(s) => s.to_string().to_ascii_lowercase(),
                    Err(e) => return Err(e),
                },
                Err(e) => return Err(e),
            };
            let mut imphash_dll_name = dll_name.clone();
            // strip a trailing .ocx/.sys/.dll extension, if present
            let extensions = &["ocx", "sys", "dll"];
            let name_chunks: Vec<String> = dll_name.as_str()
                .rsplitn(2, '.')
                .map(|x| x.to_string())
                .collect();
            if name_chunks.len() > 1 && extensions.contains(&name_chunks[0].as_str()) {
                imphash_dll_name = name_chunks[1].clone();
            }
            let import_entries = import.get_imports(self)?;
            for import_data in import_entries {
                let import_name = match import_data {
                    ImportData::Ordinal(x) => imphash_resolve(dll_name.as_str(), x).to_ascii_lowercase(),
                    ImportData::ImportByName(s) => s.to_string().to_ascii_lowercase(),
                };
                // each entry is "dllname.importname"
                let mut imphash_name = String::new();
                imphash_name.push_str(imphash_dll_name.as_str());
                imphash_name.push('.');
                imphash_name.push_str(import_name.as_str());
                imphash_results.push(imphash_name.clone());
            }
        }
        Ok(imphash_results.join(",").as_str().as_bytes().md5())
    }
fn recreate_image(&self, pe_type: PEType) -> Result<Vec<u8>, Error> {
let buffer_size = match pe_type {
PEType::Disk => match self.calculate_disk_size() {
Ok(s) => s,
Err(e) => return Err(e),
},
PEType::Memory => match self.get_valid_nt_headers() {
Ok(h) => {
let (mut image_size, alignment) = match h {
NTHeaders::NTHeaders32(h32) => (h32.optional_header.size_of_image as usize, h32.optional_header.section_alignment as usize),
NTHeaders::NTHeaders64(h64) => (h64.optional_header.size_of_image as usize, h64.optional_header.section_alignment as usize),
};
if image_size % alignment != 0 {
image_size += alignment - (image_size % alignment);
}
image_size
},
Err(e) => return Err(e),
}
};
let mut buffer = VecBuffer::with_initial_size(buffer_size);
let header_size = self.calculate_header_size()?;
let header_data = self.read(0, header_size)?;
buffer.write(0, header_data)?;
let section_table = self.get_section_table()?;
for section in section_table {
let section_data = section.read(self)?;
let section_size = match pe_type {
PEType::Disk => section.size_of_raw_data as usize,
PEType::Memory => section.virtual_size as usize,
};
let data_size = section_data.len();
let written_size;
if section_size > data_size {
written_size = data_size
}
else {
written_size = section_size
}
let buffer_offset = match pe_type {
PEType::Disk => section.pointer_to_raw_data,
PEType::Memory => Offset(section.virtual_address.0),
};
buffer.write(buffer_offset.into(), §ion_data[..written_size])?;
}
Ok(buffer.to_vec())
}
fn fix_image_size(&mut self) -> Result<(), Error> {
let image_size = self.calculate_memory_size()?;
match self.get_valid_mut_nt_headers() {
Ok(ref mut h) => match h {
NTHeadersMut::NTHeaders32(ref mut h32) => h32.optional_header.size_of_image = image_size as u32,
NTHeadersMut::NTHeaders64(ref mut h64) => h64.optional_header.size_of_image = image_size as u32,
},
Err(e) => return Err(e),
}
Ok(())
}
}
/// A PE image backed by a raw pointer/length pair; does not own its memory.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct PtrPE {
    // layout of the pointed-to image (disk or memory)
    pe_type: PEType,
    // non-owning view over the image bytes
    buffer: PtrBuffer,
}
impl PtrPE {
    /// Creates a [`PtrPE`] over `size` bytes at `pointer` with the given layout.
    pub fn new(pe_type: PEType, pointer: *const u8, size: usize) -> Self {
        Self {
            pe_type: pe_type,
            buffer: PtrBuffer::new(pointer, size),
        }
    }
    /// Creates a disk-layout [`PtrPE`].
    pub fn new_disk(pointer: *const u8, size: usize) -> Self {
        Self::new(PEType::Disk, pointer, size)
    }
    /// Creates a memory-layout [`PtrPE`].
    pub fn new_memory(pointer: *const u8, size: usize) -> Self {
        Self::new(PEType::Memory, pointer, size)
    }
    /// Builds a memory-layout [`PtrPE`] from a pointer to a loaded image,
    /// deriving the size from the headers' `size_of_image` field.
    ///
    /// # Safety
    /// `ptr` must point to readable memory containing a well-formed PE image:
    /// the DOS and NT headers are dereferenced directly, and the resulting
    /// object spans `size_of_image` bytes from `ptr`.
    pub unsafe fn from_memory(ptr: *const u8) -> Result<Self, Error> {
        let dos_header = &*(ptr as *const ImageDOSHeader);
        if dos_header.e_magic != DOS_SIGNATURE {
            return Err(Error::InvalidDOSSignature(dos_header.e_magic));
        }
        let nt_header = &*(ptr.add(dos_header.e_lfanew.0 as usize) as *const ImageNTHeaders32);
        if nt_header.signature != NT_SIGNATURE {
            return Err(Error::InvalidPESignature(nt_header.signature));
        }
        let image_size;
        if nt_header.optional_header.magic == HDR32_MAGIC {
            image_size = nt_header.optional_header.size_of_image as usize;
        }
        else if nt_header.optional_header.magic == HDR64_MAGIC {
            // reinterpret the headers with the 64-bit layout to read the
            // correctly-positioned size_of_image field
            let nt_header_64 = &*(ptr.add(dos_header.e_lfanew.0 as usize) as *const ImageNTHeaders64);
            image_size = nt_header_64.optional_header.size_of_image as usize;
        }
        else {
            return Err(Error::InvalidNTSignature(nt_header.optional_header.magic));
        }
        Ok(Self::new_memory(ptr, image_size))
    }
    /// Copies this image into an owned [`VecPE`] with the same layout.
    pub fn to_vecpe(&self) -> VecPE {
        VecPE::from_data(self.pe_type, self.as_slice())
    }
    /// Borrows the underlying [`PtrBuffer`].
    pub fn get_buffer(&self) -> &PtrBuffer {
        &self.buffer
    }
    /// Mutably borrows the underlying [`PtrBuffer`].
    pub fn get_mut_buffer(&mut self) -> &mut PtrBuffer {
        &mut self.buffer
    }
}
impl PE for PtrPE {
/// Returns whether this image uses disk-file or loaded-memory layout.
fn get_type(&self) -> PEType { self.pe_type }
#[cfg(feature="win32")]
/// Always `false`: a `PtrPE` borrows its memory and never owns an allocation.
fn is_allocated(&self) -> bool { false }
}
// Delegate the entire pkbuffer `Buffer` interface to the wrapped `PtrBuffer`.
impl Buffer for PtrPE {
fn len(&self) -> usize { self.buffer.len() }
fn as_ptr(&self) -> *const u8 { self.buffer.as_ptr() }
fn as_mut_ptr(&mut self) -> *mut u8 { self.buffer.as_mut_ptr() }
fn as_slice(&self) -> &[u8] { self.buffer.as_slice() }
fn as_mut_slice(&mut self) -> &mut [u8] { self.buffer.as_mut_slice() }
}
/// Allow indexing a [`PtrPE`] with anything a `[u8]` slice accepts
/// (single indices and ranges alike).
impl<Idx: slice::SliceIndex<[u8]>> Index<Idx> for PtrPE {
    type Output = Idx::Output;
    fn index(&self, index: Idx) -> &Self::Output {
        // Bracket indexing on the slice desugars to the same `Index::index` call.
        &self.as_slice()[index]
    }
}
/// Mutable counterpart of the `Index` impl: delegate to slice indexing.
impl<Idx: slice::SliceIndex<[u8]>> IndexMut<Idx> for PtrPE {
    fn index_mut(&mut self, index: Idx) -> &mut Self::Output {
        &mut self.as_mut_slice()[index]
    }
}
/// A PE image backed by an owned, growable byte buffer ([`VecBuffer`]).
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct VecPE {
// Whether the bytes are laid out as a disk file or a loaded memory image.
pe_type: PEType,
// Owned storage for the image bytes.
buffer: VecBuffer,
}
impl VecPE {
pub fn new(pe_type: PEType, size: usize) -> Self {
let buffer = VecBuffer::with_initial_size(size);
Self { pe_type, buffer }
}
pub fn new_disk(size: usize) -> Self {
Self::new(PEType::Disk, size)
}
pub fn new_memory(size: usize) -> Self {
Self::new(PEType::Memory, size)
}
pub fn from_file<P: AsRef<Path>>(pe_type: PEType, filename: P) -> Result<Self, Error> {
let buffer = VecBuffer::from_file(filename)?;
Ok(Self { pe_type, buffer })
}
pub fn from_disk_file<P: AsRef<Path>>(filename: P) -> Result<Self, Error> {
Self::from_file(PEType::Disk, filename)
}
pub fn from_memory_file<P: AsRef<Path>>(filename: P) -> Result<Self, Error> {
Self::from_file(PEType::Memory, filename)
}
pub fn from_data<B: AsRef<[u8]>>(pe_type: PEType, data: B) -> Self {
let buffer = VecBuffer::from_data(data);
Self { pe_type, buffer }
}
pub fn from_disk_data<B: AsRef<[u8]>>(data: B) -> Self {
Self::from_data(PEType::Disk, data)
}
pub fn from_memory_data<B: AsRef<[u8]>>(data: B) -> Self {
Self::from_data(PEType::Memory, data)
}
pub fn from_assembly<B: AsRef<[u8]>>(arch: Arch, asm_ref: B, entrypoint: Offset) -> Result<Self, Error> {
let asm_data = asm_ref.as_ref();
let mut result = Self::new_disk(0x400);
result.write_val(0, &ImageDOSHeader::default()).map_err(Error::from)?;
let e_lfanew = result.e_lfanew()?;
match arch {
Arch::X86 => result.write_val(e_lfanew.into(), &ImageNTHeaders32::default()).map_err(Error::from)?,
Arch::X64 => result.write_val(e_lfanew.into(), &ImageNTHeaders64::default()).map_err(Error::from)?,
}
let mut new_section = ImageSectionHeader::default();
new_section.set_name(Some(".text"));
let mut appended_section = result.append_section(&new_section)?;
appended_section.size_of_raw_data = asm_data.len() as u32;
appended_section.virtual_size = appended_section.size_of_raw_data;
appended_section.characteristics = SectionCharacteristics::MEM_EXECUTE
| SectionCharacteristics::MEM_READ
| SectionCharacteristics::CNT_CODE;
let new_entrypoint = RVA(entrypoint.0 + appended_section.virtual_address.0);
result.fix_image_size()?;
if !result.validate_rva(new_entrypoint) {
return Err(Error::InvalidOffset(entrypoint));
}
result.append(asm_data);
match result.get_valid_mut_nt_headers() {
Ok(ref mut h) => match h {
NTHeadersMut::NTHeaders32(ref mut h32) => h32.optional_header.address_of_entry_point = new_entrypoint,
NTHeadersMut::NTHeaders64(ref mut h64) => h64.optional_header.address_of_entry_point = new_entrypoint,
},
Err(e) => return Err(e),
}
Ok(result)
}
pub fn as_ptr_pe(&self) -> PtrPE {
PtrPE::new(self.pe_type, self.as_ptr(), self.len())
}
pub fn get_buffer(&self) -> &VecBuffer {
&self.buffer
}
pub fn get_mut_buffer(&mut self) -> &mut VecBuffer {
&mut self.buffer
}
pub fn append<B: AsRef<[u8]>>(&mut self, data: B) {
self.buffer.append(data);
}
pub fn append_val<T: Copy>(&mut self, data: &T) -> Result<(), Error> {
self.buffer.append_val(data); Ok(())
}
pub fn append_slice_val<T: Copy>(&mut self, data: &[T]) -> Result<(), Error> {
self.buffer.append_slice_val(data); Ok(())
}
pub fn insert(&mut self, offset: usize, element: u8) {
self.buffer.insert(offset, element);
}
pub fn remove(&mut self, offset: usize) {
self.buffer.remove(offset);
}
pub fn push(&mut self, byte: u8) {
self.buffer.push(byte);
}
pub fn pop(&mut self) -> Option<u8> {
self.buffer.pop()
}
pub fn clear(&mut self) {
self.buffer.clear();
}
pub fn resize_with<F>(&mut self, new_len: usize, f: F)
where
F: FnMut() -> u8,
{
self.buffer.resize_with(new_len, f);
}
pub fn resize(&mut self, new_len: usize, value: u8) {
self.buffer.resize(new_len, value);
}
pub fn truncate(&mut self, len: usize) {
self.buffer.truncate(len);
}
pub fn pad_to_file_alignment(&mut self) -> Result<(), Error> {
let current_offset = Offset(self.len() as u32);
let aligned_offset = self.align_to_file(current_offset)?;
let padding = aligned_offset.0 - current_offset.0;
if padding != 0 { self.append(&vec![0u8; padding as usize]); }
Ok(())
}
pub fn pad_to_section_alignment(&mut self) -> Result<(), Error> {
let current_rva = RVA(self.len() as u32);
let aligned_rva = self.align_to_section(current_rva)?;
let padding = aligned_rva.0 - current_rva.0;
if padding != 0 { self.append(&vec![0u8; padding as usize]); }
Ok(())
}
pub fn pad_to_alignment(&mut self) -> Result<(), Error> {
match self.pe_type {
PEType::Disk => self.pad_to_file_alignment(),
PEType::Memory => self.pad_to_section_alignment(),
}
}
}
impl PE for VecPE {
/// Returns whether this image uses disk-file or loaded-memory layout.
fn get_type(&self) -> PEType { self.pe_type }
#[cfg(feature="win32")]
/// Always `false`: a `VecPE` owns a plain Vec-backed buffer, not a platform allocation.
fn is_allocated(&self) -> bool { false }
}
// Delegate the entire pkbuffer `Buffer` interface to the wrapped `VecBuffer`.
impl Buffer for VecPE {
fn len(&self) -> usize { self.buffer.len() }
fn as_ptr(&self) -> *const u8 { self.buffer.as_ptr() }
fn as_mut_ptr(&mut self) -> *mut u8 { self.buffer.as_mut_ptr() }
fn as_slice(&self) -> &[u8] { self.buffer.as_slice() }
fn as_mut_slice(&mut self) -> &mut [u8] { self.buffer.as_mut_slice() }
}
/// Allow indexing a [`VecPE`] with anything a `[u8]` slice accepts
/// (single indices and ranges alike).
impl<Idx: slice::SliceIndex<[u8]>> Index<Idx> for VecPE {
    type Output = Idx::Output;
    fn index(&self, index: Idx) -> &Self::Output {
        // Bracket indexing on the slice desugars to the same `Index::index` call.
        &self.as_slice()[index]
    }
}
/// Mutable counterpart of the `Index` impl: delegate to slice indexing.
impl<Idx: slice::SliceIndex<[u8]>> IndexMut<Idx> for VecPE {
    fn index_mut(&mut self, index: Idx) -> &mut Self::Output {
        &mut self.as_mut_slice()[index]
    }
}