use std::collections::HashMap;
use crate::CilObject;
/// VTable fixup entry flags, mirroring the `COR_VTABLE_*` constants from
/// `corhdr.h` (ECMA-335 §II.15.5.1 / §II.25.3.3.3).
///
/// Slots in this entry's table are 32 bits wide.
pub const COR_VTABLE_32BIT: u16 = 0x01;
/// Slots in this entry's table are 64 bits wide (selects 8-byte slot decoding
/// in `parse`).
pub const COR_VTABLE_64BIT: u16 = 0x02;
/// Transition from unmanaged code is required when calling through this slot.
pub const COR_VTABLE_FROM_UNMANAGED: u16 = 0x04;
/// The app domain should be retained across the unmanaged→managed transition.
/// NOTE(review): semantics taken from corhdr.h; not used by this module yet.
pub const COR_VTABLE_RETAIN_APPDOMAIN: u16 = 0x08;
/// Call the most derived method described by the token (virtual dispatch),
/// per corhdr.h; not used by this module yet.
pub const COR_VTABLE_CALL_MOST_DERIVED: u16 = 0x10;
/// One record of the VTable fixup directory.
///
/// On disk each record is an 8-byte header — `rva` (u32), `count` (u16),
/// `flags` (u16) — describing an array of `count` slots located at `rva`,
/// each slot holding a metadata token.
pub struct VtFixupEntry {
    /// RVA of the slot array this entry describes.
    pub rva: u32,
    /// Number of slots in the array.
    pub count: u16,
    /// `COR_VTABLE_*` bit flags; `COR_VTABLE_64BIT` selects 8-byte slots.
    pub flags: u16,
    /// Tokens decoded from the slots. A slot that could not be read is
    /// recorded as 0; the whole vector is empty if the array was unmappable.
    pub tokens: Vec<u32>,
}
/// Parsed VTable fixup directory plus derived reverse-lookup tables.
pub struct VtFixupContext {
    /// Raw fixup entries in directory order.
    pub entries: Vec<VtFixupEntry>,
    /// Metadata token → list of 1-based `(entry index, slot index)` positions
    /// where that token appears.
    pub vtentry_map: HashMap<u32, Vec<(usize, usize)>>,
    /// Metadata token → `(ordinal, name)` of the native export whose address
    /// falls on that token's slot.
    pub export_map: HashMap<u32, (u16, Option<String>)>,
}
pub fn parse(asm: &CilObject) -> Option<VtFixupContext> {
let header = asm.cor20header();
let rva = header.vtable_fixups_rva;
let size = header.vtable_fixups_size;
if rva == 0 || size == 0 {
return None;
}
let file = asm.file();
let offset = file.rva_to_offset(rva as usize).ok()?;
let data = file.data_slice(offset, size as usize).ok()?;
let num_entries = (size as usize) / 8;
let mut entries = Vec::with_capacity(num_entries);
for i in 0..num_entries {
let base = i * 8;
if base + 8 > data.len() {
break;
}
let entry_rva =
u32::from_le_bytes([data[base], data[base + 1], data[base + 2], data[base + 3]]);
let count = u16::from_le_bytes([data[base + 4], data[base + 5]]);
let flags = u16::from_le_bytes([data[base + 6], data[base + 7]]);
let slot_size: usize = if flags & COR_VTABLE_64BIT != 0 { 8 } else { 4 };
let mut tokens = Vec::with_capacity(count as usize);
if let Ok(tok_offset) = file.rva_to_offset(entry_rva as usize) {
let tok_data_len = (count as usize) * slot_size;
if let Ok(tok_data) = file.data_slice(tok_offset, tok_data_len) {
for j in 0..count as usize {
let slot_base = j * slot_size;
let token = if slot_size == 8 {
if slot_base + 8 <= tok_data.len() {
u64::from_le_bytes([
tok_data[slot_base],
tok_data[slot_base + 1],
tok_data[slot_base + 2],
tok_data[slot_base + 3],
tok_data[slot_base + 4],
tok_data[slot_base + 5],
tok_data[slot_base + 6],
tok_data[slot_base + 7],
]) as u32
} else {
0
}
} else if slot_base + 4 <= tok_data.len() {
u32::from_le_bytes([
tok_data[slot_base],
tok_data[slot_base + 1],
tok_data[slot_base + 2],
tok_data[slot_base + 3],
])
} else {
0
};
tokens.push(token);
}
}
}
entries.push(VtFixupEntry {
rva: entry_rva,
count,
flags,
tokens,
});
}
let mut vtentry_map: HashMap<u32, Vec<(usize, usize)>> = HashMap::new();
for (i, entry) in entries.iter().enumerate() {
for (j, &token) in entry.tokens.iter().enumerate() {
if token != 0 {
vtentry_map.entry(token).or_default().push((i + 1, j + 1));
}
}
}
let mut export_map: HashMap<u32, (u16, Option<String>)> = HashMap::new();
for func in asm.exports().native().functions() {
if func.is_forwarder || func.address == 0 {
continue;
}
let addr = func.address;
for entry in &entries {
let slot_size: u32 = if entry.flags & COR_VTABLE_64BIT != 0 {
8
} else {
4
};
let range_end = entry
.rva
.saturating_add(u32::from(entry.count).saturating_mul(slot_size));
if addr >= entry.rva && addr < range_end {
let slot_offset = addr - entry.rva;
if slot_offset % slot_size == 0 {
let slot_idx = (slot_offset / slot_size) as usize;
if let Some(&token) = entry.tokens.get(slot_idx) {
if token != 0 {
export_map.insert(token, (func.ordinal, func.name.clone()));
}
}
}
break;
}
}
}
Some(VtFixupContext {
entries,
vtentry_map,
export_map,
})
}