mod cu_entry;
use core::ops::Range;
pub(crate) use cu_entry::CompactUnwindEntry;
use rangemap::RangeMap;
use std::sync::{LazyLock, Mutex};
use wasmer_types::CompileError;
/// Shorthand for results produced while building/registering compact-unwind data.
type CUResult<T> = Result<T, CompileError>;
/// Mirror of libunwind's `unw_dynamic_unwind_sections` out-parameter struct.
///
/// Filled in by our [`find_dynamic_unwind_sections`] callback; the layout must
/// stay byte-compatible with the C definition, hence `#[repr(C)]` and `u64`
/// fields regardless of pointer width.
#[repr(C)]
#[derive(Debug)]
pub struct UnwDynamicUnwindSections {
    // Load address of the image the queried PC belongs to.
    dso_base: u64,
    // Address/length of an `eh_frame`-style DWARF section (unused here, left 0).
    dwarf_section: u64,
    dwarf_section_length: u64,
    // Address/length of the generated `__unwind_info` (compact unwind) section.
    compact_unwind_section: u64,
    compact_unwind_section_length: u64,
}
/// Signature of the lookup callback registered with libunwind: given a PC,
/// fill `info` and return non-zero on success, zero if `addr` is unknown.
type UnwFindDynamicUnwindSections =
    unsafe extern "C" fn(addr: usize, info: *mut UnwDynamicUnwindSections) -> u32;
// Private LLVM libunwind entry points for registering dynamically generated
// unwind data (see libunwind's `libunwind_ext.h`). These are resolved at link
// time against the system/LLVM libunwind.
unsafe extern "C" {
    /// Register a callback that libunwind consults to locate unwind sections
    /// for JIT'd code. Returns 0 on success (libunwind convention).
    pub fn __unw_add_find_dynamic_unwind_sections(
        find_dynamic_unwind_sections: UnwFindDynamicUnwindSections,
    ) -> u32;
    /// Remove a callback previously added with
    /// `__unw_add_find_dynamic_unwind_sections`.
    pub fn __unw_remove_find_dynamic_unwind_sections(
        find_dynamic_unwind_sections: UnwFindDynamicUnwindSections,
    ) -> u32;
    /// Register / deregister a dynamic `eh_frame` section by start address.
    pub fn __unw_add_dynamic_eh_frame_section(eh_frame_start: usize);
    pub fn __unw_remove_dynamic_eh_frame_section(eh_frame_start: usize);
}
/// Serialization helper used by `CompactUnwindManager::write` to append
/// fixed-width integers to the in-memory `__unwind_info` section.
trait ToBytes {
    /// Return the value's native-endian byte representation.
    fn to_bytes(&self) -> Vec<u8>;
}
impl ToBytes for u32 {
    /// Native-endian bytes of this `u32` (4 bytes).
    fn to_bytes(&self) -> Vec<u8> {
        Vec::from(self.to_ne_bytes())
    }
}
impl ToBytes for u16 {
    /// Native-endian bytes of this `u16` (2 bytes).
    fn to_bytes(&self) -> Vec<u8> {
        Vec::from(self.to_ne_bytes())
    }
}
/// Builder and registry for an Apple `__unwind_info` section synthesized from
/// `__compact_unwind` records of JIT-compiled code.
#[derive(Debug, Default)]
pub struct CompactUnwindManager {
    // The serialized `__unwind_info` section being built by `write_unwind_info`.
    unwind_info_section: Vec<u8>,
    // Parsed `__compact_unwind` records, sorted by function address during processing.
    compact_unwind_entries: Vec<CompactUnwindEntry>,
    // Number of second-level pages needed to hold all entries.
    num_second_level_pages: usize,
    // Number of entries that carry an LSDA pointer.
    num_lsdas: usize,
    // Distinct personality-function addresses referenced by the entries.
    personalities: Vec<usize>,
    // Image base used to turn absolute addresses into section-relative deltas.
    dso_base: usize,
    // GOT slot holding the EH personality, if one was provided at read time.
    maybe_eh_personality_addr_in_got: Option<usize>,
}
// Global map from function-address ranges to the unwind-info section that
// covers them. Queried by `find_dynamic_unwind_sections` (called from
// libunwind during unwinding), populated by `CompactUnwindManager::finalize`.
static UNWIND_INFO: LazyLock<Mutex<RangeMap<usize, UnwindInfoEntry>>> =
    LazyLock::new(|| Mutex::new(RangeMap::new()));
/// Value stored in [`UNWIND_INFO`]: locates the generated `__unwind_info`
/// section (and its image base) for a range of function addresses.
#[derive(Debug, Clone, PartialEq)]
struct UnwindInfoEntry {
    // Image base the section's deltas are relative to.
    dso_base: usize,
    // Start address and length of the leaked `__unwind_info` bytes.
    section_ptr: usize,
    section_len: usize,
}
/// libunwind callback: look up `addr` in the global [`UNWIND_INFO`] range map
/// and, on a hit, report the matching compact-unwind section. Returns 1 when
/// `info` was filled with a valid section, 0 otherwise.
unsafe extern "C" fn find_dynamic_unwind_sections(
    addr: usize,
    info: *mut UnwDynamicUnwindSections,
) -> u32 {
    // A null output pointer gives us nowhere to report into.
    let out = match unsafe { info.as_mut() } {
        Some(out) => out,
        None => return 0,
    };
    let table = UNWIND_INFO.lock().expect("cannot lock UNWIND_INFO");
    match table.get(&addr) {
        Some(entry) => {
            // Known range: hand back the compact-unwind section; we never
            // supply DWARF data through this path.
            out.dso_base = entry.dso_base as u64;
            out.dwarf_section = 0;
            out.dwarf_section_length = 0;
            out.compact_unwind_section = entry.section_ptr as u64;
            out.compact_unwind_section_length = entry.section_len as u64;
            1
        }
        None => {
            // Unknown address: zero every field and signal failure.
            out.dso_base = 0;
            out.dwarf_section = 0;
            out.dwarf_section_length = 0;
            out.compact_unwind_section = 0;
            out.compact_unwind_section_length = 0;
            0
        }
    }
}
impl CompactUnwindManager {
const UNWIND_SECTION_VERSION: u32 = 1;
const UNWIND_INFO_SECTION_HEADER_SIZE: usize = 4 * 7;
const PERSONALITY_SHIFT: usize = 28;
const PERSONALITY_ENTRY_SIZE: usize = 4;
const INDEX_ENTRY_SIZE: usize = 3 * 4;
const LSDA_ENTRY_SIZE: usize = 2 * 4;
const SECOND_LEVEL_PAGE_SIZE: usize = 4096;
const SECOND_LEVEL_PAGE_HEADER_SIZE: usize = 8;
const SECOND_LEVEL_PAGE_ENTRY_SIZE: usize = 8;
const NUM_RECORDS_PER_SECOND_LEVEL_PAGE: usize = (Self::SECOND_LEVEL_PAGE_SIZE
- Self::SECOND_LEVEL_PAGE_HEADER_SIZE)
/ Self::SECOND_LEVEL_PAGE_ENTRY_SIZE;
pub unsafe fn read_compact_unwind_section(
&mut self,
compact_unwind_section_ptr: *const u8,
len: usize,
eh_personality_addr_in_got: Option<usize>,
) -> Result<(), String> {
if eh_personality_addr_in_got.is_none() {
return Err(
"Cannot register compact_unwind entries without a personality function!".into(),
);
}
let mut offset = 0;
while offset < len {
let entry = unsafe {
CompactUnwindEntry::from_ptr_and_len(
compact_unwind_section_ptr.wrapping_add(offset),
len,
)
};
self.compact_unwind_entries.push(entry);
offset += size_of::<CompactUnwindEntry>();
}
self.maybe_eh_personality_addr_in_got = eh_personality_addr_in_got;
Ok(())
}
pub fn finalize(&mut self) -> CUResult<()> {
self.process_compact_unwind_entries()?;
self.merge_records();
if self.compact_unwind_entries.is_empty() {
return Ok(());
}
let mut info = libc::Dl_info {
dli_fname: core::ptr::null(),
dli_fbase: core::ptr::null_mut(),
dli_sname: core::ptr::null(),
dli_saddr: core::ptr::null_mut(),
};
unsafe {
if let Some(personality) = self.personalities.first() {
_ = libc::dladdr(*personality as *const _, &mut info as *mut _);
}
if info.dli_fbase.is_null() {
_ = libc::dladdr(
wasmer_vm::libcalls::wasmer_eh_personality as *const _,
&mut info as *mut _,
);
}
}
self.dso_base = info.dli_fbase as usize;
self.write_unwind_info()?;
let ranges: Vec<Range<usize>> = self
.compact_unwind_entries
.iter()
.map(|v| v.function_addr..v.function_addr + (v.length as usize))
.collect();
let data: &'static mut [u8] = self.unwind_info_section.clone().leak();
let section_ptr = data.as_ptr() as usize;
let section_len = data.len();
let dso_base = self.dso_base;
let mut uw_info = UNWIND_INFO.lock().expect("cannot lock UNWIND_INFO");
for range in ranges {
(*uw_info).insert(
range,
UnwindInfoEntry {
dso_base,
section_ptr,
section_len,
},
);
}
Ok(())
}
fn process_compact_unwind_entries(&mut self) -> CUResult<()> {
for entry in self.compact_unwind_entries.iter_mut() {
if entry.personality_addr != 0 {
let p_idx: u32 = if let Some(p_idx) = self
.personalities
.iter()
.position(|v| *v == entry.personality_addr)
{
p_idx
} else {
self.personalities.push(entry.personality_addr);
self.personalities.len() - 1
} as u32;
entry.compact_encoding |= (p_idx + 1) << Self::PERSONALITY_SHIFT;
}
if entry.lsda_addr != 0 {
self.num_lsdas += 1;
}
}
self.num_second_level_pages = self
.compact_unwind_entries
.len()
.div_ceil(Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE);
self.compact_unwind_entries
.sort_by(|l, r| l.function_addr.cmp(&r.function_addr));
let unwind_info_section_len = Self::UNWIND_INFO_SECTION_HEADER_SIZE
+ (self.personalities.len() * Self::PERSONALITY_ENTRY_SIZE)
+ ((self.num_second_level_pages + 1) * Self::INDEX_ENTRY_SIZE)
+ (self.num_lsdas * Self::LSDA_ENTRY_SIZE)
+ (self.num_second_level_pages * Self::SECOND_LEVEL_PAGE_HEADER_SIZE)
+ (self.compact_unwind_entries.len() * Self::SECOND_LEVEL_PAGE_ENTRY_SIZE);
self.unwind_info_section = Vec::with_capacity(unwind_info_section_len);
Ok(())
}
fn write_unwind_info(&mut self) -> CUResult<()> {
self.write_header()?;
self.write_personalities()?;
self.write_indices()?;
self.write_lsdas()?;
self.write_second_level_pages()?;
Ok(())
}
fn merge_records(&mut self) {
if self.compact_unwind_entries.len() <= 1 {
self.num_second_level_pages = 1;
return;
}
let non_unique: Vec<CompactUnwindEntry> = self.compact_unwind_entries.drain(1..).collect();
for next in non_unique.into_iter() {
let last = self.compact_unwind_entries.last().unwrap();
if next.is_dwarf()
|| (next.compact_encoding != last.compact_encoding)
|| next.cannot_be_merged()
|| next.lsda_addr != 0
|| last.lsda_addr != 0
{
self.compact_unwind_entries.push(next);
}
}
self.num_second_level_pages = self
.compact_unwind_entries
.len()
.div_ceil(Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE);
}
#[inline(always)]
fn write<T: ToBytes>(&mut self, value: T) -> CUResult<()> {
let bytes = value.to_bytes();
let capacity = self.unwind_info_section.capacity();
let len = self.unwind_info_section.len();
if len + bytes.len() > capacity {
return Err(CompileError::Codegen(
"writing the unwind_info after the allocated bytes".into(),
));
}
for byte in bytes {
self.unwind_info_section.push(byte);
}
Ok(())
}
fn write_header(&mut self) -> CUResult<()> {
let num_personalities = self.personalities.len() as u32;
let index_section_offset: u32 = (Self::UNWIND_INFO_SECTION_HEADER_SIZE
+ self.personalities.len() * Self::PERSONALITY_ENTRY_SIZE)
as u32;
let index_count = (self.num_second_level_pages + 1) as u32;
self.write(Self::UNWIND_SECTION_VERSION)?;
self.write(Self::UNWIND_INFO_SECTION_HEADER_SIZE as u32)?;
self.write(0u32)?;
self.write(Self::UNWIND_INFO_SECTION_HEADER_SIZE as u32)?;
self.write(num_personalities)?;
self.write(index_section_offset)?;
self.write(index_count + 1)?;
Ok(())
}
fn write_personalities(&mut self) -> CUResult<()> {
let personalities = self.personalities.len();
for _ in 0..personalities {
let personality_pointer =
if let Some(personality) = self.maybe_eh_personality_addr_in_got {
personality
} else {
return Err(CompileError::Codegen(
"Personality function does not appear in GOT table!".into(),
));
};
let delta = (personality_pointer - self.dso_base) as u32;
self.write(delta)?;
}
Ok(())
}
fn write_indices(&mut self) -> CUResult<()> {
let section_offset_to_lsdas: usize = self.unwind_info_section.len()
+ ((self.num_second_level_pages + 1) * Self::INDEX_ENTRY_SIZE);
let section_offset_to_second_level_pages =
section_offset_to_lsdas + (self.num_lsdas * Self::LSDA_ENTRY_SIZE);
let mut num_previous_lsdas = 0;
let num_entries = self.compact_unwind_entries.len();
for entry_idx in 0..num_entries {
let entry = &self.compact_unwind_entries[entry_idx];
let lsda_addr = entry.lsda_addr;
if entry_idx % Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE == 0 {
let fn_delta = entry.function_addr.wrapping_sub(self.dso_base);
let num_second_level_page = entry_idx / Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE;
let mut second_level_page_offset = section_offset_to_second_level_pages;
if num_second_level_page != 0 {
second_level_page_offset += num_second_level_page
* (Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE
* Self::SECOND_LEVEL_PAGE_ENTRY_SIZE);
second_level_page_offset +=
num_second_level_page * Self::SECOND_LEVEL_PAGE_HEADER_SIZE;
}
let lsda_offset =
section_offset_to_lsdas + num_previous_lsdas * Self::LSDA_ENTRY_SIZE;
self.write(fn_delta as u32)?;
self.write(second_level_page_offset as u32)?;
self.write(lsda_offset as u32)?;
}
if lsda_addr != 0 {
num_previous_lsdas += 1;
}
}
if let Some(last_entry) = self.compact_unwind_entries.last() {
let fn_end_delta = (last_entry.function_addr + (last_entry.length as usize))
.wrapping_sub(self.dso_base) as u32;
self.write(fn_end_delta)?;
self.write(0u32)?;
self.write(section_offset_to_second_level_pages as u32)?;
}
Ok(())
}
fn write_lsdas(&mut self) -> CUResult<()> {
let num_entries = self.compact_unwind_entries.len();
for entry_idx in 0..num_entries {
let entry = &self.compact_unwind_entries[entry_idx];
if entry.lsda_addr != 0 {
let fn_delta = entry.function_addr.wrapping_sub(self.dso_base);
let lsda_delta = entry.lsda_addr.wrapping_sub(self.dso_base);
self.write(fn_delta as u32)?;
self.write(lsda_delta as u32)?;
}
}
Ok(())
}
fn write_second_level_pages(&mut self) -> CUResult<()> {
let num_entries = self.compact_unwind_entries.len();
for entry_idx in 0..num_entries {
let entry = &self.compact_unwind_entries[entry_idx];
let fn_delta = entry.function_addr.wrapping_sub(self.dso_base) as u32;
let encoding = entry.compact_encoding;
if entry_idx % Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE == 0 {
const SECOND_LEVEL_PAGE_HEADER_KIND: u32 = 2;
const SECOND_LEVEL_PAGE_HEADER_SIZE: u16 = 8;
let second_level_page_num_entries: u16 = std::cmp::min(
num_entries - entry_idx,
Self::NUM_RECORDS_PER_SECOND_LEVEL_PAGE,
) as u16;
self.write(SECOND_LEVEL_PAGE_HEADER_KIND)?;
self.write(SECOND_LEVEL_PAGE_HEADER_SIZE)?;
self.write(second_level_page_num_entries)?;
}
self.write(fn_delta)?;
self.write(encoding)?;
}
Ok(())
}
pub(crate) fn deregister(&self) {
if self.dso_base != 0 {
let ranges: Vec<Range<usize>> = self
.compact_unwind_entries
.iter()
.map(|v| v.function_addr..v.function_addr + (v.length as usize))
.collect();
let mut uw_info = UNWIND_INFO.lock().expect("cannot lock UNWIND_INFO");
for range in ranges {
(*uw_info).remove(range);
}
}
}
pub(crate) fn register(&self) {
unsafe {
if self.dso_base != 0 {
__unw_add_find_dynamic_unwind_sections(find_dynamic_unwind_sections);
}
}
}
}