#![allow(clippy::cmp_null)]
use crate::arch::{
dynamic_table_addr, ehdr_addr, relocation_load, relocation_mprotect_readonly, relocation_store,
trap,
};
use core::ffi::c_void;
use core::mem;
use core::ptr::{null, null_mut};
use linux_raw_sys::elf::*;
use linux_raw_sys::general::{AT_BASE, AT_ENTRY, AT_NULL, AT_PAGESZ};
/// A `DT_RELR` table entry: an even value is a relocation address, an odd
/// value is a bitmap of word-size slots to relocate (see the RELR loop in
/// `relocate`).
#[allow(non_camel_case_types)]
type Elf_Relr = usize;
// RELR dynamic-table tags, with the values assigned by the generic ELF ABI.
// Defined locally — presumably not provided by the `linux_raw_sys::elf`
// bindings in use; TODO confirm and drop these if the bindings gain them.
const DT_RELRSZ: usize = 35;
const DT_RELR: usize = 36;
// Only used by the debug-build table-entry-size checks below.
#[cfg(debug_assertions)]
const DT_RELRENT: usize = 37;
/// Debug-only equality check that calls `trap()` on failure instead of
/// panicking. In release builds this macro is absent and the prelude's
/// (no-op) `debug_assert_eq!` applies instead.
#[cfg(debug_assertions)]
macro_rules! debug_assert_eq {
    ($left:expr, $right:expr) => {
        match $left == $right {
            true => {}
            false => trap(),
        }
    };
}
/// Apply this program's own relative dynamic relocations (self-relocation
/// for a position-independent executable), then make the `PT_GNU_RELRO`
/// region read-only.
///
/// This runs before any relocations have been applied, so it must not rely
/// on anything that itself requires relocation; on bad input it `trap()`s
/// (via the local `debug_assert_eq!` or directly) rather than panicking.
///
/// # Safety
///
/// `envp` must be the environment-pointer array passed by the kernel at
/// process startup: null-terminated and immediately followed in memory by
/// the ELF auxiliary vector.
#[cold]
pub(super) unsafe fn relocate(envp: *mut *mut u8) {
    unsafe {
        // The aux vector lives directly after `envp`'s null terminator.
        let auxp = compute_auxp(envp);

        // Scan the aux vector for the three values we need.
        let mut auxv_base = null_mut(); // AT_BASE: load base (may be null)
        let mut auxv_page_size = 0; // AT_PAGESZ: system page size
        let mut auxv_entry = null_mut(); // AT_ENTRY: runtime entry-point address
        let mut current_aux = auxp;
        loop {
            let Elf_auxv_t { a_type, a_val } = *current_aux;
            current_aux = current_aux.add(1);
            // `as _`: the auxv tag field and the AT_* constants differ in type.
            match a_type as _ {
                AT_BASE => auxv_base = a_val,
                AT_PAGESZ => auxv_page_size = a_val.addr(),
                AT_ENTRY => auxv_entry = a_val,
                AT_NULL => break,
                _ => (),
            }
        }

        // If the relocation-observing load of `_start`'s address already
        // matches AT_ENTRY, relocation has already happened (or none is
        // needed); don't relocate twice.
        if load_static_start() == auxv_entry.addr() {
            return;
        }

        let the_ehdr = &*ehdr_addr();

        // Compute the load base: use AT_BASE when present, otherwise derive
        // it as the difference between the runtime entry (AT_ENTRY) and the
        // link-time entry recorded in the ELF header.
        let base = if auxv_base == null_mut() {
            let static_start = the_ehdr.e_entry;
            auxv_entry.wrapping_sub(static_start)
        } else {
            auxv_base
        };
        let offset = base.addr();

        // Locate the RELA/REL/RELR tables by walking the dynamic section.
        let dynv = dynamic_table_addr();
        let mut rela_ptr: *const Elf_Rela = null();
        let mut rela_total_size = 0;
        let mut rel_ptr: *const Elf_Rel = null();
        let mut rel_total_size = 0;
        let mut relr_ptr: *const Elf_Relr = null();
        let mut relr_total_size = 0;
        let mut current_dyn: *const Elf_Dyn = dynv;
        loop {
            let Elf_Dyn { d_tag, d_un } = &*current_dyn;
            current_dyn = current_dyn.add(1);
            match *d_tag {
                DT_RELA => rela_ptr = base.byte_add(d_un.d_ptr).cast::<Elf_Rela>(),
                DT_RELASZ => rela_total_size = d_un.d_val as usize,
                // Debug builds: check each table's entry size matches our types.
                #[cfg(debug_assertions)]
                DT_RELAENT => debug_assert_eq!(d_un.d_val as usize, size_of::<Elf_Rela>()),
                DT_REL => rel_ptr = base.byte_add(d_un.d_ptr).cast::<Elf_Rel>(),
                DT_RELSZ => rel_total_size = d_un.d_val as usize,
                #[cfg(debug_assertions)]
                DT_RELENT => debug_assert_eq!(d_un.d_val as usize, size_of::<Elf_Rel>()),
                DT_RELR => relr_ptr = base.byte_add(d_un.d_ptr).cast::<Elf_Relr>(),
                DT_RELRSZ => relr_total_size = d_un.d_val as usize,
                #[cfg(debug_assertions)]
                DT_RELRENT => debug_assert_eq!(d_un.d_val as usize, size_of::<Elf_Relr>()),
                DT_NULL => break,
                _ => (),
            }
        }

        // Apply DT_RELA relocations: the addend is stored in the entry.
        // `*reloc_addr = offset + addend`.
        let mut current_rela = rela_ptr;
        let rela_end = current_rela.byte_add(rela_total_size);
        while current_rela != rela_end {
            let rela = &*current_rela;
            current_rela = current_rela.add(1);
            let reloc_addr = rela.r_offset.wrapping_add(offset);
            match rela.type_() {
                R_RELATIVE => {
                    let addend = rela.r_addend;
                    let reloc_value = addend.wrapping_add(offset);
                    relocation_store(reloc_addr, reloc_value);
                }
                // Only relative relocations are supported here.
                _ => trap(),
            }
        }

        // Apply DT_REL relocations: the addend is stored in the target slot.
        // `*reloc_addr += offset`.
        let mut current_rel = rel_ptr;
        let rel_end = current_rel.byte_add(rel_total_size);
        while current_rel != rel_end {
            let rel = &*current_rel;
            current_rel = current_rel.add(1);
            let reloc_addr = rel.r_offset.wrapping_add(offset);
            match rel.type_() {
                R_RELATIVE => {
                    let addend = relocation_load(reloc_addr);
                    let reloc_value = addend.wrapping_add(offset);
                    relocation_store(reloc_addr, reloc_value);
                }
                _ => trap(),
            }
        }

        // Apply DT_RELR relocations: a packed encoding of relative
        // relocations. An even entry sets the next relocation address (and
        // relocates that word); an odd entry is a bitmap whose bits above
        // bit 0 mark which of the following word-size slots to relocate.
        let mut current_relr = relr_ptr;
        let relr_end = current_relr.byte_add(relr_total_size);
        let mut reloc_addr = 0;
        while current_relr != relr_end {
            let mut entry = *current_relr;
            current_relr = current_relr.add(1);
            if entry & 1 == 0 {
                // Address entry: relocate this word, then advance past it.
                // NOTE(review): plain `+` here where sibling paths use
                // `wrapping_add`; consider wrapping_add for consistency.
                reloc_addr = offset + entry;
                let addend = relocation_load(reloc_addr);
                let reloc_value = addend.wrapping_add(offset);
                relocation_store(reloc_addr, reloc_value);
                reloc_addr += mem::size_of::<usize>();
            } else {
                // Bitmap entry: after the first shift, bit 0 at iteration `i`
                // corresponds to the word at `reloc_addr + i * word_size`.
                let mut i = 0;
                loop {
                    entry >>= 1;
                    if entry == 0 {
                        break;
                    }
                    if entry & 1 != 0 {
                        let addend = relocation_load(reloc_addr + i * mem::size_of::<usize>());
                        let reloc_value = addend.wrapping_add(offset);
                        relocation_store(reloc_addr + i * mem::size_of::<usize>(), reloc_value);
                    }
                    i += 1;
                }
                // Each bitmap covers (word bits - 1) slots, e.g. 63 on 64-bit.
                reloc_addr += (mem::size_of::<usize>() * 8 - 1) * mem::size_of::<usize>();
            }
        }

        // Sanity checks; these trap (not panic) in debug builds.
        debug_assert!(auxv_page_size.is_power_of_two());
        debug_assert_eq!(offset & (auxv_page_size - 1), 0);
        if auxv_base == null_mut() {
            // Relocations are applied now, so the static should read back the
            // true runtime entry address.
            debug_assert_eq!(load_static_start(), auxv_entry.addr());
        }

        // Walk the program headers: validate the dynamic-section address in
        // debug builds, and find the PT_GNU_RELRO region.
        let mut relro = 0;
        let mut relro_size = 0;
        let phentsize = the_ehdr.e_phentsize as usize;
        let mut current_phdr = base.byte_add(the_ehdr.e_phoff).cast::<Elf_Phdr>();
        let phdrs_end = current_phdr.byte_add(the_ehdr.e_phnum as usize * phentsize);
        while current_phdr != phdrs_end {
            let phdr = &*current_phdr;
            current_phdr = current_phdr.byte_add(phentsize);
            match phdr.p_type {
                // Plain `assert_eq!` (panicking) is usable here because
                // relocations have been applied above.
                #[cfg(debug_assertions)]
                PT_DYNAMIC => {
                    assert_eq!(dynv, base.byte_add(phdr.p_vaddr).cast::<Elf_Dyn>());
                }
                PT_GNU_RELRO => {
                    relro = phdr.p_vaddr;
                    relro_size = phdr.p_memsz;
                }
                _ => (),
            }
        }

        // Relocation is complete: re-protect the RELRO region read-only,
        // rounding the start down to a page boundary.
        if relro_size != 0 {
            let mprotect_addr = relro.wrapping_add(offset) & auxv_page_size.wrapping_neg();
            relocation_mprotect_readonly(mprotect_addr, relro_size);
        }
    }
}
/// Locate the ELF auxiliary vector, which sits immediately after the null
/// terminator of the environment-pointer array.
///
/// # Safety
///
/// `envp` must point to a null-terminated array of pointers followed in
/// memory by the aux vector.
unsafe fn compute_auxp(envp: *mut *mut u8) -> *const Elf_auxv_t {
    unsafe {
        // Step over every environment entry, including the trailing null,
        // leaving the cursor on the first auxv record.
        let mut cursor = envp;
        loop {
            let entry = *cursor;
            cursor = cursor.add(1);
            if entry == null_mut() {
                break;
            }
        }
        cursor.cast()
    }
}
/// Load the address of `_start` out of a static, through `relocation_load`.
///
/// `STATIC_START` holds `_start`'s address and therefore needs a relative
/// relocation itself. Reading it via `relocation_load` (rather than a plain
/// read the compiler could constant-fold) observes whether that relocation
/// has been applied yet: `relocate` compares this value against `AT_ENTRY`
/// to decide whether relocation is still needed.
fn load_static_start() -> usize {
    // Newtype wrapper so a raw pointer can live in a `static`.
    struct StaticStart(*const c_void);
    // SAFETY: the contained pointer is only ever read, never mutated.
    unsafe impl Sync for StaticStart {}
    static STATIC_START: StaticStart = StaticStart(crate::arch::_start as *const c_void);
    let static_start_addr: *const *const c_void = &STATIC_START.0;
    unsafe { relocation_load(static_start_addr.addr()) }
}