use std::collections::HashMap;
use std::ptr::{read_unaligned, write_unaligned};
use wasmer_compiler::{
    JumpTable, Relocation, RelocationKind, RelocationTarget, SectionIndex, TrampolinesSection,
};
use wasmer_types::entity::PrimaryMap;
use wasmer_types::LocalFunctionIndex;
use wasmer_vm::{SectionBodyPtr, VMLocalFunction};

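// Trampoline bookkeeping, as used by the helpers below: the trampolines
// section consists of `slots` slots of `size` bytes each, and the 64-bit
// destination address of a slot is stored at offset 8 inside it (the leading
// bytes are assumed to hold the indirect-jump code emitted by the compiler).
// The `HashMap<usize, usize>` maps a destination address to the address of
// the slot that jumps to it, so each destination gets at most one slot.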
/// Returns the trampoline slot assigned to `address`, allocating the next
/// free slot on first use.
fn trampolines_add(
    map: &mut HashMap<usize, usize>,
    trampoline: &TrampolinesSection,
    address: usize,
    baseaddress: usize,
) -> usize {
    if let Some(target) = map.get(&address) {
        return *target;
    }
    let ret = map.len();
    if ret == trampoline.slots {
        panic!("No more slots in Trampolines");
    }
    map.insert(address, baseaddress + ret * trampoline.size);
    baseaddress + ret * trampoline.size
}

/// Returns the trampoline slot to use for `address`, or `None` when no
/// trampolines section is present.
fn use_trampoline(
    address: usize,
    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
    trampolines: &Option<TrampolinesSection>,
    map: &mut HashMap<usize, usize>,
) -> Option<usize> {
    match trampolines {
        Some(trampolines) => Some(trampolines_add(
            map,
            trampolines,
            address,
            *allocated_sections[trampolines.section_index] as usize,
        )),
        _ => None,
    }
}

/// Builds the `destination address -> slot address` map from any trampoline
/// slots that are already populated; unused slots hold zero at offset 8.
fn fill_trampoline_map(
    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
    trampolines: &Option<TrampolinesSection>,
) -> HashMap<usize, usize> {
    let mut map: HashMap<usize, usize> = HashMap::new();
    if let Some(trampolines) = trampolines {
        let baseaddress = *allocated_sections[trampolines.section_index] as usize;
        for i in 0..trampolines.slots {
            // The destination address of slot `i` is stored at offset 8
            // inside the slot; a zero value marks an unused slot.
            let jmpslot: usize = unsafe {
                read_unaligned((baseaddress + i * trampolines.size + 8) as *mut usize)
            };
            if jmpslot != 0 {
                map.insert(jmpslot, baseaddress + i * trampolines.size);
            }
        }
    }
    map
}

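/// Applies a single relocation to the code at `body`. For `Arm64Call`, when
/// the PC-relative displacement is too large to encode in the branch
/// instruction's 26-bit immediate, the call is redirected through a
/// trampoline slot whose destination field is filled with the real target.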
fn apply_relocation(
    body: usize,
    r: &Relocation,
    allocated_functions: &PrimaryMap<LocalFunctionIndex, VMLocalFunction>,
    jt_offsets: impl Fn(LocalFunctionIndex, JumpTable) -> wasmer_compiler::CodeOffset,
    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
    trampolines: &Option<TrampolinesSection>,
    trampolines_map: &mut HashMap<usize, usize>,
) {
    let target_func_address: usize = match r.reloc_target {
        RelocationTarget::LocalFunc(index) => *allocated_functions[index].body as usize,
        RelocationTarget::LibCall(libcall) => libcall.function_pointer(),
        RelocationTarget::CustomSection(custom_section) => {
            *allocated_sections[custom_section] as usize
        }
        RelocationTarget::JumpTable(func_index, jt) => {
            let offset = jt_offsets(func_index, jt);
            *allocated_functions[func_index].body as usize + offset as usize
        }
    };

    match r.kind {
        #[cfg(target_pointer_width = "64")]
        RelocationKind::Abs8 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u64, reloc_delta);
        },
        #[cfg(target_pointer_width = "32")]
        RelocationKind::X86PCRel4 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u32, reloc_delta as _);
        },
        #[cfg(target_pointer_width = "64")]
        RelocationKind::X86PCRel8 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u64, reloc_delta);
        },
        RelocationKind::X86CallPCRel4 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u32, reloc_delta as _);
        },
        RelocationKind::X86PCRelRodata4 => {}
        RelocationKind::Arm64Call => unsafe {
            let (reloc_address, mut reloc_delta) = r.for_address(body, target_func_address as u64);
            if (reloc_delta as i64).abs() >= 0x1000_0000 {
                // The displacement does not fit in the branch immediate:
                // route the call through a trampoline slot instead.
                let new_address = match use_trampoline(
                    target_func_address,
                    allocated_sections,
                    trampolines,
                    trampolines_map,
                ) {
                    Some(new_address) => new_address,
                    _ => panic!(
                        "Relocation too big for {:?} for {:?} with {:x}, current val {:x}",
                        r.kind,
                        r.reloc_target,
                        reloc_delta,
                        read_unaligned(reloc_address as *mut u32)
                    ),
                };
                // Store the real destination at offset 8 of the slot and
                // retarget the relocation at the slot itself.
                write_unaligned((new_address + 8) as *mut u64, target_func_address as u64);
                let (_, new_delta) = r.for_address(body, new_address as u64);
                reloc_delta = new_delta;
            }
            // Fold the word offset into the 26-bit immediate of the branch
            // instruction already present at the relocation site.
            let reloc_delta = (((reloc_delta / 4) as u32) & 0x3ff_ffff)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        // The Arm64Movw0..3 kinds each patch one 16-bit chunk of the target
        // value into the immediate field (bits 5..=20) of a MOVZ/MOVK
        // instruction, from the lowest to the highest 16 bits.
        RelocationKind::Arm64Movw0 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta =
                (((reloc_delta & 0xffff) as u32) << 5) | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Arm64Movw1 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((((reloc_delta >> 16) & 0xffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Arm64Movw2 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((((reloc_delta >> 32) & 0xffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Arm64Movw3 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((((reloc_delta >> 48) & 0xffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        kind => panic!(
            "Relocation kind unsupported in the current architecture {}",
            kind
        ),
    }
}

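/// Links a module by applying every relocation to the allocated custom
/// sections and function bodies, sharing one trampoline map (seeded from
/// already-populated slots) across all of them.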
pub fn link_module(
    allocated_functions: &PrimaryMap<LocalFunctionIndex, VMLocalFunction>,
    jt_offsets: impl Fn(LocalFunctionIndex, JumpTable) -> wasmer_compiler::CodeOffset,
    function_relocations: impl Iterator<Item = (LocalFunctionIndex, impl Iterator<Item = Relocation>)>,
    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
    section_relocations: impl Iterator<Item = (SectionIndex, impl Iterator<Item = Relocation>)>,
    trampolines: &Option<TrampolinesSection>,
) {
    let mut trampolines_map = fill_trampoline_map(allocated_sections, trampolines);
    for (i, section_relocs) in section_relocations {
        let body = *allocated_sections[i] as usize;
        for r in section_relocs {
            apply_relocation(
                body,
                &r,
                allocated_functions,
                &jt_offsets,
                allocated_sections,
                trampolines,
                &mut trampolines_map,
            );
        }
    }
    for (i, function_relocs) in function_relocations {
        let body = *allocated_functions[i].body as usize;
        for r in function_relocs {
            apply_relocation(
                body,
                &r,
                allocated_functions,
                &jt_offsets,
                allocated_sections,
                trampolines,
                &mut trampolines_map,
            );
        }
    }
}
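
// A minimal sketch of the `Arm64Call` immediate patching performed above,
// kept independent of any wasmer types so it can run as a plain unit test.
// The instruction word 0x9400_0000 is an AArch64 `bl` with a zero immediate;
// the byte displacement is a made-up value chosen for the example.
#[cfg(test)]
mod tests {
    #[test]
    fn arm64_call_immediate_patching() {
        let insn: u32 = 0x9400_0000; // bl #0, as emitted before relocation
        let reloc_delta: u64 = 0x1234; // hypothetical byte displacement to the callee
        // Same arithmetic as the `Arm64Call` arm: fold the word offset into
        // the low 26 bits of the existing instruction.
        let patched = (((reloc_delta / 4) as u32) & 0x3ff_ffff) | insn;
        assert_eq!(patched & 0x3ff_ffff, 0x48d); // 0x1234 / 4
        assert_eq!(patched & !0x3ff_ffff, 0x9400_0000); // opcode bits untouched
    }
}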