wasmer_engine_jit/code_memory.rs

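//! Memory management for executable code.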
use crate::unwind::UnwindRegistry;
use wasmer_compiler::{CompiledFunctionUnwindInfo, CustomSection, FunctionBody};
use wasmer_vm::{Mmap, VMFunctionBody};

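/// Alignment applied to each function start; 16 bytes is the usual function
/// alignment on x86-64.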
const ARCH_FUNCTION_ALIGNMENT: usize = 16;

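/// Alignment applied to each data section; 64 bytes matches a typical
/// cache-line size.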
const DATA_SECTION_ALIGNMENT: usize = 64;

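/// Memory manager for executable code: a single mmap'd region holding
/// function bodies and executable sections first, with read-write data
/// sections on the pages that follow.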
pub struct CodeMemory {
    unwind_registry: UnwindRegistry,
    mmap: Mmap,
    start_of_nonexecutable_pages: usize,
}

impl CodeMemory {
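    /// Creates a new `CodeMemory` instance with an empty mapping.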
    pub fn new() -> Self {
        Self {
            unwind_registry: UnwindRegistry::new(),
            mmap: Mmap::new(),
            start_of_nonexecutable_pages: 0,
        }
    }

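    /// Returns a mutable reference to the registry used for unwind information.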
    pub fn unwind_registry_mut(&mut self) -> &mut UnwindRegistry {
        &mut self.unwind_registry
    }

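    /// Allocates and copies the given functions, executable custom sections,
    /// and data custom sections into a fresh mapping, returning mutable
    /// slices over each in the same order as the inputs.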
    pub fn allocate(
        &mut self,
        functions: &[&FunctionBody],
        executable_sections: &[&CustomSection],
        data_sections: &[&CustomSection],
    ) -> Result<(Vec<&mut [VMFunctionBody]>, Vec<&mut [u8]>, Vec<&mut [u8]>), String> {
        let mut function_result = vec![];
        let mut data_section_result = vec![];
        let mut executable_section_result = vec![];

        let page_size = region::page::size();

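        // Compute the total size: functions and executable sections come
        // first, each rounded up to ARCH_FUNCTION_ALIGNMENT; that region is
        // padded to a page boundary so the data sections that follow (each
        // rounded up to DATA_SECTION_ALIGNMENT) land on their own pages.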
        let total_len = round_up(
            functions.iter().fold(0, |acc, func| {
                round_up(
                    acc + Self::function_allocation_size(func),
                    ARCH_FUNCTION_ALIGNMENT,
                )
            }) + executable_sections.iter().fold(0, |acc, exec| {
                round_up(acc + exec.bytes.len(), ARCH_FUNCTION_ALIGNMENT)
            }),
            page_size,
        ) + data_sections.iter().fold(0, |acc, data| {
            round_up(acc + data.bytes.len(), DATA_SECTION_ALIGNMENT)
        });

        self.mmap = Mmap::with_at_least(total_len)?;

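        // Copy the function bodies and executable sections first, tracking
        // how many bytes belong to the executable region.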
        let mut bytes = 0;
        let mut buf = self.mmap.as_mut_slice();
        for func in functions {
            let len = round_up(
                Self::function_allocation_size(func),
                ARCH_FUNCTION_ALIGNMENT,
            );
            let (func_buf, next_buf) = buf.split_at_mut(len);
            buf = next_buf;
            bytes += len;

            let vmfunc = Self::copy_function(&mut self.unwind_registry, func, func_buf);
            assert_eq!(vmfunc.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
            function_result.push(vmfunc);
        }
        for section in executable_sections {
            let section = &section.bytes;
            assert_eq!(buf.as_mut_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
            let len = round_up(section.len(), ARCH_FUNCTION_ALIGNMENT);
            let (s, next_buf) = buf.split_at_mut(len);
            buf = next_buf;
            bytes += len;
            s[..section.len()].copy_from_slice(section.as_slice());
            executable_section_result.push(s);
        }

        self.start_of_nonexecutable_pages = bytes;

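        // Data sections start on a fresh page so that `publish` can flip the
        // preceding pages to read+execute without touching writable data.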
        if !data_sections.is_empty() {
            let padding = round_up(bytes, page_size) - bytes;
            buf = buf.split_at_mut(padding).1;

            for section in data_sections {
                let section = &section.bytes;
                assert_eq!(buf.as_mut_ptr() as usize % DATA_SECTION_ALIGNMENT, 0);
                let len = round_up(section.len(), DATA_SECTION_ALIGNMENT);
                let (s, next_buf) = buf.split_at_mut(len);
                buf = next_buf;
                s[..section.len()].copy_from_slice(section.as_slice());
                data_section_result.push(s);
            }
        }

        Ok((
            function_result,
            executable_section_result,
            data_section_result,
        ))
    }

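    /// Flips the executable pages to read+execute. Call once everything has
    /// been copied in; does nothing if no executable code was allocated.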
    pub fn publish(&mut self) {
        if self.mmap.is_empty() || self.start_of_nonexecutable_pages == 0 {
            return;
        }
        assert!(self.mmap.len() >= self.start_of_nonexecutable_pages);
        unsafe {
            region::protect(
                self.mmap.as_mut_ptr(),
                self.start_of_nonexecutable_pages,
                region::Protection::READ_EXECUTE,
            )
        }
        .expect("unable to make memory readonly and executable");
    }

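    /// Returns the number of bytes a function needs: the body itself plus,
    /// for Windows x64 unwind info, the info blob appended at the next
    /// 4-byte boundary after the body.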
    fn function_allocation_size(func: &FunctionBody) -> usize {
        match &func.unwind_info {
            Some(CompiledFunctionUnwindInfo::WindowsX64(info)) => {
                ((func.body.len() + 3) & !3) + info.len()
            }
            _ => func.body.len(),
        }
    }

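    /// Copies a function body (plus any trailing Windows x64 unwind info)
    /// into `buf` and registers its unwind information, returning the body
    /// as a `VMFunctionBody` slice.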
    fn copy_function<'a>(
        registry: &mut UnwindRegistry,
        func: &FunctionBody,
        buf: &'a mut [u8],
    ) -> &'a mut [VMFunctionBody] {
        assert_eq!(buf.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);

        let func_len = func.body.len();

        let (body, remainder) = buf.split_at_mut(func_len);
        body.copy_from_slice(&func.body);
        let vmfunc = Self::view_as_mut_vmfunc_slice(body);

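        // Windows x64 unwind information is written directly after the
        // function body, padded to the next 4-byte boundary.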
        if let Some(CompiledFunctionUnwindInfo::WindowsX64(info)) = &func.unwind_info {
            let unwind_start = (func_len + 3) & !3;
            let unwind_size = info.len();
            let padding = unwind_start - func_len;
            assert_eq!((func_len + padding) % 4, 0);
            let slice = remainder.split_at_mut(padding + unwind_size).0;
            slice[padding..].copy_from_slice(&info);
        }

        if let Some(info) = &func.unwind_info {
            registry
                .register(vmfunc.as_ptr() as usize, 0, func_len as u32, info)
                .expect("failed to register unwind information");
        }

        vmfunc
    }

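    /// Reinterprets a mutable byte slice as a slice of `VMFunctionBody`
    /// (a byte-sized type, so the layouts match).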
    fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
        let byte_ptr: *mut [u8] = slice;
        let body_ptr = byte_ptr as *mut [VMFunctionBody];
        unsafe { &mut *body_ptr }
    }
}

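/// Rounds `size` up to the nearest multiple of `multiple`, which must be a
/// power of two.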
fn round_up(size: usize, multiple: usize) -> usize {
    debug_assert!(multiple.is_power_of_two());
    (size + (multiple - 1)) & !(multiple - 1)
}

#[cfg(test)]
mod tests {
    use super::CodeMemory;
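    // Compile-time check that `CodeMemory` is `Send` and `Sync`.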
    fn _assert() {
        fn _assert_send_sync<T: Send + Sync>() {}
        _assert_send_sync::<CodeMemory>();
    }
}