// wasmer_engine_universal/code_memory.rs

//! Memory management for executable code.

use crate::unwind::UnwindRegistry;
use loupe::MemoryUsage;
use wasmer_compiler::{CompiledFunctionUnwindInfo, CustomSection, FunctionBody};
use wasmer_vm::{Mmap, VMFunctionBody};

/// Alignment of function bodies: 16 bytes, which is what code generators
/// for the supported architectures commonly assume.
const ARCH_FUNCTION_ALIGNMENT: usize = 16;

/// Alignment of data sections: 64 bytes, a typical cache-line size.
const DATA_SECTION_ALIGNMENT: usize = 64;

/// Memory manager for executable code.
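///
/// A sketch of the intended flow, assuming `func` is a `FunctionBody`
/// produced by one of the compilers:
///
/// ```ignore
/// let mut memory = CodeMemory::new();
/// let _ = memory.allocate(&[&func], &[], &[])?; // copy code into place
/// memory.publish(); // flip the code pages to read+execute
/// ```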
#[derive(MemoryUsage)]
pub struct CodeMemory {
    /// The unwind registry for the functions in this memory.
    unwind_registry: UnwindRegistry,
    /// The memory mapping backing this code memory.
    mmap: Mmap,
    /// Offset within `mmap` where the non-executable (data) pages begin.
    start_of_nonexecutable_pages: usize,
}

impl CodeMemory {
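    /// Creates a new, empty `CodeMemory`.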
    pub fn new() -> Self {
        Self {
            unwind_registry: UnwindRegistry::new(),
            mmap: Mmap::new(),
            start_of_nonexecutable_pages: 0,
        }
    }

    /// Returns a mutable reference to the unwind registry.
    pub fn unwind_registry_mut(&mut self) -> &mut UnwindRegistry {
        &mut self.unwind_registry
    }

    /// Allocates a single contiguous block of memory for the given
    /// functions and custom sections and copies their contents into it.
    ///
    /// Returns slices into the allocation for the functions, the
    /// executable sections, and the data sections, in that order.
    pub fn allocate(
        &mut self,
        functions: &[&FunctionBody],
        executable_sections: &[&CustomSection],
        data_sections: &[&CustomSection],
    ) -> Result<(Vec<&mut [VMFunctionBody]>, Vec<&mut [u8]>, Vec<&mut [u8]>), String> {
        let mut function_result = vec![];
        let mut data_section_result = vec![];
        let mut executable_section_result = vec![];

        let page_size = region::page::size();

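        // Compute the total size: each function and each executable section
        // is rounded up to ARCH_FUNCTION_ALIGNMENT, the executable region as
        // a whole is rounded up to a page boundary (so its page permissions
        // can be changed independently), and each data section is rounded up
        // to DATA_SECTION_ALIGNMENT.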
        let total_len = round_up(
            functions.iter().fold(0, |acc, func| {
                round_up(
                    acc + Self::function_allocation_size(func),
                    ARCH_FUNCTION_ALIGNMENT,
                )
            }) + executable_sections.iter().fold(0, |acc, exec| {
                round_up(acc + exec.bytes.len(), ARCH_FUNCTION_ALIGNMENT)
            }),
            page_size,
        ) + data_sections.iter().fold(0, |acc, data| {
            round_up(acc + data.bytes.len(), DATA_SECTION_ALIGNMENT)
        });

        self.mmap = Mmap::with_at_least(total_len)?;

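        // Copy the function bodies into the mapping, one after another,
        // each aligned to ARCH_FUNCTION_ALIGNMENT.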
        let mut bytes = 0;
        let mut buf = self.mmap.as_mut_slice();
        for func in functions {
            let len = round_up(
                Self::function_allocation_size(func),
                ARCH_FUNCTION_ALIGNMENT,
            );
            let (func_buf, next_buf) = buf.split_at_mut(len);
            buf = next_buf;
            bytes += len;

            let vmfunc = Self::copy_function(&mut self.unwind_registry, func, func_buf);
            assert_eq!(vmfunc.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
            function_result.push(vmfunc);
        }
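        // Copy the executable custom sections directly after the functions.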
        for section in executable_sections {
            let section = &section.bytes;
            assert_eq!(buf.as_mut_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
            let len = round_up(section.len(), ARCH_FUNCTION_ALIGNMENT);
            let (s, next_buf) = buf.split_at_mut(len);
            buf = next_buf;
            bytes += len;
            s[..section.len()].copy_from_slice(section.as_slice());
            executable_section_result.push(s);
        }

        self.start_of_nonexecutable_pages = bytes;

        if !data_sections.is_empty() {
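            // Data sections get different page permissions from the code
            // that precedes them, so they must start on a fresh page.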
            let padding = round_up(bytes, page_size) - bytes;
            buf = buf.split_at_mut(padding).1;

            for section in data_sections {
                let section = &section.bytes;
                assert_eq!(buf.as_mut_ptr() as usize % DATA_SECTION_ALIGNMENT, 0);
                let len = round_up(section.len(), DATA_SECTION_ALIGNMENT);
                let (s, next_buf) = buf.split_at_mut(len);
                buf = next_buf;
                s[..section.len()].copy_from_slice(section.as_slice());
                data_section_result.push(s);
            }
        }

        Ok((
            function_result,
            executable_section_result,
            data_section_result,
        ))
    }

    /// Applies the final page permissions, marking the pages that hold
    /// functions and executable sections read-only and executable.
    pub fn publish(&mut self) {
        if self.mmap.is_empty() || self.start_of_nonexecutable_pages == 0 {
            return;
        }
        assert!(self.mmap.len() >= self.start_of_nonexecutable_pages);
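        // SAFETY: the protected range lies entirely within our own mapping,
        // as checked by the assertion above.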
        unsafe {
            region::protect(
                self.mmap.as_mut_ptr(),
                self.start_of_nonexecutable_pages,
                region::Protection::READ_EXECUTE,
            )
        }
        .expect("unable to make memory readonly and executable");
    }

    /// Calculates the allocation size of the given compiled function,
    /// including any trailing Windows unwind information.
    fn function_allocation_size(func: &FunctionBody) -> usize {
        match &func.unwind_info {
            Some(CompiledFunctionUnwindInfo::WindowsX64(info)) => {
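                // Windows unwind information lives in code memory right
                // after the function body, which is padded to a 4-byte
                // boundary first.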
                ((func.body.len() + 3) & !3) + info.len()
            }
            _ => func.body.len(),
        }
    }

    /// Copies the body of the compiled function into `buf` and registers
    /// its unwind information with `registry`.
    fn copy_function<'a>(
        registry: &mut UnwindRegistry,
        func: &FunctionBody,
        buf: &'a mut [u8],
    ) -> &'a mut [VMFunctionBody] {
        assert_eq!(buf.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);

        let func_len = func.body.len();

        let (body, remainder) = buf.split_at_mut(func_len);
        body.copy_from_slice(&func.body);
        let vmfunc = Self::view_as_mut_vmfunc_slice(body);

        if let Some(CompiledFunctionUnwindInfo::WindowsX64(info)) = &func.unwind_info {
            let unwind_start = (func_len + 3) & !3;
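            // The unwind info follows the 4-byte-aligned end of the body;
            // `padding` is the gap between the two.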
            let unwind_size = info.len();
            let padding = unwind_start - func_len;
            assert_eq!((func_len + padding) % 4, 0);
            let slice = remainder.split_at_mut(padding + unwind_size).0;
            slice[padding..].copy_from_slice(&info);
        }

        if let Some(info) = &func.unwind_info {
            registry
                .register(vmfunc.as_ptr() as usize, 0, func_len as u32, info)
                .expect("failed to register unwind information");
        }

        vmfunc
    }

    /// Reinterprets a mutable byte slice as a mutable `VMFunctionBody`
    /// slice.
    fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
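        // `VMFunctionBody` is a single-byte type, so the cast preserves
        // the slice's length and layout.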
        let byte_ptr: *mut [u8] = slice;
        let body_ptr = byte_ptr as *mut [VMFunctionBody];
        unsafe { &mut *body_ptr }
    }
}

/// Rounds `size` up to the nearest multiple of `multiple`, which must be a
/// power of two.
fn round_up(size: usize, multiple: usize) -> usize {
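    // For example, round_up(17, 16) == 32 and round_up(16, 16) == 16.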
    debug_assert!(multiple.is_power_of_two());
    (size + (multiple - 1)) & !(multiple - 1)
}

#[cfg(test)]
mod tests {
    use super::CodeMemory;

    fn _assert() {
        fn _assert_send_sync<T: Send + Sync>() {}
        _assert_send_sync::<CodeMemory>();
    }
}