//! substrate_wasmtime_jit/code_memory.rs — memory management for executable JIT code.

use crate::unwind::UnwindRegistry;
4use region;
5use std::mem::ManuallyDrop;
6use std::{cmp, mem};
7use wasmtime_environ::{
8 isa::{unwind::UnwindInfo, TargetIsa},
9 Compilation, CompiledFunction,
10};
11use wasmtime_runtime::{Mmap, VMFunctionBody};
12
/// A single allocation of executable memory: an mmap'd region together with
/// the unwind registry describing the functions placed inside it.
///
/// Both fields are `ManuallyDrop` so the `Drop` impl can destroy them in an
/// explicit order (registry first, then the mapping it points into).
struct CodeMemoryEntry {
    // Backing memory; function bodies are copied here and the pages are
    // later flipped to read+execute.
    mmap: ManuallyDrop<Mmap>,
    // Unwind information registered for code living in `mmap`; constructed
    // with the mapping's base address.
    registry: ManuallyDrop<UnwindRegistry>,
}
17
18impl CodeMemoryEntry {
19 fn with_capacity(cap: usize) -> Result<Self, String> {
20 let mmap = ManuallyDrop::new(Mmap::with_at_least(cap)?);
21 let registry = ManuallyDrop::new(UnwindRegistry::new(mmap.as_ptr() as usize));
22 Ok(Self { mmap, registry })
23 }
24
25 fn contains(&self, addr: usize) -> bool {
26 let start = self.mmap.as_ptr() as usize;
27 let end = start + self.mmap.len();
28 start <= addr && addr < end
29 }
30}
31
impl Drop for CodeMemoryEntry {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: each `ManuallyDrop` field is dropped exactly once, here.
            // The registry is torn down first because it was constructed from
            // the mapping's base address (see `with_capacity`) and so refers
            // to memory owned by `mmap`; the mapping itself goes last.
            ManuallyDrop::drop(&mut self.registry);
            ManuallyDrop::drop(&mut self.mmap);
        }
    }
}
41
/// Memory manager for executable JIT code: hands out writable slices from
/// large mmap'd chunks, then flips them to read+execute on `publish`.
pub struct CodeMemory {
    // The entry currently being filled by `allocate`; `None` before the
    // first allocation and again after `publish` retires it.
    current: Option<CodeMemoryEntry>,
    // Retired, filled entries in allocation order.
    entries: Vec<CodeMemoryEntry>,
    // Write offset into `current`'s mapping for the next allocation.
    position: usize,
    // Count of leading `entries` already published (made read+execute);
    // `publish` starts from this index.
    published: usize,
}
49
50fn _assert() {
51 fn _assert_send_sync<T: Send + Sync>() {}
52 _assert_send_sync::<CodeMemory>();
53}
54
impl CodeMemory {
    /// Creates a new, empty `CodeMemory` with no backing mappings.
    pub fn new() -> Self {
        Self {
            current: None,
            entries: Vec::new(),
            position: 0,
            published: 0,
        }
    }

    /// Allocates space for a single compiled function, copies its body in,
    /// and returns the writable body slice. Unwind info, if present, is
    /// registered as a side effect of the copy.
    pub fn allocate_for_function(
        &mut self,
        func: &CompiledFunction,
    ) -> Result<&mut [VMFunctionBody], String> {
        // Size covers the body plus, on Windows x64, alignment padding and
        // the emitted unwind info (see `function_allocation_size`).
        let size = Self::function_allocation_size(func);

        let (buf, registry, start) = self.allocate(size)?;

        let (_, _, vmfunc) = Self::copy_function(func, start as u32, buf, registry);

        Ok(vmfunc)
    }

    /// Allocates one contiguous region for every function in `compilation`,
    /// copies them back-to-back, and returns one body slice per function in
    /// iteration order.
    pub fn allocate_for_compilation(
        &mut self,
        compilation: &Compilation,
    ) -> Result<Box<[&mut [VMFunctionBody]]>, String> {
        // Total bytes required, so a single `allocate` call can hold all
        // functions in one mapping.
        let total_len = compilation
            .into_iter()
            .fold(0, |acc, func| acc + Self::function_allocation_size(func));

        let (mut buf, registry, start) = self.allocate(total_len)?;
        let mut result = Vec::with_capacity(compilation.len());
        let mut start = start as u32;

        for func in compilation.into_iter() {
            // `copy_function` consumes a prefix of `buf` and hands back the
            // remainder plus the next function's start offset.
            let (next_start, next_buf, vmfunc) = Self::copy_function(func, start, buf, registry);

            result.push(vmfunc);

            start = next_start;
            buf = next_buf;
        }

        Ok(result.into_boxed_slice())
    }

    /// Makes all allocated code executable: retires the in-progress entry,
    /// publishes each unpublished entry's unwind registry, then flips the
    /// entry's pages to read+execute.
    pub fn publish(&mut self, isa: &dyn TargetIsa) {
        // Retire `current` into `entries`; `0` means "don't start a new one".
        self.push_current(0)
            .expect("failed to push current memory map");

        for CodeMemoryEntry {
            mmap: m,
            registry: r,
        } in &mut self.entries[self.published..]
        {
            // Unwind info is published before the protection change below.
            r.publish(isa)
                .expect("failed to publish function unwind registry");

            if !m.is_empty() {
                unsafe {
                    // SAFETY: `m` is a live mapping owned by this entry;
                    // pointer and length describe exactly that region.
                    region::protect(m.as_mut_ptr(), m.len(), region::Protection::ReadExecute)
                }
                .expect("unable to make memory readonly and executable");
            }
        }

        // Everything up to the current end of `entries` is now published.
        self.published = self.entries.len();
    }

    /// Reserves `size` bytes in the current mapping, rolling over to a fresh
    /// mapping when there is no current entry or not enough room left.
    ///
    /// Returns the writable slice, the owning entry's unwind registry, and
    /// the slice's start offset within the mapping.
    fn allocate(&mut self, size: usize) -> Result<(&mut [u8], &mut UnwindRegistry, usize), String> {
        assert!(size > 0);

        // Start a new mapping if needed; mappings are at least 0x10000
        // (64 KiB) bytes to amortize the cost of creating them.
        if match &self.current {
            Some(e) => e.mmap.len() - self.position < size,
            None => true,
        } {
            self.push_current(cmp::max(0x10000, size))?;
        }

        let old_position = self.position;
        self.position += size;

        // `push_current` above guarantees `current` is `Some` here.
        let e = self.current.as_mut().unwrap();

        Ok((
            &mut e.mmap.as_mut_slice()[old_position..self.position],
            &mut e.registry,
            old_position,
        ))
    }

    /// Byte size needed to store `func`: the body alone, or — for Windows
    /// x64 unwind info, which is stored in the same buffer right after the
    /// body — the body rounded up to a 4-byte boundary plus the unwind
    /// info's emitted size.
    fn function_allocation_size(func: &CompiledFunction) -> usize {
        match &func.unwind_info {
            Some(UnwindInfo::WindowsX64(info)) => {
                // `(len + 3) & !3` rounds up to the next multiple of 4.
                ((func.body.len() + 3) & !3) + info.emit_size()
            }
            _ => func.body.len(),
        }
    }

    /// Copies `func` into the front of `buf` and registers its unwind info
    /// with `registry`.
    ///
    /// Returns the offset one past the copied data (body plus any emitted
    /// unwind info), the unused remainder of `buf`, and the body viewed as
    /// a `VMFunctionBody` slice.
    fn copy_function<'a>(
        func: &CompiledFunction,
        func_start: u32,
        buf: &'a mut [u8],
        registry: &mut UnwindRegistry,
    ) -> (u32, &'a mut [u8], &'a mut [VMFunctionBody]) {
        let func_len = func.body.len();
        let mut func_end = func_start + (func_len as u32);

        let (body, mut remainder) = buf.split_at_mut(func_len);
        body.copy_from_slice(&func.body);
        let vmfunc = Self::view_as_mut_vmfunc_slice(body);

        if let Some(UnwindInfo::WindowsX64(info)) = &func.unwind_info {
            // Windows x64 unwind info is emitted in-line directly after the
            // body, aligned up to the next 4-byte boundary.
            let unwind_start = (func_end + 3) & !3;
            let unwind_size = info.emit_size();
            let padding = (unwind_start - func_end) as usize;

            let (slice, r) = remainder.split_at_mut(padding + unwind_size);

            // Skip the alignment padding; emit into the aligned tail.
            info.emit(&mut slice[padding..]);

            func_end = unwind_start + (unwind_size as u32);
            remainder = r;
        }

        // Registration uses the body's range only; the emitted blob above is
        // located separately by the Windows-specific path.
        if let Some(info) = &func.unwind_info {
            registry
                .register(func_start, func_len as u32, info)
                .expect("failed to register unwind information");
        }

        (func_end, remainder, vmfunc)
    }

    /// Reinterprets a byte slice as a `VMFunctionBody` slice without copying.
    fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
        let byte_ptr: *mut [u8] = slice;
        let body_ptr = byte_ptr as *mut [VMFunctionBody];
        // NOTE(review): this presumes `VMFunctionBody` is a one-byte type so
        // the fat-pointer length stays meaningful — TODO confirm against its
        // definition in wasmtime_runtime.
        unsafe { &mut *body_ptr }
    }

    /// Retires the current entry into `entries` and, when `new_size > 0`,
    /// installs a fresh entry of at least `max(0x10000, new_size)` bytes;
    /// `new_size == 0` leaves no current entry. Always resets the write
    /// position to the start of the (possibly absent) new mapping.
    fn push_current(&mut self, new_size: usize) -> Result<(), String> {
        let previous = mem::replace(
            &mut self.current,
            if new_size == 0 {
                None
            } else {
                Some(CodeMemoryEntry::with_capacity(cmp::max(0x10000, new_size))?)
            },
        );

        if let Some(e) = previous {
            self.entries.push(e);
        }

        self.position = 0;

        Ok(())
    }

    /// Returns whether `addr` lies inside any already-published entry.
    /// The still-unpublished tail of `entries` (and `current`) is ignored.
    pub fn published_contains(&self, addr: usize) -> bool {
        self.entries[..self.published]
            .iter()
            .any(|entry| entry.contains(addr))
    }
}