// near_vm_engine_universal/code_memory.rs

use crate::unwind::UnwindRegistry;
use near_vm_compiler::{CompiledFunctionUnwindInfoRef, CustomSectionRef, FunctionBodyRef};
use near_vm_vm::{Mmap, VMFunctionBody};
9const ARCH_FUNCTION_ALIGNMENT: usize = 16;
15
16const DATA_SECTION_ALIGNMENT: usize = 64;
19
20pub struct CodeMemory {
22 unwind_registry: UnwindRegistry,
23 mmap: Mmap,
24 start_of_nonexecutable_pages: usize,
25}
26
27impl CodeMemory {
28 pub fn new() -> Self {
30 Self {
31 unwind_registry: UnwindRegistry::new(),
32 mmap: Mmap::new(),
33 start_of_nonexecutable_pages: 0,
34 }
35 }
36
37 pub fn unwind_registry_mut(&mut self) -> &mut UnwindRegistry {
39 &mut self.unwind_registry
40 }
41
42 pub fn allocate(
44 &mut self,
45 functions: &[FunctionBodyRef<'_>],
46 executable_sections: &[CustomSectionRef<'_>],
47 data_sections: &[CustomSectionRef<'_>],
48 ) -> Result<(Vec<&mut [VMFunctionBody]>, Vec<&mut [u8]>, Vec<&mut [u8]>), String> {
49 let mut function_result = vec![];
50 let mut data_section_result = vec![];
51 let mut executable_section_result = vec![];
52
53 let page_size = region::page::size();
54
55 let total_len = round_up(
66 functions.iter().fold(0, |acc, func| {
67 round_up(acc + Self::function_allocation_size(*func), ARCH_FUNCTION_ALIGNMENT)
68 }) + executable_sections
69 .iter()
70 .fold(0, |acc, exec| round_up(acc + exec.bytes.len(), ARCH_FUNCTION_ALIGNMENT)),
71 page_size,
72 ) + data_sections
73 .iter()
74 .fold(0, |acc, data| round_up(acc + data.bytes.len(), DATA_SECTION_ALIGNMENT));
75
76 self.mmap = Mmap::with_at_least(total_len)?;
79
80 let mut bytes = 0;
84 let mut buf = self.mmap.as_mut_slice();
85 for func in functions {
86 let len = round_up(Self::function_allocation_size(*func), ARCH_FUNCTION_ALIGNMENT);
87 let (func_buf, next_buf) = buf.split_at_mut(len);
88 buf = next_buf;
89 bytes += len;
90
91 let vmfunc = Self::copy_function(&mut self.unwind_registry, *func, func_buf);
92 assert_eq!(vmfunc.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
93 function_result.push(vmfunc);
94 }
95 for section in executable_sections {
96 let section = §ion.bytes;
97 assert_eq!(buf.as_mut_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
98 let len = round_up(section.len(), ARCH_FUNCTION_ALIGNMENT);
99 let (s, next_buf) = buf.split_at_mut(len);
100 buf = next_buf;
101 bytes += len;
102 s[..section.len()].copy_from_slice(*section);
103 executable_section_result.push(s);
104 }
105
106 self.start_of_nonexecutable_pages = bytes;
107
108 if !data_sections.is_empty() {
109 let padding = round_up(bytes, page_size) - bytes;
112 buf = buf.split_at_mut(padding).1;
113
114 for section in data_sections {
115 let section = §ion.bytes;
116 assert_eq!(buf.as_mut_ptr() as usize % DATA_SECTION_ALIGNMENT, 0);
117 let len = round_up(section.len(), DATA_SECTION_ALIGNMENT);
118 let (s, next_buf) = buf.split_at_mut(len);
119 buf = next_buf;
120 s[..section.len()].copy_from_slice(*section);
121 data_section_result.push(s);
122 }
123 }
124
125 Ok((function_result, executable_section_result, data_section_result))
126 }
127
128 pub fn publish(&mut self) {
130 if self.mmap.is_empty() || self.start_of_nonexecutable_pages == 0 {
131 return;
132 }
133 assert!(self.mmap.len() >= self.start_of_nonexecutable_pages);
134 unsafe {
135 region::protect(
136 self.mmap.as_mut_ptr(),
137 self.start_of_nonexecutable_pages,
138 region::Protection::READ_EXECUTE,
139 )
140 }
141 .expect("unable to make memory readonly and executable");
142 }
143
144 fn function_allocation_size(func: FunctionBodyRef<'_>) -> usize {
146 match &func.unwind_info {
147 Some(CompiledFunctionUnwindInfoRef::WindowsX64(info)) => {
148 ((func.body.len() + 3) & !3) + info.len()
152 }
153 _ => func.body.len(),
154 }
155 }
156
157 fn copy_function<'a>(
161 registry: &mut UnwindRegistry,
162 func: FunctionBodyRef<'_>,
163 buf: &'a mut [u8],
164 ) -> &'a mut [VMFunctionBody] {
165 assert_eq!(buf.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
166
167 let func_len = func.body.len();
168
169 let (body, remainder) = buf.split_at_mut(func_len);
170 body.copy_from_slice(&func.body);
171 let vmfunc = Self::view_as_mut_vmfunc_slice(body);
172
173 if let Some(CompiledFunctionUnwindInfoRef::WindowsX64(info)) = &func.unwind_info {
174 let unwind_start = (func_len + 3) & !3;
177 let unwind_size = info.len();
178 let padding = unwind_start - func_len;
179 assert_eq!((func_len + padding) % 4, 0);
180 let slice = remainder.split_at_mut(padding + unwind_size).0;
181 slice[padding..].copy_from_slice(&info);
182 }
183
184 if let Some(info) = &func.unwind_info {
185 registry
186 .register(vmfunc.as_ptr() as usize, 0, func_len as u32, *info)
187 .expect("failed to register unwind information");
188 }
189
190 vmfunc
191 }
192
193 fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
195 let byte_ptr: *mut [u8] = slice;
196 let body_ptr = byte_ptr as *mut [VMFunctionBody];
197 unsafe { &mut *body_ptr }
198 }
199}
200
/// Rounds `size` up to the nearest multiple of `multiple`.
///
/// `multiple` must be a power of two; this is only checked in debug builds.
fn round_up(size: usize, multiple: usize) -> usize {
    debug_assert!(multiple.is_power_of_two());
    let mask = multiple - 1;
    (size + mask) & !mask
}
205
206#[cfg(test)]
207mod tests {
208 use super::CodeMemory;
209 fn _assert() {
210 fn _assert_send_sync<T: Send + Sync>() {}
211 _assert_send_sync::<CodeMemory>();
212 }
213}