1mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17 memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
18 VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
19 VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
20 VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTagImport,
21 VMTrampoline,
22};
23use crate::{wasmer_call_trampoline, FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag};
24use crate::{LinearMemory, NotifyLocation};
25use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
26pub use allocator::InstanceAllocator;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
39use wasmer_types::{
40 DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41 LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, LocalTagIndex,
42 MemoryError, MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer,
43 TagIndex, VMOffsets,
44};
45
#[repr(C)]
#[allow(clippy::type_complexity)]
/// A WebAssembly instance.
///
/// This is a `repr(C)` struct because the trailing `vmctx` field is the
/// start of a dynamically-sized region that JIT code addresses by fixed
/// offsets; `vmctx` must therefore remain the last field.
pub(crate) struct Instance {
    /// The module this instance was instantiated from.
    module: Arc<ModuleInfo>,

    /// Raw pointer to the store's objects. The store must outlive this
    /// instance for the pointer to remain valid (not enforced here).
    context: *mut StoreObjects,

    /// Byte offsets of the fields laid out inside the `vmctx` area.
    offsets: VMOffsets,

    /// Store handles for the locally-defined memories.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// Store handles for the locally-defined tables.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// Store handles for the locally-defined globals.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Store handles for the locally-defined tags.
    tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,

    /// Compiled function bodies for locally-defined functions.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// One call trampoline per signature in the module.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive element segments; entries are removed by `elem.drop`.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments; entries are removed by `data.drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// `VMCallerCheckedAnyfunc`s for local functions, referenced by
    /// `func_ref` and by exported functions.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Pointers to the anyfuncs of imported functions (owned elsewhere).
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Zero-sized marker for the start of the trailing, dynamically-sized
    /// vmctx data; must stay the last field (see struct-level note).
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107 formatter.debug_struct("Instance").finish()
108 }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Returns a typed pointer `offset` bytes into the vmctx area.
    ///
    /// # Safety
    /// `offset` must lie within the vmctx allocation and be suitably
    /// aligned for `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }

    /// The module this instance was created from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    /// Plain reference to the underlying `ModuleInfo`.
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }

    /// Shared access to the store objects backing this instance.
    fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Exclusive access to the store objects backing this instance.
    fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    /// Offsets of the fields laid out in the vmctx area.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }

    /// Pointer to the shared-signature-id array inside the vmctx.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
146
    /// The `VMFunctionImport` for the given function import.
    /// `index` is not bounds-checked here; callers must pass a valid
    /// imported-function index.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Base pointer of the imported-functions array inside the vmctx.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// The `VMTableImport` for the given table import (index unchecked).
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Base pointer of the imported-tables array inside the vmctx.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// The `VMMemoryImport` for the given memory import (index unchecked).
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Base pointer of the imported-memories array inside the vmctx.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// The `VMGlobalImport` for the given global import (index unchecked).
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Base pointer of the imported-globals array inside the vmctx.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// The `VMTagImport` for the given tag import (index unchecked).
    fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tags_ptr().add(index) }
    }

    /// Base pointer of the imported-tags array inside the vmctx.
    fn imported_tags_ptr(&self) -> *mut VMTagImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tags_begin()) }
    }
201
    #[allow(dead_code)]
    /// Copy of the `VMTableDefinition` for the given local table.
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    /// Overwrite the `VMTableDefinition` for the given local table.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Pointer to the `VMTableDefinition` of a local table in the vmctx.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the table-definitions array inside the vmctx.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }

    #[allow(dead_code)]
    /// `VMMemoryDefinition` for any memory index, local or imported.
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Copy of the `VMMemoryDefinition` for the given local memory.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    /// Overwrite the `VMMemoryDefinition` for the given local memory.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Pointer to the `VMMemoryDefinition` of a local memory in the vmctx.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the memory-definitions array inside the vmctx.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
261
    /// The `VMMemory` for any memory index, local or imported.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Mutable `VMMemory` for any memory index, local or imported.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Mutable `VMMemory` for a local memory index.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
301
    /// Copy of the `VMGlobalDefinition` for the given local global.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    #[allow(dead_code)]
    /// Overwrite the `VMGlobalDefinition` for the given local global.
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Pointer to a local global's definition. Note the extra deref: the
    /// vmctx stores *pointers* to global definitions, not the definitions.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the global-definition-pointers array in the vmctx.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Pointer to the builtin-functions array inside the vmctx.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
331
    /// Reference to the `VMContext` marker at the end of this struct.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Raw (mutable) pointer to the vmctx, as handed to JIT code.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
341
    /// Invoke the module's start function, if any, through its trampoline.
    ///
    /// Returns `Ok(())` immediately when the module declares no start
    /// function; otherwise propagates any trap raised by the call.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: either a locally-compiled body (called with
        // our own vmctx) or an imported function (its own body + context).
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // Empty args/results buffer: a Wasm start function takes no
        // parameters and returns nothing.
        // NOTE(review): this pointer comes from a temporary `Vec` dropped
        // on the same line, so it is dangling — confirm the trampoline
        // never dereferences it for an empty signature.
        let values_vec = vec![].as_mut_ptr();

        unsafe {
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec,
            )
        }
    }
392
    #[inline]
    /// Byte offset of the `vmctx` field from the start of `Instance`.
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    /// Recover a local table index from a pointer into the vmctx table
    /// array, by pointer arithmetic relative to the array base.
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recover a local memory index from a pointer into the vmctx memory
    /// array, by pointer arithmetic relative to the array base.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }
422
    /// Grow a local memory by `delta` pages; returns the previous size in
    /// pages, or an error if growth fails. Panics on an invalid index.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Grow an imported memory by `delta` pages.
    ///
    /// # Safety
    /// `memory_index` must be a valid imported-memory index (the import
    /// lookup is unchecked).
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Current size of a local memory, in pages. Panics on a bad index.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Current size of an imported memory, in pages.
    ///
    /// # Safety
    /// `memory_index` must be a valid imported-memory index.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Current size of a local table, in elements. Panics on a bad index.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Current size of an imported table, in elements.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
501
    /// Grow a local table by `delta` elements filled with `init_value`;
    /// returns the previous size, or `None` on failure. Panics on a bad
    /// index.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grow an imported table by `delta` elements.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Read element `index` of a local table; `None` if out of bounds.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Read element `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

    /// Write element `index` of a local table; traps if out of bounds.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Write element `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
589
590 pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
592 if function_index == FunctionIndex::reserved_value() {
593 None
594 } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
595 Some(VMFuncRef(NonNull::from(
596 &self.funcrefs[local_function_index],
597 )))
598 } else {
599 Some(VMFuncRef(self.imported_funcrefs[function_index]))
600 }
601 }
602
    /// `table.init`: copy `len` funcrefs from passive element segment
    /// `elem_index` (starting at `src`) into the table at `dst`.
    ///
    /// A dropped/absent segment behaves as an empty one; traps with
    /// `TableAccessOutOfBounds` if either range is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        // Dropped segments are treated as empty slices.
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Overflow-safe bounds checks for both source and destination.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// `table.fill`: write `item` into `len` consecutive slots starting at
    /// `start_index`; traps if the range exceeds the table size.
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index
            .checked_add(len)
            .map_or(true, |n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// `elem.drop`: discard the passive element segment; later `table.init`
    /// calls against it see an empty segment.
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
    }
686
    /// `memory.copy` within a local memory; bounds checks and traps are
    /// handled by `memory_copy`.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// `memory.copy` within an imported memory.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }

    /// `memory.fill` for a local memory; bounds checks and traps are
    /// handled by `memory_fill`.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// `memory.fill` for an imported memory.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }

    /// `memory.init`: copy `len` bytes from passive data segment
    /// `data_index` (starting at `src`) into memory at `dst`.
    ///
    /// A dropped/absent segment behaves as empty; traps with
    /// `HeapAccessOutOfBounds` if either range is out of bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Overflow-safe bounds checks for both segment and memory.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }

    /// `data.drop`: discard the passive data segment; later `memory.init`
    /// calls against it see an empty segment.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }
796
    /// Mutable `VMTable` for any table index, local or imported.
    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.get_local_table(local_table_index)
        } else {
            self.get_foreign_table(table_index)
        }
    }

    /// Mutable `VMTable` for a local table index.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Mutable `VMTable` for an imported table index.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }

    /// Store handle for any table index, local or imported.
    pub(crate) fn get_table_handle(
        &mut self,
        table_index: TableIndex,
    ) -> InternalStoreHandle<VMTable> {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.tables[local_table_index]
        } else {
            self.imported_table(table_index).handle
        }
    }
832
833 fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
834 let location = NotifyLocation { address: dst };
835 let timeout = if timeout < 0 {
836 None
837 } else {
838 Some(std::time::Duration::from_nanos(timeout as u64))
839 };
840 match memory.do_wait(location, timeout) {
841 Ok(count) => Ok(count),
842 Err(_err) => {
843 Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
845 }
846 }
847 }
848
    /// `memory.atomic.wait32` on a local memory: wait at `dst` only if the
    /// 32-bit value there still equals `val`. A check result of 0 means
    /// "value matched", so we actually block; non-zero is returned as-is.
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.wait32` on an imported memory (see
    /// `local_memory_wait32` for the check/wait protocol).
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.wait64` on a local memory (64-bit compare; see
    /// `local_memory_wait32` for the check/wait protocol).
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.wait64` on an imported memory.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }
953
    /// `memory.atomic.notify` on a local memory: wake up to `count`
    /// waiters at `dst`; returns the number actually woken.
    pub(crate) fn local_memory_notify(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_local_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }

    /// `memory.atomic.notify` on an imported memory.
    pub(crate) fn imported_memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }
979}
980
#[derive(Debug, Eq, PartialEq)]
/// Owning handle to a heap-allocated `Instance`.
pub struct VMInstance {
    /// Layout of the allocation holding the `Instance` plus its trailing
    /// vmctx data; required to deallocate it correctly in `Drop`.
    instance_layout: Layout,

    /// Pointer to the owned `Instance`; freed when this handle drops.
    instance: NonNull<Instance>,
}
1001
impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // Run the Instance destructor in place, then release the raw
            // allocation using the layout recorded at creation time.
            instance_ptr.drop_in_place();
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1017
1018impl VMInstance {
    #[allow(clippy::too_many_arguments)]
    /// Create a new `VMInstance` from the compiled artifacts and resolved
    /// imports, and initialize its vmctx area.
    ///
    /// # Safety
    /// The caller must ensure the allocator, imports, and store objects are
    /// mutually consistent with `module`; the vmctx is populated with raw
    /// pointer copies that assume valid layouts and lifetimes.
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        // Collect raw pointers to the local globals' definitions, to be
        // copied into the vmctx below.
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.get(context).vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        let passive_data = RefCell::new(
            module
                .passive_data
                .clone()
                .into_iter()
                .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                .collect::<HashMap<_, _>>(),
        );

        let handle = {
            let offsets = allocator.offsets().clone();
            // funcrefs are built *after* the instance is placed in memory,
            // because they must point at the final vmctx address.
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
            let instance = Instance {
                module,
                context,
                offsets,
                memories: finished_memories,
                tables: finished_tables,
                tags: finished_tags,
                globals: finished_globals,
                functions: finished_functions,
                function_call_trampolines: finished_function_call_trampolines,
                passive_elements: Default::default(),
                passive_data,
                funcrefs,
                imported_funcrefs,
                vmctx: VMContext {},
            };

            let mut instance_handle = allocator.into_vminstance(instance);

            {
                // Now that the instance has its final address, build the
                // funcref tables against the real vmctx pointer.
                let instance = instance_handle.instance_mut();
                let vmctx_ptr = instance.vmctx_ptr();
                (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                    &instance.module,
                    context,
                    &imports,
                    &instance.functions,
                    &vmshared_signatures,
                    &instance.function_call_trampolines,
                    vmctx_ptr,
                );
            }

            instance_handle
        };
        let instance = handle.instance();

        // Populate the vmctx arrays by raw copy from the resolved imports
        // and signatures.
        ptr::copy(
            vmshared_signatures.values().as_slice().as_ptr(),
            instance.signature_ids_ptr(),
            vmshared_signatures.len(),
        );
        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr(),
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr(),
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr(),
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr(),
            imports.globals.len(),
        );
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr(),
            VMBuiltinFunctionsArray::initialized(),
        );

        // Eagerly copy passive element segments and evaluate global
        // initializers; table/memory initialization happens later in
        // `finish_instantiation`.
        initialize_passive_elements(instance);
        initialize_globals(instance);

        Ok(handle)
    }
1157
    /// Shared reference to the owned `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Exclusive reference to the owned `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }
1167
    /// Finish instantiation: run table and memory (data-segment)
    /// initializers, then the module's start function.
    ///
    /// # Safety
    /// Must only be called once per instance, after `new`; the data
    /// initializers must match the instance's module.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }
1190
    /// Reference to the instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Raw pointer to the instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Offsets of the vmctx fields for this instance.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// The module this instance was created from.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Plain reference to the underlying `ModuleInfo`.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }
1217
1218 pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
1220 let export = *self.module_ref().exports.get(field)?;
1221
1222 Some(self.lookup_by_declaration(export))
1223 }
1224
    /// Turn an `ExportIndex` into a `VMExtern` store handle.
    ///
    /// For a locally-defined function, a fresh `VMFunction` wrapping the
    /// instance-owned anyfunc is registered in the store; all other cases
    /// reuse existing handles.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        // Anyfunc lives in the instance, not in the host.
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = if let Some(def_index) = instance.module.local_tag_index(index) {
                    instance.tags[def_index]
                } else {
                    let import = instance.imported_tag(index);
                    import.handle
                };
                VMExtern::Tag(handle)
            }
        }
    }
1295
    /// Iterator over the module's exported fields (name -> index).
    pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Local memory index for a vmctx memory definition (delegates to
    /// `Instance::memory_index`).
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow a local memory by `delta` pages (delegates to
    /// `Instance::memory_grow`).
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Local table index for a vmctx table definition (delegates to
    /// `Instance::table_index`).
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow a local table by `delta` elements (delegates to
    /// `Instance::table_grow`).
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Read a local table element (delegates to `Instance::table_get`).
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Write a local table element (delegates to `Instance::table_set`).
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Mutable `VMTable` for a local table index.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
1367}
1368
/// Compute the absolute start offset of a data initializer: its constant
/// offset plus the current value of its base global, if any.
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;

    if let Some(base) = init.location.base {
        // The base global may itself be local or imported; read its u32
        // value from the live global definition.
        let val = unsafe {
            if let Some(def_index) = instance.module.local_global_index(base) {
                instance.global(def_index).val.u32
            } else {
                instance.imported_global(base).definition.as_ref().val.u32
            }
        };
        start += usize::try_from(val).unwrap();
    }

    start
}
1386
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
/// Mutable byte view of the memory targeted by a data initializer.
///
/// # Safety
/// The caller must ensure no other aliasing access to the memory exists
/// for the lifetime of the returned slice, and that the memory is not
/// grown (which may reallocate) while the slice is held.
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    let memory = if let Some(local_memory_index) = instance
        .module
        .local_memory_index(init.location.memory_index)
    {
        instance.memory(local_memory_index)
    } else {
        let import = instance.imported_memory(init.location.memory_index);
        *import.definition.as_ref()
    };
    slice::from_raw_parts_mut(memory.base, memory.current_length)
}
1405
1406fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1408 let mut start = init.offset;
1409
1410 if let Some(base) = init.base {
1411 let val = unsafe {
1412 if let Some(def_index) = instance.module.local_global_index(base) {
1413 instance.global(def_index).val.u32
1414 } else {
1415 instance.imported_global(base).definition.as_ref().val.u32
1416 }
1417 };
1418 start += usize::try_from(val).unwrap();
1419 }
1420
1421 start
1422}
1423
/// Populate the instance's tables from the module's active table initializers.
///
/// Each segment is fully bounds-checked against the table's current size
/// before any element is written; an out-of-bounds segment returns a
/// `TableAccessOutOfBounds` trap.
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    // Clone the Arc so we can iterate the module while also mutably using `instance`.
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table_handle(init.table_index);
        // SAFETY: `instance.context` points at the live store objects backing this instance.
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // `checked_add` also rejects `start + len` overflowing usize.
        if start
            .checked_add(init.elements.len())
            .map_or(true, |end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            // Funcref tables: resolve each function index to its anyfunc.
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                    )
                    .unwrap();
            }
        } else {
            // Non-funcref tables: fill the segment's slots with null externrefs.
            for i in 0..init.elements.len() {
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                    )
                    .unwrap();
            }
        }
    }

    Ok(())
}
1463
1464fn initialize_passive_elements(instance: &Instance) {
1468 let mut passive_elements = instance.passive_elements.borrow_mut();
1469 debug_assert!(
1470 passive_elements.is_empty(),
1471 "should only be called once, at initialization time"
1472 );
1473
1474 passive_elements.extend(
1475 instance
1476 .module
1477 .passive_elements
1478 .iter()
1479 .filter(|(_, segments)| !segments.is_empty())
1480 .map(|(idx, segments)| {
1481 (
1482 *idx,
1483 segments.iter().map(|s| instance.func_ref(*s)).collect(),
1484 )
1485 }),
1486 );
1487}
1488
/// Copy the module's active data segments into the instance's linear memories.
///
/// Each segment is bounds-checked against the memory's current length before
/// any byte of that segment is copied; an out-of-bounds segment returns a
/// `HeapAccessOutOfBounds` trap.
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.get_vmmemory(init.location.memory_index);

        // Segment start = constant offset, plus a base global's value if present.
        let start = get_memory_init_start(init, instance);
        // SAFETY: the memory definition pointer is valid while `instance` is alive.
        unsafe {
            let current_length = memory.vmmemory().as_ref().current_length;
            // `checked_add` also rejects `start + len` overflowing usize.
            if start
                .checked_add(init.data.len())
                .map_or(true, |end| end > current_length)
            {
                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
            }
            memory.initialize_with_data(start, init.data)?;
        }
    }

    Ok(())
}
1512
1513fn initialize_globals(instance: &Instance) {
1514 let module = Arc::clone(&instance.module);
1515 for (index, initializer) in module.global_initializers.iter() {
1516 unsafe {
1517 let to = instance.global_ptr(index).as_ptr();
1518 match initializer {
1519 GlobalInit::I32Const(x) => (*to).val.i32 = *x,
1520 GlobalInit::I64Const(x) => (*to).val.i64 = *x,
1521 GlobalInit::F32Const(x) => (*to).val.f32 = *x,
1522 GlobalInit::F64Const(x) => (*to).val.f64 = *x,
1523 GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
1524 GlobalInit::GetGlobal(x) => {
1525 let from: VMGlobalDefinition =
1526 if let Some(def_x) = module.local_global_index(*x) {
1527 instance.global(def_x)
1528 } else {
1529 instance.imported_global(*x).definition.as_ref().clone()
1530 };
1531 *to = from;
1532 }
1533 GlobalInit::RefNullConst => (*to).val.funcref = 0,
1534 GlobalInit::RefFunc(func_idx) => {
1535 let funcref = instance.func_ref(*func_idx).unwrap();
1536 (*to).val = funcref.into_raw();
1537 }
1538 }
1539 }
1540 }
1541}
1542
1543fn build_funcrefs(
1546 module_info: &ModuleInfo,
1547 ctx: &StoreObjects,
1548 imports: &Imports,
1549 finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1550 vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1551 function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1552 vmctx_ptr: *mut VMContext,
1553) -> (
1554 BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1555 BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1556) {
1557 let mut func_refs =
1558 PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1559 let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1560
1561 for import in imports.functions.values() {
1563 imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1564 }
1565
1566 for (local_index, func_ptr) in finished_functions.iter() {
1568 let index = module_info.func_index(local_index);
1569 let sig_index = module_info.functions[index];
1570 let type_index = vmshared_signatures[sig_index];
1571 let call_trampoline = function_call_trampolines[sig_index];
1572 let anyfunc = VMCallerCheckedAnyfunc {
1573 func_ptr: func_ptr.0,
1574 type_index,
1575 vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1576 call_trampoline,
1577 };
1578 func_refs.push(anyfunc);
1579 }
1580 (
1581 func_refs.into_boxed_slice(),
1582 imported_func_refs.into_boxed_slice(),
1583 )
1584}