mod allocator;
mod r#ref;

pub use allocator::InstanceAllocator;
pub use r#ref::{InstanceRef, WeakInstanceRef, WeakOrStrongInstanceRef};

use crate::func_data_registry::VMFuncRef;
use crate::global::Global;
use crate::imports::Imports;
use crate::memory::{Memory, MemoryError};
use crate::sig_registry::VMSharedSignatureIndex;
use crate::table::{Table, TableElement};
use crate::trap::traphandlers::get_trap_handler;
use crate::trap::{catch_traps, Trap, TrapCode};
use crate::vmcontext::{
    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody,
    VMFunctionEnvironment, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
    VMLocalFunction, VMMemoryDefinition, VMMemoryImport, VMTableDefinition, VMTableImport,
};
use crate::{wasmer_call_trampoline, Artifact, VMOffsets, VMTrampoline};
use crate::{VMExtern, VMFunction, VMGlobal};
use memoffset::offset_of;
use more_asserts::assert_lt;
use std::any::Any;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::ffi;
use std::fmt;
use std::mem;
use std::ptr::{self, NonNull};
use std::slice;
use std::sync::Arc;
use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
use wasmer_types::{
    DataIndex, DataInitializer, ElemIndex, ExportIndex, FastGasCounter, FunctionIndex, GlobalIndex,
    GlobalInit, InstanceConfig, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex,
    OwnedTableInitializer, Pages, TableIndex,
};

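/// The function pointer used to initialize a host function environment: it
/// receives the raw `env` pointer and a pointer to the instance, and returns
/// `Ok(())` or a caller-defined error (see `initialize_host_envs`).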
pub type ImportInitializerFuncPtr<ResultErr = *mut ffi::c_void> =
    fn(*mut ffi::c_void, *const ffi::c_void) -> Result<(), ResultErr>;

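/// A WebAssembly instance.
///
/// This type is effectively dynamically-sized: the `VMContext` data read
/// through `vmctx_plus_offset` lives immediately after the struct, which is
/// why the type is `#[repr(C)]` and `vmctx` must be the last field.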
#[repr(C)]
pub(crate) struct Instance {
    /// The `Artifact` this instance was instantiated from.
    pub(crate) artifact: Arc<dyn Artifact>,

    /// Configuration used to construct this instance (gas counter, stack limit).
    config: InstanceConfig,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, Arc<dyn Memory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,

    /// Passive elements in this instantiation. As `elem.drop`s happen,
    /// entries get removed.
    passive_elements: RefCell<BTreeMap<ElemIndex, Box<[VMFuncRef]>>>,

    /// Passive data segments from our module. As `data.drop`s happen,
    /// entries get removed.
    passive_data: RefCell<BTreeMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their funcref backing data.
    funcrefs: BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc>,

    /// Hosts can store arbitrary per-instance information here.
    host_state: Box<dyn Any>,

    /// Host environments of the imported functions, along with the
    /// functions that manage them.
    imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,

    /// Additional context used by compiled WebAssembly code. This field is
    /// last; the variable-sized `VMContext` data is laid out directly after
    /// it.
    vmctx: VMContext,
}

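/// The host environment owned by an imported function: either no
/// environment, or an env pointer together with the functions needed to
/// clone, initialize, and destroy it.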
#[derive(Debug)]
pub enum ImportFunctionEnv {
    /// The imported function has no host env.
    NoEnv,
    /// The imported function has a host env.
    Env {
        /// Raw pointer to the host env.
        env: *mut ffi::c_void,

        /// Clones the data behind the `env` pointer.
        clone: fn(*mut ffi::c_void) -> *mut ffi::c_void,
        /// One-shot initializer, taken and called during instantiation
        /// (see `initialize_host_envs`); `None` afterwards.
        initializer: Option<ImportInitializerFuncPtr>,
        /// Frees the env when this value is dropped.
        destructor: unsafe fn(*mut ffi::c_void),
    },
}

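// Cloning an env duplicates the underlying host data with the user-provided
// `clone` function, so each copy owns its own pointer.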
impl Clone for ImportFunctionEnv {
    fn clone(&self) -> Self {
        match &self {
            Self::NoEnv => Self::NoEnv,
            Self::Env {
                env,
                clone,
                destructor,
                initializer,
            } => {
                let new_env = (*clone)(*env);
                Self::Env {
                    env: new_env,
                    clone: *clone,
                    destructor: *destructor,
                    initializer: *initializer,
                }
            }
        }
    }
}

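// Dropping an env runs the user-provided destructor on the `env` pointer.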
impl Drop for ImportFunctionEnv {
    fn drop(&mut self) {
        match self {
            Self::Env {
                env, destructor, ..
            } => {
                unsafe {
                    (destructor)(*env);
                }
            }
            Self::NoEnv => (),
        }
    }
}

impl fmt::Debug for Instance {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("Instance").finish()
    }
}

#[allow(clippy::cast_ptr_alignment)]
impl Instance {
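    /// Helper to access locations at a given byte `offset` from the
    /// trailing `VMContext`.
    ///
    /// # Safety
    /// `offset` must stay within the `VMContext` allocation laid out after
    /// this `Instance` (see `VMOffsets`).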
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }

    /// Offsets of the `vmctx` fields, as computed for this module.
    fn offsets(&self) -> &VMOffsets {
        self.artifact.offsets()
    }

    /// Return a pointer to the `VMSharedSignatureIndex`es.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_signature_ids_begin()) }
    }

    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_functions_begin()) }
    }

    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_tables_begin()) }
    }

    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        let addr = unsafe { self.imported_memories_ptr().add(index) };
        let align = std::mem::align_of::<VMMemoryImport>();
        debug_assert!(
            addr as usize % align == 0,
            "VMMemoryImport addr is not aligned to {}: {:p}",
            align,
            addr
        );
        unsafe { &*addr }
    }

    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_memories_begin()) }
    }

    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_globals_begin()) }
    }

    /// Return a copy of the indexed local `VMTableDefinition`.
    #[allow(unused)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Overwrite the indexed local `VMTableDefinition`.
    #[allow(unused)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Return a pointer to the indexed local `VMTableDefinition`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the local `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_tables_begin()) }
    }

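    /// Get the `VMMemoryDefinition` for `index`, dispatching between a
    /// locally-defined memory and an imported one.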
    fn memory_definition(&self, index: MemoryIndex) -> &VMMemoryDefinition {
        match self.artifact.import_counts().local_memory_index(index) {
            Ok(local) => unsafe { self.memory_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_memory(import).from.vmmemory().as_ref() },
        }
    }

    /// Overwrite the indexed local `VMMemoryDefinition`.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Return a pointer to the indexed local `VMMemoryDefinition`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the local `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_memories_begin()) }
    }

    /// Get the `VMGlobalDefinition` for `index`, dispatching between a
    /// locally-defined global and an imported one.
    fn global(&self, index: GlobalIndex) -> &VMGlobalDefinition {
        match self.artifact.import_counts().local_global_index(index) {
            Ok(local) => unsafe { self.global_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_global(import).definition.as_ref() },
        }
    }

    /// Overwrite the indexed local `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Return a pointer to the indexed local `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the table of local global pointers.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_globals_begin()) }
    }

    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_builtin_functions_begin()) }
    }

    /// Return a reference to the `VMContext` of this instance.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Return a raw pointer to the `VMContext` of this instance.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }

    /// Return a reference to the custom state attached to this instance.
    #[inline]
    pub fn host_state(&self) -> &dyn Any {
        &*self.host_state
    }

    /// Return a pointer to the trap-handler slot in the `VMContext`.
    fn trap_catcher_ptr(&self) -> *mut *const u8 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_trap_handler()) }
    }

    /// Return a pointer to the gas-counter slot in the `VMContext`.
    pub fn gas_counter_ptr(&self) -> *mut *const FastGasCounter {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_gas_limiter_pointer()) }
    }

    /// Return a pointer to the initial stack limit stored in the `VMContext`.
    pub fn stack_limit_initial_ptr(&self) -> *mut i32 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_initial_begin()) }
    }

    /// Return a pointer to the current stack limit stored in the `VMContext`.
    pub fn stack_limit_ptr(&self) -> *mut i32 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_begin()) }
    }

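    /// Invoke the WebAssembly start function of the instance, if one is
    /// present, resetting the stack meter and catching any traps it raises.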
    fn invoke_start_function(&self) -> Result<(), Trap> {
        let start_index = match self.artifact.start_function() {
            Some(idx) => idx,
            None => return Ok(()),
        };
        let start_funcref = self.funcrefs[start_index];
        self.reset_stack_meter();
        let result = unsafe {
            catch_traps(|| {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionEnvironment)>(
                    start_funcref.func_ptr,
                )(start_funcref.vmctx)
            })
        };
        result
    }

    /// Reset the current stack limit back to its initial value.
    fn reset_stack_meter(&self) {
        unsafe {
            *(self.stack_limit_ptr()) = *(self.stack_limit_initial_ptr());
        }
    }

    /// Return the offset from the start of `Instance` to its `vmctx` field.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

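    /// Recover the `LocalTableIndex` of a `VMTableDefinition` from its
    /// address within this instance's table-definitions array.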
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recover the `LocalMemoryIndex` of a `VMMemoryDefinition` from its
    /// address within this instance's memory-definitions array.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }

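    /// Grow the locally-defined memory at `memory_index` by `delta` pages.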
    pub(crate) fn memory_grow<IntoPages>(
        &self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.grow(delta.into())
    }

    /// Grow an imported memory by `delta` pages.
    ///
    /// # Safety
    /// `memory_index` must be a valid, imported memory index for this module.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        import.from.grow(delta.into())
    }

    /// Return the current size, in pages, of a locally-defined memory.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        self.memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()))
            .size()
    }

    /// Return the current size, in pages, of an imported memory.
    ///
    /// # Safety
    /// `memory_index` must be a valid, imported memory index for this module.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        self.imported_memory(memory_index).from.size()
    }

    /// Return the number of elements in a locally-defined table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        self.tables[table_index].size()
    }

    /// Return the number of elements in an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index for this module.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        self.imported_table(table_index).from.size()
    }

    /// Grow a locally-defined table by `delta` elements, filling the new
    /// slots with `init_value`; returns `None` if the table cannot grow.
    pub(crate) fn table_grow(
        &self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let result = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .grow(delta, init_value);

        result
    }

    /// Grow an imported table by `delta` elements.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index for this module.
    pub(crate) unsafe fn imported_table_grow(
        &self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        import.from.grow(delta, init_value)
    }

    /// Get the element at `index` of a locally-defined table.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .get(index)
    }

    /// Get the element at `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index for this module.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        import.from.get(index)
    }

    /// Set the element at `index` of a locally-defined table.
    pub(crate) fn table_set(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .set(index, val)
    }

    /// Set the element at `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index for this module.
    pub(crate) unsafe fn imported_table_set(
        &self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        import.from.set(index, val)
    }

    /// Get a `VMFuncRef` for the given `FunctionIndex`.
    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
        Some(self.get_vm_funcref(function_index))
    }

    /// Get a `VMFuncRef`, returning the null funcref for the reserved index.
    fn get_vm_funcref(&self, index: FunctionIndex) -> VMFuncRef {
        if index == FunctionIndex::reserved_value() {
            return VMFuncRef::null();
        }
        VMFuncRef(&self.funcrefs[index])
    }

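    /// The `table.init` operation: copy `len` elements of the passive
    /// element segment `elem_index`, starting at `src`, into the table at
    /// `dst`.
    ///
    /// Returns a `Trap` error when either range is out of bounds.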
    pub(crate) fn table_init(
        &self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let passive_elements = self.passive_elements.borrow();
        // A dropped (or never existing) segment behaves like an empty one.
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[VMFuncRef], _>(&[], |e| &**e);

        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// The `table.fill` operation: set `len` elements of the table, starting
    /// at `start_index`, to `item`.
    ///
    /// Returns a `Trap` error when the range is out of bounds.
    pub(crate) fn table_fill(
        &self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index
            .checked_add(len)
            .map_or(true, |n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

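    /// The `elem.drop` operation: drop the passive element segment so its
    /// storage can be reclaimed; later `table.init`s on it see an empty
    /// segment.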
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
    }

    /// The `memory.copy` operation for locally-defined memories.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
        unsafe { memory.memory_copy(dst, src, len) }
    }

    /// The `memory.copy` operation for imported memories.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe { import.from.vmmemory().as_ref().memory_copy(dst, src, len) }
    }

    /// The `memory.fill` operation for locally-defined memories.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
        unsafe { memory.memory_fill(dst, val, len) }
    }

    /// The `memory.fill` operation for imported memories.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe { import.from.vmmemory().as_ref().memory_fill(dst, val, len) }
    }

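    /// The `memory.init` operation: copy `len` bytes of the passive data
    /// segment `data_index`, starting at `src`, into the memory at `dst`.
    ///
    /// Returns a `Trap` error when either range is out of bounds.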
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory_definition(memory_index);
        let passive_data = self.passive_data.borrow();
        // A dropped (or never existing) segment behaves like an empty one.
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let oob_access = src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst.checked_add(len).map_or(true, |m| {
                usize::try_from(m).unwrap() > memory.current_length
            });

        if oob_access {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe {
            let dst_start = memory.base.add(dst as usize);
            let dst_slice = slice::from_raw_parts_mut(dst_start, len as usize);
            dst_slice.copy_from_slice(src_slice);
        }
        Ok(())
    }

    /// The `data.drop` operation: drop the passive data segment so its
    /// storage can be reclaimed.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }

    /// Get a table by index, regardless of whether it is locally-defined or
    /// imported.
    pub(crate) fn get_table(&self, table_index: TableIndex) -> &dyn Table {
        match self.artifact.import_counts().local_table_index(table_index) {
            Ok(local) => self.get_local_table(local),
            Err(import) => self.get_foreign_table(import),
        }
    }

    /// Get a locally-defined table by index.
    pub(crate) fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
        self.tables[index].as_ref()
    }

    /// Get an imported table by its module-wide table index.
    pub(crate) fn get_foreign_table(&self, index: TableIndex) -> &dyn Table {
        let import = self.imported_table(index);
        &*import.from
    }
}

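/// A handle to an `Instance`, keeping it alive through its `InstanceRef`.
///
/// Exports looked up through the handle (functions, memories, tables,
/// globals) each carry a strong `InstanceRef`, so the instance outlives
/// them.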
#[derive(Debug, PartialEq)]
pub struct InstanceHandle {
    /// The `InstanceRef` this handle owns.
    instance: InstanceRef,
}

impl InstanceHandle {
    #[allow(clippy::too_many_arguments)]
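    /// Create a new `InstanceHandle` pointing at a freshly-written
    /// `Instance`, copying the imports, signatures, and global pointers into
    /// the trailing `VMContext`.
    ///
    /// # Safety
    /// `allocator` must have been created from this `artifact`'s
    /// `VMOffsets`, and the supplied imports, memories, tables, and globals
    /// must match what the module expects: the `VMContext` is populated here
    /// with raw pointer writes.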
    pub unsafe fn new(
        artifact: Arc<dyn Artifact>,
        allocator: InstanceAllocator,
        finished_memories: BoxedSlice<LocalMemoryIndex, Arc<dyn Memory>>,
        finished_tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,
        imports: Imports,
        passive_data: BTreeMap<DataIndex, Arc<[u8]>>,
        host_state: Box<dyn Any>,
        imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,
        instance_config: InstanceConfig,
    ) -> Self {
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        let passive_data = RefCell::new(passive_data);

        let handle = {
            // `funcrefs` is replaced with its real value below, once the
            // `VMContext` pointer is known.
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let instance = Instance {
                artifact,
                config: instance_config.clone(),
                memories: finished_memories,
                tables: finished_tables,
                globals: finished_globals,
                passive_elements: Default::default(),
                passive_data,
                host_state,
                funcrefs,
                imported_function_envs,
                vmctx: VMContext {},
            };

            let mut instance_ref = allocator.write_instance(instance);

            {
                let instance = instance_ref.as_mut().unwrap();
                let vmctx_ptr = instance.vmctx_ptr();
                instance.funcrefs = build_funcrefs(
                    &imports,
                    instance.artifact.functions().iter().map(|(_, f)| f),
                    vmctx_ptr,
                );
                *(instance.trap_catcher_ptr()) = get_trap_handler();
                *(instance.gas_counter_ptr()) = instance_config.gas_counter;
                *(instance.stack_limit_ptr()) = instance_config.stack_limit;
                *(instance.stack_limit_initial_ptr()) = instance_config.stack_limit;
            }

            Self {
                instance: instance_ref,
            }
        };
        let instance = handle.instance().as_ref();

        ptr::copy(
            instance.artifact.signatures().as_ptr(),
            instance.signature_ids_ptr() as *mut VMSharedSignatureIndex,
            instance.artifact.signatures().len(),
        );

        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr() as *mut VMFunctionImport,
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr() as *mut VMTableImport,
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr() as *mut VMMemoryImport,
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr() as *mut VMGlobalImport,
            imports.globals.len(),
        );
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr() as *mut VMBuiltinFunctionsArray,
            VMBuiltinFunctionsArray::initialized(),
        );

        // Neither of these returns a `Trap`, so they can run here rather
        // than in `finish_instantiation`.
        initialize_passive_elements(instance);
        initialize_globals(instance);
        handle
    }

    /// Return a reference to the contained `InstanceRef`.
    pub(crate) fn instance(&self) -> &InstanceRef {
        &self.instance
    }

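    /// Finishes the instantiation process started by `InstanceHandle::new`:
    /// runs the table and memory initializers, then the module's start
    /// function, if any.
    ///
    /// # Safety
    /// Only safe to call once, immediately after creating the handle.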
    pub unsafe fn finish_instantiation(&self) -> Result<(), Trap> {
        let instance = self.instance().as_ref();

        initialize_tables(instance)?;
        initialize_memories(
            instance,
            instance.artifact.data_segments().iter().map(Into::into),
        )?;

        instance.invoke_start_function()?;
        Ok(())
    }

    /// Invoke `callee` through `trampoline`, passing arguments and results
    /// through `values_vec`.
    ///
    /// # Safety
    /// All pointers must be valid for the signature of `callee`.
    pub unsafe fn invoke_function(
        &self,
        vmctx: VMFunctionEnvironment,
        trampoline: VMTrampoline,
        callee: *const VMFunctionBody,
        values_vec: *mut u8,
    ) -> Result<(), Trap> {
        {
            let instance = self.instance().as_ref();
            instance.reset_stack_meter();
        }
        wasmer_call_trampoline(vmctx, trampoline, callee, values_vec)
    }

    /// Return a reference to the instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().as_ref().vmctx()
    }

    /// Return a raw pointer to the instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().as_ref().vmctx_ptr()
    }

    /// Return the `VMOffsets` the instance was laid out with.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().as_ref().offsets()
    }

    /// Look up a function by index, whether locally-defined or imported.
    pub fn function_by_index(&self, idx: FunctionIndex) -> Option<VMFunction> {
        let instance = self.instance.as_ref();

        let (address, signature, vmctx, call_trampoline) =
            match instance.artifact.import_counts().local_function_index(idx) {
                Ok(local) => {
                    let func = instance.artifact.functions().get(local)?;
                    (
                        *(func.body),
                        func.signature,
                        VMFunctionEnvironment {
                            vmctx: instance.vmctx_ptr(),
                        },
                        Some(func.trampoline),
                    )
                }
                Err(import) => {
                    let import = instance.imported_function(import);
                    (
                        *(import.body),
                        import.signature,
                        import.environment,
                        import.trampoline,
                    )
                }
            };
        Some(VMFunction {
            kind: VMFunctionKind::Static,
            address,
            signature,
            vmctx,
            call_trampoline,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    /// Look up a memory by index, whether locally-defined or imported.
    fn memory_by_index(&self, index: MemoryIndex) -> Option<crate::VMMemory> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_memory_index(index) {
            Ok(local) => Arc::clone(&instance.memories[local]),
            Err(import) => Arc::clone(&instance.imported_memory(import).from),
        };
        Some(crate::VMMemory {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    /// Look up a table by index, whether locally-defined or imported.
    fn table_by_index(&self, index: TableIndex) -> Option<crate::VMTable> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_table_index(index) {
            Ok(local) => Arc::clone(&instance.tables[local]),
            Err(import) => Arc::clone(&instance.imported_table(import).from),
        };
        Some(crate::VMTable {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    /// Look up a global by index, whether locally-defined or imported.
    pub fn global_by_index(&self, index: GlobalIndex) -> Option<VMGlobal> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_global_index(index) {
            Ok(local) => Arc::clone(&instance.globals[local]),
            Err(import) => Arc::clone(&instance.imported_global(import).from),
        };
        Some(crate::VMGlobal {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    /// Look up an export by name, e.g. `handle.lookup("memory")`.
    pub fn lookup(&self, field: &str) -> Option<VMExtern> {
        let instance = self.instance.as_ref();
        Some(match instance.artifact.export_field(field)? {
            ExportIndex::Function(idx) => VMExtern::Function(self.function_by_index(idx)?),
            ExportIndex::Table(idx) => VMExtern::Table(self.table_by_index(idx)?),
            ExportIndex::Global(idx) => VMExtern::Global(self.global_by_index(idx)?),
            ExportIndex::Memory(idx) => VMExtern::Memory(self.memory_by_index(idx)?),
        })
    }

    /// Return a reference to the custom state attached to this instance.
    pub fn host_state(&self) -> &dyn Any {
        self.instance().as_ref().host_state()
    }

    /// See `Instance::memory_index`.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().as_ref().memory_index(memory)
    }

    /// See `Instance::memory_grow`.
    pub fn memory_grow<IntoPages>(
        &self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance().as_ref().memory_grow(memory_index, delta)
    }

    /// See `Instance::table_index`.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().as_ref().table_index(table)
    }

    /// See `Instance::table_grow`.
    pub fn table_grow(
        &self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance()
            .as_ref()
            .table_grow(table_index, delta, init_value)
    }

    /// See `Instance::table_get`.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().as_ref().table_get(table_index, index)
    }

    /// See `Instance::table_set`.
    pub fn table_set(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance().as_ref().table_set(table_index, index, val)
    }

    /// See `Instance::get_local_table`.
    pub fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
        self.instance().as_ref().get_local_table(index)
    }
}

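/// Initializes the host environments of the imported functions, calling each
/// pending `initializer` exactly once with its env and `instance_ptr`.
///
/// # Safety
/// - `Err` must be the concrete error type the initializer functions were
///   written against; that type is not visible to this crate, so it is the
///   caller's responsibility to supply the right one.
/// - `instance_ptr` must point to a valid instance for those initializers.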
pub unsafe fn initialize_host_envs<Err: Sized>(
    handle: &std::sync::Mutex<InstanceHandle>,
    instance_ptr: *const ffi::c_void,
) -> Result<(), Err> {
    // Gather the initializers under the lock, then run them after it is
    // released.
    let initializers = {
        let mut instance_lock = handle.lock().unwrap();
        let instance_ref = instance_lock.instance.as_mut_unchecked();
        let mut initializers = vec![];
        for import_function_env in instance_ref.imported_function_envs.values_mut() {
            match import_function_env {
                ImportFunctionEnv::Env {
                    env,
                    ref mut initializer,
                    ..
                } => {
                    if let Some(init) = initializer.take() {
                        initializers.push((init, *env));
                    }
                }
                ImportFunctionEnv::NoEnv => (),
            }
        }
        initializers
    };
    for (init, env) in initializers {
        // The initializers were stored with the default `ResultErr` type;
        // reinterpret the fn-pointer reference with the caller's `Err` type.
        let f = mem::transmute::<&ImportInitializerFuncPtr, &ImportInitializerFuncPtr<Err>>(&init);
        f(env, instance_ptr)?;
    }
    Ok(())
}

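/// Compute the effective offset for a memory data initializer: its static
/// offset plus the value of its base global, if it has one.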
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;
    if let Some(base) = init.location.base {
        let val = instance.global(base).to_u32();
        start += usize::try_from(val).unwrap();
    }
    start
}

/// Return a byte-slice view of the memory targeted by a data initializer.
///
/// # Safety
/// The memory must not be concurrently read or mutated while the slice is
/// alive.
#[allow(clippy::mut_from_ref)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    let memory = instance.memory_definition(init.location.memory_index);
    slice::from_raw_parts_mut(memory.base, memory.current_length)
}

/// Compute the effective offset for a table element initializer: its static
/// offset plus the value of its base global, if it has one.
fn get_table_init_start(init: &OwnedTableInitializer, instance: &Instance) -> usize {
    let mut start = init.offset;
    if let Some(base) = init.base {
        let val = instance.global(base).to_u32();
        start += usize::try_from(val).unwrap();
    }
    start
}

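/// Initialize the table memory from the artifact's element segments,
/// trapping if any segment falls outside its target table.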
fn initialize_tables(instance: &Instance) -> Result<(), Trap> {
    for init in instance.artifact.element_segments() {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table(init.table_index);

        if start
            .checked_add(init.elements.len())
            .map_or(true, |end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (i, func_idx) in init.elements.iter().enumerate() {
            let anyfunc = instance.get_vm_funcref(*func_idx);
            table
                .set(
                    u32::try_from(start + i).unwrap(),
                    TableElement::FuncRef(anyfunc),
                )
                .unwrap();
        }
    }

    Ok(())
}

/// Initialize the `Instance::passive_elements` map by resolving the passive
/// element segments' function indices to funcrefs. Empty segments are
/// skipped, since the map already treats missing entries as empty.
fn initialize_passive_elements(instance: &Instance) {
    let mut passive_elements = instance.passive_elements.borrow_mut();
    debug_assert!(
        passive_elements.is_empty(),
        "should only be called once, at initialization time"
    );

    passive_elements.extend(
        instance
            .artifact
            .passive_elements()
            .iter()
            .filter(|(_, segments)| !segments.is_empty())
            .map(|(idx, segments)| {
                (
                    *idx,
                    segments
                        .iter()
                        .map(|s| instance.get_vm_funcref(*s))
                        .collect(),
                )
            }),
    );
}

/// Initialize linear memories from the active data initializers, trapping if
/// any initializer falls outside its target memory.
fn initialize_memories<'a>(
    instance: &Instance,
    data_initializers: impl Iterator<Item = DataInitializer<'a>>,
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.memory_definition(init.location.memory_index);

        let start = get_memory_init_start(&init, instance);
        if start
            .checked_add(init.data.len())
            .map_or(true, |end| end > memory.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        unsafe {
            let mem_slice = get_memory_slice(&init, instance);
            let end = start + init.data.len();
            let to_init = &mut mem_slice[start..end];
            to_init.copy_from_slice(init.data);
        }
    }

    Ok(())
}

/// Initialize the locally-defined globals from their initializer expressions.
fn initialize_globals(instance: &Instance) {
    for (index, (_, initializer)) in instance.artifact.globals().iter().enumerate() {
        unsafe {
            let to = instance.global_ptr(LocalGlobalIndex::new(index)).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => *(*to).as_i32_mut() = *x,
                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = *x,
                GlobalInit::F32Const(x) => *(*to).as_f32_mut() = *x,
                GlobalInit::F64Const(x) => *(*to).as_f64_mut() = *x,
                GlobalInit::V128Const(x) => *(*to).as_bytes_mut() = *x.bytes(),
                GlobalInit::GetGlobal(x) => *to = instance.global(*x).clone(),
                GlobalInit::RefNullConst => *(*to).as_funcref_mut() = VMFuncRef::null(),
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    *(*to).as_funcref_mut() = funcref;
                }
            }
        }
    }
}

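/// Build the `VMCallerCheckedAnyfunc` table indexed by `FunctionIndex`:
/// imported functions first, in import order, then local functions, so that
/// positions line up with the module's function index space.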
pub fn build_funcrefs<'a>(
    imports: &Imports,
    finished_functions: impl ExactSizeIterator<Item = &'a VMLocalFunction>,
    vmctx_ptr: *mut VMContext,
) -> BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc> {
    let mut func_refs =
        PrimaryMap::with_capacity(imports.functions.len() + finished_functions.len());
    // Imported functions are called with the import's own environment.
    for (_, import) in imports.functions.iter() {
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: *(import.body),
            type_index: import.signature,
            vmctx: import.environment,
        };
        func_refs.push(anyfunc);
    }
    // Local functions are called with this instance's own `VMContext`.
    for function in finished_functions {
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: *(function.body),
            type_index: function.signature,
            vmctx: VMFunctionEnvironment { vmctx: vmctx_ptr },
        };
        func_refs.push(anyfunc);
    }
    func_refs.into_boxed_slice()
}