wasmer_vm/instance/mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/master/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `InstanceRef` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated. An `InstanceHandle` is a
8//! wrapper around an `InstanceRef`.
9
10mod allocator;
11mod r#ref;
12
13pub use allocator::InstanceAllocator;
14pub use r#ref::{InstanceRef, WeakInstanceRef, WeakOrStrongInstanceRef};
15
16use crate::func_data_registry::VMFuncRef;
17use crate::global::Global;
18use crate::imports::Imports;
19use crate::memory::{Memory, MemoryError};
20use crate::sig_registry::VMSharedSignatureIndex;
21use crate::table::{Table, TableElement};
22use crate::trap::traphandlers::get_trap_handler;
23use crate::trap::{catch_traps, Trap, TrapCode};
24use crate::vmcontext::{
25    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody,
26    VMFunctionEnvironment, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
27    VMLocalFunction, VMMemoryDefinition, VMMemoryImport, VMTableDefinition, VMTableImport,
28};
29use crate::{wasmer_call_trampoline, Artifact, VMOffsets, VMTrampoline};
30use crate::{VMExtern, VMFunction, VMGlobal};
31use memoffset::offset_of;
32use more_asserts::assert_lt;
33use std::any::Any;
34use std::cell::RefCell;
35use std::collections::BTreeMap;
36use std::convert::TryFrom;
37use std::ffi;
38use std::fmt;
39use std::mem;
40use std::ptr::{self, NonNull};
41use std::slice;
42use std::sync::Arc;
43use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
44use wasmer_types::{
45    DataIndex, DataInitializer, ElemIndex, ExportIndex, FastGasCounter, FunctionIndex, GlobalIndex,
46    GlobalInit, InstanceConfig, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex,
47    OwnedTableInitializer, Pages, TableIndex,
48};
49
50/// The function pointer to call with data and an [`Instance`] pointer to
51/// finish initializing the host env.
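///
/// A minimal sketch of a matching initializer, assuming a hypothetical host-env
/// type `MyEnv` (not part of this crate); the first pointer is the (possibly
/// wrapped) host env and the second points to the `wasmer::Instance`:
///
/// ```ignore
/// fn init_my_env(env: *mut ffi::c_void, instance: *const ffi::c_void) -> Result<(), *mut ffi::c_void> {
///     // Safety: the caller guarantees `env` points to a live `MyEnv`.
///     let my_env = unsafe { &mut *env.cast::<MyEnv>() };
///     my_env.instance_ptr = instance;
///     Ok(())
/// }
/// ```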
52pub type ImportInitializerFuncPtr<ResultErr = *mut ffi::c_void> =
53    fn(*mut ffi::c_void, *const ffi::c_void) -> Result<(), ResultErr>;
54
55/// A WebAssembly instance.
56///
/// The type is dynamically sized: the `vmctx` field is a
/// variable-length region. The type has a C representation to
/// ensure that the `vmctx` field comes last. See the documentation
/// of the `vmctx` field to learn more.
61#[repr(C)]
62pub(crate) struct Instance {
63    pub(crate) artifact: Arc<dyn Artifact>,
64
65    /// External configuration for instance.
66    config: InstanceConfig,
67
68    /// WebAssembly linear memory data.
69    memories: BoxedSlice<LocalMemoryIndex, Arc<dyn Memory>>,
70
    /// WebAssembly table data.
72    tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,
73
74    /// WebAssembly global data.
75    globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,
76
77    /// Passive elements in this instantiation. As `elem.drop`s happen, these
78    /// entries get removed.
79    passive_elements: RefCell<BTreeMap<ElemIndex, Box<[VMFuncRef]>>>,
80
81    /// Passive data segments from our module. As `data.drop`s happen, entries
82    /// get removed. A missing entry is considered equivalent to an empty slice.
83    passive_data: RefCell<BTreeMap<DataIndex, Arc<[u8]>>>,
84
85    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
86    /// will point to elements here for functions defined or imported by this
87    /// instance.
88    funcrefs: BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc>,
89
90    /// Hosts can store arbitrary per-instance information here.
91    host_state: Box<dyn Any>,
92
93    /// Functions to operate on host environments in the imports
94    /// and pointers to the environments.
95    ///
96    /// TODO: Be sure to test with serialize/deserialize and imported
97    /// functions from other Wasm modules.
98    imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,
99
100    /// Additional context used by compiled WebAssembly code. This
101    /// field is last, and represents a dynamically-sized array that
102    /// extends beyond the nominal end of the struct (similar to a
103    /// flexible array member).
104    vmctx: VMContext,
105}
106
107/// A collection of data about host envs used by imported functions.
108#[derive(Debug)]
109pub enum ImportFunctionEnv {
    /// The `vmctx` pointer does not refer to a host env; there is no
    /// metadata about it.
112    NoEnv,
113    /// We're dealing with a user-defined host env.
114    ///
115    /// This host env may be either unwrapped (the user-supplied host env
    /// directly) or wrapped; i.e., in the case of dynamic functions, we
    /// store our own extra data along with the user-supplied env, so
    /// the `env` pointer here points to the outermost type.
119    Env {
120        /// The function environment. This is not always the user-supplied
121        /// env.
122        env: *mut ffi::c_void,
123
124        /// A clone function for duplicating the env.
125        clone: fn(*mut ffi::c_void) -> *mut ffi::c_void,
        /// An optional initializer. When present, it should be reset to
        /// `None` after use to prevent double initialization.
129        initializer: Option<ImportInitializerFuncPtr>,
130        /// The destructor to clean up the type in `env`.
131        ///
132        /// # Safety
        /// - This function must be called in a synchronized way. For
134        ///   example, in the `Drop` implementation of this type.
135        destructor: unsafe fn(*mut ffi::c_void),
136    },
137}
138
139impl Clone for ImportFunctionEnv {
140    fn clone(&self) -> Self {
141        match &self {
142            Self::NoEnv => Self::NoEnv,
143            Self::Env {
144                env,
145                clone,
146                destructor,
147                initializer,
148            } => {
149                let new_env = (*clone)(*env);
150                Self::Env {
151                    env: new_env,
152                    clone: *clone,
153                    destructor: *destructor,
154                    initializer: *initializer,
155                }
156            }
157        }
158    }
159}
160
161impl Drop for ImportFunctionEnv {
162    fn drop(&mut self) {
163        match self {
164            Self::Env {
165                env, destructor, ..
166            } => {
167                // # Safety
168                // - This is correct because we know no other references
169                //   to this data can exist if we're dropping it.
170                unsafe {
171                    (destructor)(*env);
172                }
173            }
174            Self::NoEnv => (),
175        }
176    }
177}
178
179impl fmt::Debug for Instance {
180    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
181        formatter.debug_struct("Instance").finish()
182    }
183}
184
185#[allow(clippy::cast_ptr_alignment)]
186impl Instance {
187    /// Helper function to access various locations offset from our `*mut
188    /// VMContext` object.
189    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
190        (self.vmctx_ptr() as *mut u8)
191            .add(usize::try_from(offset).unwrap())
192            .cast()
193    }
194
195    /// Offsets in the `vmctx` region.
196    fn offsets(&self) -> &VMOffsets {
197        self.artifact.offsets()
198    }
199
200    /// Return a pointer to the `VMSharedSignatureIndex`s.
201    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
202        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_signature_ids_begin()) }
203    }
204
205    /// Return the indexed `VMFunctionImport`.
206    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
207        let index = usize::try_from(index.as_u32()).unwrap();
208        unsafe { &*self.imported_functions_ptr().add(index) }
209    }
210
211    /// Return a pointer to the `VMFunctionImport`s.
212    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
213        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_functions_begin()) }
214    }
215
    /// Return the indexed `VMTableImport`.
217    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
218        let index = usize::try_from(index.as_u32()).unwrap();
219        unsafe { &*self.imported_tables_ptr().add(index) }
220    }
221
    /// Return a pointer to the `VMTableImport`s.
223    fn imported_tables_ptr(&self) -> *mut VMTableImport {
224        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_tables_begin()) }
225    }
226
227    /// Return the indexed `VMMemoryImport`.
228    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
229        let index = usize::try_from(index.as_u32()).unwrap();
230        let addr = unsafe { self.imported_memories_ptr().add(index) };
231        let align = std::mem::align_of::<VMMemoryImport>();
232        debug_assert!(
233            addr as usize % align == 0,
234            "VMMemoryImport addr is not aligned to {}: {:p}",
235            align,
236            addr
237        );
238        unsafe { &*addr }
239    }
240
241    /// Return a pointer to the `VMMemoryImport`s.
242    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
243        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_memories_begin()) }
244    }
245
246    /// Return the indexed `VMGlobalImport`.
247    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
248        let index = usize::try_from(index.as_u32()).unwrap();
249        unsafe { &*self.imported_globals_ptr().add(index) }
250    }
251
252    /// Return a pointer to the `VMGlobalImport`s.
253    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
254        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_globals_begin()) }
255    }
256
257    /// Return the indexed `VMTableDefinition`.
258    #[allow(unused)]
259    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
260        unsafe { *self.table_ptr(index).as_ref() }
261    }
262
263    /// Updates the value for a defined table to `VMTableDefinition`.
264    #[allow(unused)]
265    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
266        unsafe {
267            *self.table_ptr(index).as_ptr() = *table;
268        }
269    }
270
271    /// Return the indexed `VMTableDefinition`.
272    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
273        let index = usize::try_from(index.as_u32()).unwrap();
274        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
275    }
276
277    /// Return a pointer to the `VMTableDefinition`s.
278    fn tables_ptr(&self) -> *mut VMTableDefinition {
279        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_tables_begin()) }
280    }
281
282    /// Return the indexed `VMMemoryDefinition`.
283    fn memory_definition(&self, index: MemoryIndex) -> &VMMemoryDefinition {
284        match self.artifact.import_counts().local_memory_index(index) {
285            Ok(local) => unsafe { self.memory_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_memory(import).from.vmmemory().as_ref() },
287        }
288    }
289
290    #[allow(dead_code)]
291    /// Set the indexed memory to `VMMemoryDefinition`.
292    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
293        unsafe {
294            *self.memory_ptr(index).as_ptr() = *mem;
295        }
296    }
297
298    /// Return the indexed `VMMemoryDefinition`.
299    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
300        let index = usize::try_from(index.as_u32()).unwrap();
301        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
302    }
303
304    /// Return a pointer to the `VMMemoryDefinition`s.
305    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
306        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_memories_begin()) }
307    }
308
309    /// Return the indexed `VMGlobalDefinition`.
310    fn global(&self, index: GlobalIndex) -> &VMGlobalDefinition {
311        match self.artifact.import_counts().local_global_index(index) {
312            Ok(local) => unsafe { self.global_ptr(local).as_ref() },
313            Err(import) => unsafe { self.imported_global(import).definition.as_ref() },
314        }
315    }
316
317    /// Set the indexed global to `VMGlobalDefinition`.
318    #[allow(dead_code)]
319    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
320        unsafe {
321            *self.global_ptr(index).as_ptr() = global.clone();
322        }
323    }
324
325    /// Return the indexed `VMGlobalDefinition`.
326    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
327        let index = usize::try_from(index.as_u32()).unwrap();
328        // TODO:
329        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
330    }
331
332    /// Return a pointer to the `VMGlobalDefinition`s.
333    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
334        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_globals_begin()) }
335    }
336
337    /// Return a pointer to the `VMBuiltinFunctionsArray`.
338    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
339        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_builtin_functions_begin()) }
340    }
341
342    /// Return a reference to the vmctx used by compiled wasm code.
343    fn vmctx(&self) -> &VMContext {
344        &self.vmctx
345    }
346
347    /// Return a raw pointer to the vmctx used by compiled wasm code.
348    fn vmctx_ptr(&self) -> *mut VMContext {
349        self.vmctx() as *const VMContext as *mut VMContext
350    }
351
352    /// Return a reference to the custom state attached to this instance.
353    #[inline]
354    pub fn host_state(&self) -> &dyn Any {
355        &*self.host_state
356    }
357
358    /// Return a pointer to the trap catcher.
359    fn trap_catcher_ptr(&self) -> *mut *const u8 {
360        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_trap_handler()) }
361    }
362
363    /// Return a pointer to the gas limiter.
364    pub fn gas_counter_ptr(&self) -> *mut *const FastGasCounter {
365        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_gas_limiter_pointer()) }
366    }
367
    /// Return a pointer to the initial stack limit.
369    pub fn stack_limit_initial_ptr(&self) -> *mut i32 {
370        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_initial_begin()) }
371    }
372
    /// Return a pointer to the current stack limit.
374    pub fn stack_limit_ptr(&self) -> *mut i32 {
375        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_begin()) }
376    }
377
378    /// Invoke the WebAssembly start function of the instance, if one is present.
379    fn invoke_start_function(&self) -> Result<(), Trap> {
380        let start_index = match self.artifact.start_function() {
381            Some(idx) => idx,
382            None => return Ok(()),
383        };
384        let start_funcref = self.funcrefs[start_index];
385        // Make the call.
386        self.reset_stack_meter();
387        let result = unsafe {
388            catch_traps(|| {
389                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionEnvironment)>(
390                    start_funcref.func_ptr,
391                )(start_funcref.vmctx)
392            })
393        };
394        result
395    }
396
397    fn reset_stack_meter(&self) {
398        unsafe {
399            *(self.stack_limit_ptr()) = *(self.stack_limit_initial_ptr());
400        }
401    }
402
403    /// Return the offset from the vmctx pointer to its containing `Instance`.
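    ///
    /// A sketch of the pointer arithmetic this offset enables (recovering the
    /// containing `Instance` from a `vmctx` pointer):
    ///
    /// ```ignore
    /// let instance = (vmctx as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance;
    /// ```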
404    #[inline]
405    pub(crate) fn vmctx_offset() -> isize {
406        offset_of!(Self, vmctx) as isize
407    }
408
409    /// Return the table index for the given `VMTableDefinition`.
410    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
411        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
412        let end: *const VMTableDefinition = table;
        // TODO: Use `offset_from` once it stabilizes.
414        let index = LocalTableIndex::new(
415            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
416        );
417        assert_lt!(index.index(), self.tables.len());
418        index
419    }
420
421    /// Return the memory index for the given `VMMemoryDefinition`.
422    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
423        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
424        let end: *const VMMemoryDefinition = memory;
        // TODO: Use `offset_from` once it stabilizes.
426        let index = LocalMemoryIndex::new(
427            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
428        );
429        assert_lt!(index.index(), self.memories.len());
430        index
431    }
432
    /// Grow memory by the specified number of pages.
    ///
    /// Returns a `MemoryError` if the memory can't be grown by the
    /// specified number of pages.
437    pub(crate) fn memory_grow<IntoPages>(
438        &self,
439        memory_index: LocalMemoryIndex,
440        delta: IntoPages,
441    ) -> Result<Pages, MemoryError>
442    where
443        IntoPages: Into<Pages>,
444    {
445        let mem = self
446            .memories
447            .get(memory_index)
448            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
449        mem.grow(delta.into())
450    }
451
    /// Grow imported memory by the specified number of pages.
    ///
    /// Returns a `MemoryError` if the memory can't be grown by the
    /// specified number of pages.
456    ///
457    /// # Safety
458    /// This and `imported_memory_size` are currently unsafe because they
459    /// dereference the memory import's pointers.
460    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
461        &self,
462        memory_index: MemoryIndex,
463        delta: IntoPages,
464    ) -> Result<Pages, MemoryError>
465    where
466        IntoPages: Into<Pages>,
467    {
468        let import = self.imported_memory(memory_index);
469        import.from.grow(delta.into())
470    }
471
472    /// Returns the number of allocated wasm pages.
473    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
474        self.memories
475            .get(memory_index)
476            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()))
477            .size()
478    }
479
480    /// Returns the number of allocated wasm pages in an imported memory.
481    ///
482    /// # Safety
483    /// This and `imported_memory_grow` are currently unsafe because they
484    /// dereference the memory import's pointers.
485    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
486        self.imported_memory(memory_index).from.size()
487    }
488
489    /// Returns the number of elements in a given table.
490    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
491        self.tables[table_index].size()
492    }
493
494    /// Returns the number of elements in a given imported table.
495    ///
496    /// # Safety
497    /// `table_index` must be a valid, imported table index.
498    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
499        self.imported_table(table_index).from.size()
500    }
501
    /// Grow the table by the specified number of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified
    /// number of elements.
506    pub(crate) fn table_grow(
507        &self,
508        table_index: LocalTableIndex,
509        delta: u32,
510        init_value: TableElement,
511    ) -> Option<u32> {
512        let result = self
513            .tables
514            .get(table_index)
515            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
516            .grow(delta, init_value);
517
518        result
519    }
520
    /// Grow an imported table by the specified number of elements.
522    ///
523    /// # Safety
524    /// `table_index` must be a valid, imported table index.
525    pub(crate) unsafe fn imported_table_grow(
526        &self,
527        table_index: TableIndex,
528        delta: u32,
529        init_value: TableElement,
530    ) -> Option<u32> {
531        let import = self.imported_table(table_index);
532        import.from.grow(delta, init_value)
533    }
534
535    /// Get table element by index.
536    pub(crate) fn table_get(
537        &self,
538        table_index: LocalTableIndex,
539        index: u32,
540    ) -> Option<TableElement> {
541        self.tables
542            .get(table_index)
543            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
544            .get(index)
545    }
546
547    /// Returns the element at the given index.
548    ///
549    /// # Safety
550    /// `table_index` must be a valid, imported table index.
551    pub(crate) unsafe fn imported_table_get(
552        &self,
553        table_index: TableIndex,
554        index: u32,
555    ) -> Option<TableElement> {
556        let import = self.imported_table(table_index);
557        import.from.get(index)
558    }
559
560    /// Set table element by index.
561    pub(crate) fn table_set(
562        &self,
563        table_index: LocalTableIndex,
564        index: u32,
565        val: TableElement,
566    ) -> Result<(), Trap> {
567        self.tables
568            .get(table_index)
569            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
570            .set(index, val)
571    }
572
573    /// Set table element by index for an imported table.
574    ///
575    /// # Safety
576    /// `table_index` must be a valid, imported table index.
577    pub(crate) unsafe fn imported_table_set(
578        &self,
579        table_index: TableIndex,
580        index: u32,
581        val: TableElement,
582    ) -> Result<(), Trap> {
583        let import = self.imported_table(table_index);
584        import.from.set(index, val)
585    }
586
587    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
588        Some(self.get_vm_funcref(function_index))
589    }
590
591    /// Get a `VMFuncRef` for the given `FunctionIndex`.
592    fn get_vm_funcref(&self, index: FunctionIndex) -> VMFuncRef {
593        if index == FunctionIndex::reserved_value() {
594            return VMFuncRef::null();
595        }
596        VMFuncRef(&self.funcrefs[index])
597    }
598
599    /// The `table.init` operation: initializes a portion of a table with a
600    /// passive element.
601    ///
602    /// # Errors
603    ///
604    /// Returns a `Trap` error when the range within the table is out of bounds
605    /// or the range within the passive element is out of bounds.
606    pub(crate) fn table_init(
607        &self,
608        table_index: TableIndex,
609        elem_index: ElemIndex,
610        dst: u32,
611        src: u32,
612        len: u32,
613    ) -> Result<(), Trap> {
614        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init
615
616        let table = self.get_table(table_index);
617        let passive_elements = self.passive_elements.borrow();
618        let elem = passive_elements
619            .get(&elem_index)
620            .map_or::<&[VMFuncRef], _>(&[], |e| &**e);
621
622        if src
623            .checked_add(len)
624            .map_or(true, |n| n as usize > elem.len())
625            || dst.checked_add(len).map_or(true, |m| m > table.size())
626        {
627            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
628        }
629
630        for (dst, src) in (dst..dst + len).zip(src..src + len) {
631            table
632                .set(dst, TableElement::FuncRef(elem[src as usize]))
633                .expect("should never panic because we already did the bounds check above");
634        }
635
636        Ok(())
637    }
638
639    /// The `table.fill` operation: fills a portion of a table with a given value.
640    ///
641    /// # Errors
642    ///
    /// Returns a `Trap` error when the range within the table is out of bounds.
644    pub(crate) fn table_fill(
645        &self,
646        table_index: TableIndex,
647        start_index: u32,
648        item: TableElement,
649        len: u32,
650    ) -> Result<(), Trap> {
651        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init
652
653        let table = self.get_table(table_index);
654        let table_size = table.size() as usize;
655
656        if start_index
657            .checked_add(len)
658            .map_or(true, |n| n as usize > table_size)
659        {
660            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
661        }
662
663        for i in start_index..(start_index + len) {
664            table
665                .set(i, item.clone())
666                .expect("should never panic because we already did the bounds check above");
667        }
668
669        Ok(())
670    }
671
672    /// Drop an element.
673    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
674        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
675
676        let mut passive_elements = self.passive_elements.borrow_mut();
677        passive_elements.remove(&elem_index);
678        // Note that we don't check that we actually removed an element because
679        // dropping a non-passive element is a no-op (not a trap).
680    }
681
682    /// Do a `memory.copy` for a locally defined memory.
683    ///
684    /// # Errors
685    ///
686    /// Returns a `Trap` error when the source or destination ranges are out of
687    /// bounds.
688    pub(crate) fn local_memory_copy(
689        &self,
690        memory_index: LocalMemoryIndex,
691        dst: u32,
692        src: u32,
693        len: u32,
694    ) -> Result<(), Trap> {
695        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy
696        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
697        // The following memory copy is not synchronized and is not atomic:
698        unsafe { memory.memory_copy(dst, src, len) }
699    }
700
701    /// Perform a `memory.copy` on an imported memory.
702    pub(crate) fn imported_memory_copy(
703        &self,
704        memory_index: MemoryIndex,
705        dst: u32,
706        src: u32,
707        len: u32,
708    ) -> Result<(), Trap> {
709        let import = self.imported_memory(memory_index);
710        // The following memory copy is not synchronized and is not atomic:
711        unsafe { import.from.vmmemory().as_ref().memory_copy(dst, src, len) }
712    }
713
714    /// Perform the `memory.fill` operation on a locally defined memory.
715    ///
716    /// # Errors
717    ///
718    /// Returns a `Trap` error if the memory range is out of bounds.
719    pub(crate) fn local_memory_fill(
720        &self,
721        memory_index: LocalMemoryIndex,
722        dst: u32,
723        val: u32,
724        len: u32,
725    ) -> Result<(), Trap> {
726        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
727        // The following memory fill is not synchronized and is not atomic:
728        unsafe { memory.memory_fill(dst, val, len) }
729    }
730
731    /// Perform the `memory.fill` operation on an imported memory.
732    ///
733    /// # Errors
734    ///
735    /// Returns a `Trap` error if the memory range is out of bounds.
736    pub(crate) fn imported_memory_fill(
737        &self,
738        memory_index: MemoryIndex,
739        dst: u32,
740        val: u32,
741        len: u32,
742    ) -> Result<(), Trap> {
743        let import = self.imported_memory(memory_index);
744        // The following memory fill is not synchronized and is not atomic:
745        unsafe { import.from.vmmemory().as_ref().memory_fill(dst, val, len) }
746    }
747
748    /// Performs the `memory.init` operation.
749    ///
750    /// # Errors
751    ///
752    /// Returns a `Trap` error if the destination range is out of this module's
753    /// memory's bounds or if the source range is outside the data segment's
754    /// bounds.
755    pub(crate) fn memory_init(
756        &self,
757        memory_index: MemoryIndex,
758        data_index: DataIndex,
759        dst: u32,
760        src: u32,
761        len: u32,
762    ) -> Result<(), Trap> {
763        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init
764
765        let memory = self.memory_definition(memory_index);
766        let passive_data = self.passive_data.borrow();
767        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);
768
769        let oob_access = src
770            .checked_add(len)
771            .map_or(true, |n| n as usize > data.len())
772            || dst.checked_add(len).map_or(true, |m| {
773                usize::try_from(m).unwrap() > memory.current_length
774            });
775
776        if oob_access {
777            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
778        }
779        let src_slice = &data[src as usize..(src + len) as usize];
780        unsafe {
781            let dst_start = memory.base.add(dst as usize);
782            let dst_slice = slice::from_raw_parts_mut(dst_start, len as usize);
783            dst_slice.copy_from_slice(src_slice);
784        }
785        Ok(())
786    }
787
788    /// Drop the given data segment, truncating its length to zero.
789    pub(crate) fn data_drop(&self, data_index: DataIndex) {
790        let mut passive_data = self.passive_data.borrow_mut();
791        passive_data.remove(&data_index);
792    }
793
794    /// Get a table by index regardless of whether it is locally-defined or an
795    /// imported, foreign table.
796    pub(crate) fn get_table(&self, table_index: TableIndex) -> &dyn Table {
797        match self.artifact.import_counts().local_table_index(table_index) {
798            Ok(local) => self.get_local_table(local),
799            Err(import) => self.get_foreign_table(import),
800        }
801    }
802
803    /// Get a locally-defined table.
804    pub(crate) fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
805        self.tables[index].as_ref()
806    }
807
808    /// Get an imported, foreign table.
809    pub(crate) fn get_foreign_table(&self, index: TableIndex) -> &dyn Table {
810        let import = self.imported_table(index);
811        &*import.from
812    }
813}
814
815/// A handle holding an `InstanceRef`, which holds an `Instance`
816/// of a WebAssembly module.
817///
818/// This is more or less a public facade of the private `Instance`,
819/// providing useful higher-level API.
820#[derive(Debug, PartialEq)]
821pub struct InstanceHandle {
822    /// The [`InstanceRef`]. See its documentation to learn more.
823    instance: InstanceRef,
824}
825
826impl InstanceHandle {
827    /// Create a new `InstanceHandle` pointing at a new [`InstanceRef`].
828    ///
829    /// # Safety
830    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been thoroughly
    /// audited for safety. As a result, the `unsafe` on this method is a
    /// low-overhead way of saying “this is an extremely unsafe type to work
    /// with”.
836    ///
837    /// Extreme care must be taken when working with `InstanceHandle` and it's
838    /// recommended to have relatively intimate knowledge of how it works
839    /// internally if you'd like to do so. If possible it's recommended to use
840    /// the `wasmer` crate API rather than this type since that is vetted for
841    /// safety.
842    ///
843    /// However the following must be taken care of before calling this function:
844    /// - The memory at `instance.tables_ptr()` must be initialized with data for
845    ///   all the local tables.
846    /// - The memory at `instance.memories_ptr()` must be initialized with data for
847    ///   all the local memories.
848    // FIXME: instances should just store a reference to an Artifact
849    #[allow(clippy::too_many_arguments)]
850    pub unsafe fn new(
851        artifact: Arc<dyn Artifact>,
852        allocator: InstanceAllocator,
853        finished_memories: BoxedSlice<LocalMemoryIndex, Arc<dyn Memory>>,
854        finished_tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,
855        finished_globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,
856        imports: Imports,
857        passive_data: BTreeMap<DataIndex, Arc<[u8]>>,
858        host_state: Box<dyn Any>,
859        imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,
860        instance_config: InstanceConfig,
861    ) -> Self {
862        let vmctx_globals = finished_globals
863            .values()
864            .map(|m| m.vmglobal())
865            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
866            .into_boxed_slice();
867        let passive_data = RefCell::new(passive_data);
868
869        let handle = {
870            // use dummy value to create an instance so we can get the vmctx pointer
871            let funcrefs = PrimaryMap::new().into_boxed_slice();
872            // Create the `Instance`. The unique, the One.
873            let instance = Instance {
874                artifact,
875                config: instance_config.clone(),
876                memories: finished_memories,
877                tables: finished_tables,
878                globals: finished_globals,
879                passive_elements: Default::default(),
880                passive_data,
881                host_state,
882                funcrefs,
883                imported_function_envs,
884                vmctx: VMContext {},
885            };
886
887            let mut instance_ref = allocator.write_instance(instance);
888
889            // Set the funcrefs after we've built the instance
890            {
891                let instance = instance_ref.as_mut().unwrap();
892                let vmctx_ptr = instance.vmctx_ptr();
893                instance.funcrefs = build_funcrefs(
894                    &imports,
895                    instance.artifact.functions().iter().map(|(_, f)| f),
896                    vmctx_ptr,
897                );
898                *(instance.trap_catcher_ptr()) = get_trap_handler();
899                *(instance.gas_counter_ptr()) = instance_config.gas_counter;
900                *(instance.stack_limit_ptr()) = instance_config.stack_limit;
901                *(instance.stack_limit_initial_ptr()) = instance_config.stack_limit;
902            }
903
904            Self {
905                instance: instance_ref,
906            }
907        };
908        let instance = handle.instance().as_ref();
909
910        ptr::copy(
911            instance.artifact.signatures().as_ptr(),
912            instance.signature_ids_ptr() as *mut VMSharedSignatureIndex,
913            instance.artifact.signatures().len(),
914        );
915
916        ptr::copy(
917            imports.functions.values().as_slice().as_ptr(),
918            instance.imported_functions_ptr() as *mut VMFunctionImport,
919            imports.functions.len(),
920        );
921        ptr::copy(
922            imports.tables.values().as_slice().as_ptr(),
923            instance.imported_tables_ptr() as *mut VMTableImport,
924            imports.tables.len(),
925        );
926        ptr::copy(
927            imports.memories.values().as_slice().as_ptr(),
928            instance.imported_memories_ptr() as *mut VMMemoryImport,
929            imports.memories.len(),
930        );
931        ptr::copy(
932            imports.globals.values().as_slice().as_ptr(),
933            instance.imported_globals_ptr() as *mut VMGlobalImport,
934            imports.globals.len(),
935        );
936        // these should already be set, add asserts here? for:
937        // - instance.tables_ptr() as *mut VMTableDefinition
938        // - instance.memories_ptr() as *mut VMMemoryDefinition
939        ptr::copy(
940            vmctx_globals.values().as_slice().as_ptr(),
941            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
942            vmctx_globals.len(),
943        );
944        ptr::write(
945            instance.builtin_functions_ptr() as *mut VMBuiltinFunctionsArray,
946            VMBuiltinFunctionsArray::initialized(),
947        );
948
949        // Perform infallible initialization in this constructor, while fallible
950        // initialization is deferred to the `initialize` method.
951        initialize_passive_elements(instance);
952        initialize_globals(instance);
953        handle
954    }
955
956    /// Return a reference to the contained `Instance`.
957    pub(crate) fn instance(&self) -> &InstanceRef {
958        &self.instance
959    }
960
    /// Finishes the instantiation process started by `InstanceHandle::new`.
962    ///
963    /// # Safety
964    ///
965    /// Only safe to call immediately after instantiation.
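    ///
    /// A sketch of the intended two-phase flow (construction of the arguments
    /// elided; the local names are illustrative):
    ///
    /// ```ignore
    /// let handle = unsafe {
    ///     InstanceHandle::new(
    ///         artifact, allocator, finished_memories, finished_tables,
    ///         finished_globals, imports, passive_data, host_state,
    ///         imported_function_envs, instance_config,
    ///     )
    /// };
    /// // Runs data/element initializers and the Wasm start function.
    /// unsafe { handle.finish_instantiation()? };
    /// ```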
966    pub unsafe fn finish_instantiation(&self) -> Result<(), Trap> {
967        let instance = self.instance().as_ref();
968
969        // Apply the initializers.
970        initialize_tables(instance)?;
971        initialize_memories(
972            instance,
973            instance.artifact.data_segments().iter().map(Into::into),
974        )?;
975
976        // The WebAssembly spec specifies that the start function is
977        // invoked automatically at instantiation time.
978        instance.invoke_start_function()?;
979        Ok(())
980    }
981
982    /// See [`traphandlers::wasmer_call_trampoline`].
983    pub unsafe fn invoke_function(
984        &self,
985        vmctx: VMFunctionEnvironment,
986        trampoline: VMTrampoline,
987        callee: *const VMFunctionBody,
988        values_vec: *mut u8,
989    ) -> Result<(), Trap> {
        // `vmctx` is always `*mut VMContext` here, as we are calling into Wasm.
991        {
992            let instance = self.instance().as_ref();
993            instance.reset_stack_meter();
994        }
995        wasmer_call_trampoline(vmctx, trampoline, callee, values_vec)
996    }
997
998    /// Return a reference to the vmctx used by compiled wasm code.
999    pub fn vmctx(&self) -> &VMContext {
1000        self.instance().as_ref().vmctx()
1001    }
1002
1003    /// Return a raw pointer to the vmctx used by compiled wasm code.
1004    pub fn vmctx_ptr(&self) -> *mut VMContext {
1005        self.instance().as_ref().vmctx_ptr()
1006    }
1007
1008    /// Return a reference to the `VMOffsets` to get offsets in the
1009    /// `Self::vmctx_ptr` region. Be careful when doing pointer
1010    /// arithmetic!
1011    pub fn vmoffsets(&self) -> &VMOffsets {
1012        self.instance().as_ref().offsets()
1013    }
1014
1015    /// Lookup an exported function with the specified function index.
1016    pub fn function_by_index(&self, idx: FunctionIndex) -> Option<VMFunction> {
1017        let instance = self.instance.as_ref();
1018
1019        let (address, signature, vmctx, call_trampoline) =
1020            match instance.artifact.import_counts().local_function_index(idx) {
1021                Ok(local) => {
1022                    let func = instance.artifact.functions().get(local)?;
1023                    (
1024                        *(func.body),
1025                        func.signature,
1026                        VMFunctionEnvironment {
1027                            vmctx: instance.vmctx_ptr(),
1028                        },
1029                        Some(func.trampoline),
1030                    )
1031                }
1032                Err(import) => {
1033                    let import = instance.imported_function(import);
1034                    (
1035                        *(import.body),
1036                        import.signature,
1037                        import.environment,
1038                        import.trampoline,
1039                    )
1040                }
1041            };
1042        Some(VMFunction {
1043            // Any function received is already static at this point as:
1044            // 1. All locally defined functions in the Wasm have a static signature.
1045            // 2. All the imported functions are already static (because
1046            //    they point to the trampolines rather than the dynamic addresses).
1047            kind: VMFunctionKind::Static,
1048            address,
1049            signature,
1050            vmctx,
1051            call_trampoline,
1052            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
1053        })
1054    }
1055
    /// Lookup the memory (locally defined or imported) with the given index.
1057    fn memory_by_index(&self, index: MemoryIndex) -> Option<crate::VMMemory> {
1058        let instance = self.instance.as_ref();
1059        let from = match instance.artifact.import_counts().local_memory_index(index) {
1060            Ok(local) => Arc::clone(&instance.memories[local]),
1061            Err(import) => Arc::clone(&instance.imported_memory(import).from),
1062        };
1063        Some(crate::VMMemory {
1064            from,
1065            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
1066        })
1067    }
1068
    /// Lookup the table (locally defined or imported) with the given index.
1070    fn table_by_index(&self, index: TableIndex) -> Option<crate::VMTable> {
1071        let instance = self.instance.as_ref();
1072        let from = match instance.artifact.import_counts().local_table_index(index) {
1073            Ok(local) => Arc::clone(&instance.tables[local]),
1074            Err(import) => Arc::clone(&instance.imported_table(import).from),
1075        };
1076        Some(crate::VMTable {
1077            from,
1078            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
1079        })
1080    }
1081
1082    /// Obtain a reference to a global entity by its index.
1083    pub fn global_by_index(&self, index: GlobalIndex) -> Option<VMGlobal> {
1084        let instance = self.instance.as_ref();
1085        let from = match instance.artifact.import_counts().local_global_index(index) {
1086            Ok(local) => Arc::clone(&instance.globals[local]),
1087            Err(import) => Arc::clone(&instance.imported_global(import).from),
1088        };
1089        Some(crate::VMGlobal {
1090            from,
1091            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
1092        })
1093    }
1094
    /// Lookup an exported entity (function, table, global, or memory) by name.
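    ///
    /// A small usage sketch ("add" is a hypothetical export name):
    ///
    /// ```ignore
    /// if let Some(VMExtern::Function(func)) = handle.lookup("add") {
    ///     // `func` is a `VMFunction` describing the export.
    /// }
    /// ```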
1096    pub fn lookup(&self, field: &str) -> Option<VMExtern> {
1097        let instance = self.instance.as_ref();
1098        Some(match instance.artifact.export_field(field)? {
1099            ExportIndex::Function(idx) => VMExtern::Function(self.function_by_index(idx)?),
1100            ExportIndex::Table(idx) => VMExtern::Table(self.table_by_index(idx)?),
1101            ExportIndex::Global(idx) => VMExtern::Global(self.global_by_index(idx)?),
1102            ExportIndex::Memory(idx) => VMExtern::Memory(self.memory_by_index(idx)?),
1103        })
1104    }
1105
1106    /// Return a reference to the custom state attached to this instance.
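    ///
    /// A small sketch of reading it back, where `MyState` stands in for whatever
    /// type the embedder stored:
    ///
    /// ```ignore
    /// if let Some(state) = handle.host_state().downcast_ref::<MyState>() {
    ///     // use `state`
    /// }
    /// ```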
1107    pub fn host_state(&self) -> &dyn Any {
1108        self.instance().as_ref().host_state()
1109    }
1110
1111    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
1112    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
1113        self.instance().as_ref().memory_index(memory)
1114    }
1115
    /// Grow memory in this instance by the specified number of pages.
    ///
    /// Returns a `MemoryError` if the memory can't be grown by the
    /// specified number of pages.
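    ///
    /// A small usage sketch (index and delta values are illustrative):
    ///
    /// ```ignore
    /// // Grow the first local memory by one Wasm page (64 KiB).
    /// let grown = handle.memory_grow(LocalMemoryIndex::new(0), Pages(1))?;
    /// ```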
1120    pub fn memory_grow<IntoPages>(
1121        &self,
1122        memory_index: LocalMemoryIndex,
1123        delta: IntoPages,
1124    ) -> Result<Pages, MemoryError>
1125    where
1126        IntoPages: Into<Pages>,
1127    {
1128        self.instance().as_ref().memory_grow(memory_index, delta)
1129    }
1130
1131    /// Return the table index for the given `VMTableDefinition` in this instance.
1132    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
1133        self.instance().as_ref().table_index(table)
1134    }
1135
    /// Grow the table in this instance by the specified number of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified
    /// number of elements.
1140    pub fn table_grow(
1141        &self,
1142        table_index: LocalTableIndex,
1143        delta: u32,
1144        init_value: TableElement,
1145    ) -> Option<u32> {
1146        self.instance()
1147            .as_ref()
1148            .table_grow(table_index, delta, init_value)
1149    }
1150
1151    /// Get table element reference.
1152    ///
1153    /// Returns `None` if index is out of bounds.
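    ///
    /// A small usage sketch (index values are illustrative):
    ///
    /// ```ignore
    /// // Read element 0 of the instance's first local table, if in bounds.
    /// let elem = handle.table_get(LocalTableIndex::new(0), 0);
    /// ```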
1154    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
1155        self.instance().as_ref().table_get(table_index, index)
1156    }
1157
1158    /// Set table element reference.
1159    ///
    /// Returns an error if the index is out of bounds.
1161    pub fn table_set(
1162        &self,
1163        table_index: LocalTableIndex,
1164        index: u32,
1165        val: TableElement,
1166    ) -> Result<(), Trap> {
1167        self.instance().as_ref().table_set(table_index, index, val)
1168    }
1169
1170    /// Get a table defined locally within this module.
1171    pub fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
1172        self.instance().as_ref().get_local_table(index)
1173    }
1174}
1175
1176/// Initializes the host environments.
1177///
1178/// # Safety
1179/// - This function must be called with the correct `Err` type parameter: the error type is not
1180///   visible to code in `wasmer_vm`, so it's the caller's responsibility to ensure these
1181///   functions are called with the correct type.
1182/// - `instance_ptr` must point to a valid `wasmer::Instance`.
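///
/// A sketch of a call site; `HostEnvInitError` stands in for the caller's real
/// error type:
///
/// ```ignore
/// unsafe { initialize_host_envs::<HostEnvInitError>(&handle_mutex, instance_ptr)? };
/// ```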
1183pub unsafe fn initialize_host_envs<Err: Sized>(
1184    handle: &std::sync::Mutex<InstanceHandle>,
1185    instance_ptr: *const ffi::c_void,
1186) -> Result<(), Err> {
1187    let initializers = {
1188        let mut instance_lock = handle.lock().unwrap();
1189        let instance_ref = instance_lock.instance.as_mut_unchecked();
1190        let mut initializers = vec![];
1191        for import_function_env in instance_ref.imported_function_envs.values_mut() {
1192            match import_function_env {
1193                ImportFunctionEnv::Env {
1194                    env,
1195                    ref mut initializer,
1196                    ..
1197                } => {
1198                    if let Some(init) = initializer.take() {
1199                        initializers.push((init, *env));
1200                    }
1201                }
1202                ImportFunctionEnv::NoEnv => (),
1203            }
1204        }
1205        initializers
1206    };
1207    for (init, env) in initializers {
1208        let f = mem::transmute::<&ImportInitializerFuncPtr, &ImportInitializerFuncPtr<Err>>(&init);
1209        f(env, instance_ptr)?;
1210    }
1211    Ok(())
1212}
1213
1214/// Compute the offset for a memory data initializer.
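///
/// For example (illustrative values): a data segment whose location has
/// `offset == 8` and whose `base` global currently holds `32` starts at
/// byte `40` of the target memory.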
1215fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1216    let mut start = init.location.offset;
1217    if let Some(base) = init.location.base {
1218        let val = instance.global(base).to_u32();
1219        start += usize::try_from(val).unwrap();
1220    }
1221    start
1222}
1223
1224#[allow(clippy::mut_from_ref)]
1225/// Return a byte-slice view of a memory's data.
1226unsafe fn get_memory_slice<'instance>(
1227    init: &DataInitializer<'_>,
1228    instance: &'instance Instance,
1229) -> &'instance mut [u8] {
1230    let memory = instance.memory_definition(init.location.memory_index);
1231    slice::from_raw_parts_mut(memory.base, memory.current_length)
1232}
1233
1234/// Compute the offset for a table element initializer.
1235fn get_table_init_start(init: &OwnedTableInitializer, instance: &Instance) -> usize {
1236    let mut start = init.offset;
1237    if let Some(base) = init.base {
1238        let val = instance.global(base).to_u32();
1239        start += usize::try_from(val).unwrap();
1240    }
1241    start
1242}
1243
/// Initialize the tables from the provided element-segment initializers.
1245fn initialize_tables(instance: &Instance) -> Result<(), Trap> {
1246    for init in instance.artifact.element_segments() {
1247        let start = get_table_init_start(init, instance);
1248        let table = instance.get_table(init.table_index);
1249
1250        if start
1251            .checked_add(init.elements.len())
1252            .map_or(true, |end| end > table.size() as usize)
1253        {
1254            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1255        }
1256
1257        for (i, func_idx) in init.elements.iter().enumerate() {
1258            let anyfunc = instance.get_vm_funcref(*func_idx);
1259            table
1260                .set(
1261                    u32::try_from(start + i).unwrap(),
1262                    TableElement::FuncRef(anyfunc),
1263                )
1264                .unwrap();
1265        }
1266    }
1267
1268    Ok(())
1269}
1270
1271/// Initialize the `Instance::passive_elements` map by resolving the
1272/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1273/// this instance.
1274fn initialize_passive_elements(instance: &Instance) {
1275    let mut passive_elements = instance.passive_elements.borrow_mut();
1276    debug_assert!(
1277        passive_elements.is_empty(),
1278        "should only be called once, at initialization time"
1279    );
1280
1281    passive_elements.extend(
1282        instance
1283            .artifact
1284            .passive_elements()
1285            .iter()
1286            .filter(|(_, segments)| !segments.is_empty())
1287            .map(|(idx, segments)| {
1288                (
1289                    *idx,
1290                    segments
1291                        .iter()
1292                        .map(|s| instance.get_vm_funcref(*s))
1293                        .collect(),
1294                )
1295            }),
1296    );
1297}
1298
/// Initialize the memories from the provided data initializers.
1300fn initialize_memories<'a>(
1301    instance: &Instance,
1302    data_initializers: impl Iterator<Item = DataInitializer<'a>>,
1303) -> Result<(), Trap> {
1304    for init in data_initializers {
1305        let memory = instance.memory_definition(init.location.memory_index);
1306
1307        let start = get_memory_init_start(&init, instance);
1308        if start
1309            .checked_add(init.data.len())
1310            .map_or(true, |end| end > memory.current_length)
1311        {
1312            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1313        }
1314
1315        unsafe {
1316            let mem_slice = get_memory_slice(&init, instance);
1317            let end = start + init.data.len();
1318            let to_init = &mut mem_slice[start..end];
1319            to_init.copy_from_slice(init.data);
1320        }
1321    }
1322
1323    Ok(())
1324}
1325
1326fn initialize_globals(instance: &Instance) {
1327    for (index, (_, initializer)) in instance.artifact.globals().iter().enumerate() {
1328        unsafe {
1329            let to = instance.global_ptr(LocalGlobalIndex::new(index)).as_ptr();
1330            match initializer {
1331                GlobalInit::I32Const(x) => *(*to).as_i32_mut() = *x,
1332                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = *x,
1333                GlobalInit::F32Const(x) => *(*to).as_f32_mut() = *x,
1334                GlobalInit::F64Const(x) => *(*to).as_f64_mut() = *x,
1335                GlobalInit::V128Const(x) => *(*to).as_bytes_mut() = *x.bytes(),
1336                GlobalInit::GetGlobal(x) => *to = instance.global(*x).clone(),
1337                GlobalInit::RefNullConst => *(*to).as_funcref_mut() = VMFuncRef::null(),
1338                GlobalInit::RefFunc(func_idx) => {
1339                    let funcref = instance.func_ref(*func_idx).unwrap();
1340                    *(*to).as_funcref_mut() = funcref;
1341                }
1342            }
1343        }
1344    }
1345}
1346
1347/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1348/// future funcref operations are just looking up this data.
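///
/// The resulting slice is indexed by `FunctionIndex`: imported functions come
/// first (in import order), followed by locally defined functions, matching the
/// Wasm function index space.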
1349pub fn build_funcrefs<'a>(
1350    imports: &Imports,
1351    finished_functions: impl ExactSizeIterator<Item = &'a VMLocalFunction>,
1352    // vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1353    vmctx_ptr: *mut VMContext,
1354) -> BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc> {
1355    let mut func_refs =
1356        PrimaryMap::with_capacity(imports.functions.len() + finished_functions.len());
1357    for (_, import) in imports.functions.iter() {
1358        let anyfunc = VMCallerCheckedAnyfunc {
1359            func_ptr: *(import.body),
1360            type_index: import.signature,
1361            vmctx: import.environment,
1362        };
1363        func_refs.push(anyfunc);
1364    }
1365    // local functions
1366    for function in finished_functions {
1367        let anyfunc = VMCallerCheckedAnyfunc {
1368            func_ptr: *(function.body),
1369            type_index: function.signature,
1370            vmctx: VMFunctionEnvironment { vmctx: vmctx_ptr },
1371        };
1372        func_refs.push(anyfunc);
1373    }
1374    func_refs.into_boxed_slice()
1375}