wasmer_vm/instance/
mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17    memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
18    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
19    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
20    VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTagImport,
21    VMTrampoline,
22};
23use crate::{wasmer_call_trampoline, FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag};
24use crate::{LinearMemory, NotifyLocation};
25use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
26pub use allocator::InstanceAllocator;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
39use wasmer_types::{
40    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41    LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, LocalTagIndex,
42    MemoryError, MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer,
43    TagIndex, VMOffsets,
44};
45
/// A WebAssembly instance.
///
/// The type is dynamically-sized. Indeed, the `vmctx` field can
/// contain various data. That's why the type has a C representation
/// to ensure that the `vmctx` field is last. See the documentation of
/// the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    // NOTE(review): raw pointer — assumed to outlive the instance; set up
    // outside this view.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly tag data (the original comment said "global data",
    /// which was a copy-paste slip).
    tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member).
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107        formatter.debug_struct("Instance").finish()
108    }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must stay within the `vmctx` region described by
    /// `self.offsets`; the returned pointer is only valid while the
    /// instance is alive.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }
120
    /// Return the `ModuleInfo` this instance was instantiated from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }
124
    /// Borrow the `ModuleInfo` directly (without the `Arc` wrapper).
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }
128
    /// Borrow the store objects owning this instance.
    fn context(&self) -> &StoreObjects {
        // SAFETY: `self.context` is assumed to point at the owning store's
        // live `StoreObjects` (set up outside this view) — TODO confirm.
        unsafe { &*self.context }
    }
132
    /// Mutably borrow the store objects owning this instance.
    fn context_mut(&mut self) -> &mut StoreObjects {
        // SAFETY: same assumption as `context()`; `&mut self` guarantees
        // exclusive access through this instance.
        unsafe { &mut *self.context }
    }
136
    /// Offsets in the `vmctx` region.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
141
    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        // The signature-id array lives inside `vmctx` at the offset recorded
        // in `self.offsets`.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
146
    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // No bounds check: callers must pass a valid imported-function index.
        unsafe { &*self.imported_functions_ptr().add(index) }
    }
152
    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }
157
    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // No bounds check: callers must pass a valid imported-table index.
        unsafe { &*self.imported_tables_ptr().add(index) }
    }
163
    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }
168
    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // No bounds check: callers must pass a valid imported-memory index.
        unsafe { &*self.imported_memories_ptr().add(index) }
    }
174
    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }
179
    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // No bounds check: callers must pass a valid imported-global index.
        unsafe { &*self.imported_globals_ptr().add(index) }
    }
185
    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }
190
    /// Return the indexed `VMTagImport`.
    fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // No bounds check: callers must pass a valid imported-tag index.
        unsafe { &*self.imported_tags_ptr().add(index) }
    }
196
    /// Return a pointer to the `VMTagImport`s.
    fn imported_tags_ptr(&self) -> *mut VMTagImport {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tags_begin()) }
    }
201
202    /// Return the indexed `VMTableDefinition`.
203    #[allow(dead_code)]
204    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
205        unsafe { *self.table_ptr(index).as_ref() }
206    }
207
    #[allow(dead_code)]
    /// Updates the value for a defined table to `VMTableDefinition`.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        // Writes through the raw `vmctx` pointer, which is why `&self` suffices.
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }
215
    /// Return a pointer to the indexed `VMTableDefinition`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // Derived from `vmctx`, so the pointer is never null.
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }
221
    /// Return a pointer to the `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
226
227    #[allow(dead_code)]
228    /// Get a locally defined or imported memory.
229    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
230        if let Some(local_index) = self.module.local_memory_index(index) {
231            self.memory(local_index)
232        } else {
233            let import = self.imported_memory(index);
234            unsafe { *import.definition.as_ref() }
235        }
236    }
237
    /// Return a copy of the indexed `VMMemoryDefinition`.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }
242
    #[allow(dead_code)]
    /// Set the indexed memory to `VMMemoryDefinition`.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        // Writes through the raw `vmctx` pointer, which is why `&self` suffices.
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }
250
    /// Return a pointer to the indexed `VMMemoryDefinition`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // Derived from `vmctx`, so the pointer is never null.
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }
256
    /// Return a pointer to the `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        // Array located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
261
262    /// Get a locally defined or imported memory.
263    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
264        if let Some(local_index) = self.module.local_memory_index(index) {
265            unsafe {
266                self.memories
267                    .get(local_index)
268                    .unwrap()
269                    .get(self.context.as_ref().unwrap())
270            }
271        } else {
272            let import = self.imported_memory(index);
273            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
274        }
275    }
276
277    /// Get a locally defined or imported memory.
278    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
279        if let Some(local_index) = self.module.local_memory_index(index) {
280            unsafe {
281                self.memories
282                    .get_mut(local_index)
283                    .unwrap()
284                    .get_mut(self.context.as_mut().unwrap())
285            }
286        } else {
287            let import = self.imported_memory(index);
288            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
289        }
290    }
291
    /// Get a locally defined memory as mutable.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        // SAFETY: `self.context` is assumed to point at the owning store's
        // live `StoreObjects` (set up outside this view).
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
301
    /// Return a clone of the indexed `VMGlobalDefinition`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        // `VMGlobalDefinition` is cloned rather than copied here.
        unsafe { self.global_ptr(index).as_ref().clone() }
    }
306
    /// Set the indexed global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        // Writes through the raw `vmctx` pointer, which is why `&self` suffices.
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }
314
    /// Return a pointer to the indexed `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // `globals_ptr()` is an array of *pointers* to definitions, so the
        // stored pointer is read out of the slot and then wrapped; it must
        // not be null.
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }
321
    /// Return a pointer to the `VMGlobalDefinition` pointer array.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        // Array of pointers located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }
326
    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        // Located inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
331
    /// Return a reference to the vmctx used by compiled wasm code.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }
336
    /// Return a raw pointer to the vmctx used by compiled wasm code.
    fn vmctx_ptr(&self) -> *mut VMContext {
        // Cast away the shared borrow: compiled code mutates the trailing
        // `vmctx` region through this pointer.
        self.vmctx() as *const VMContext as *mut VMContext
    }
341
342    /// Invoke the WebAssembly start function of the instance, if one is present.
343    fn invoke_start_function(
344        &self,
345        config: &VMConfig,
346        trap_handler: Option<*const TrapHandlerFn<'static>>,
347    ) -> Result<(), Trap> {
348        let start_index = match self.module.start_function {
349            Some(idx) => idx,
350            None => return Ok(()),
351        };
352
353        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
354            Some(local_index) => {
355                let body = self
356                    .functions
357                    .get(local_index)
358                    .expect("function index is out of bounds")
359                    .0;
360                (
361                    body as *const _,
362                    VMFunctionContext {
363                        vmctx: self.vmctx_ptr(),
364                    },
365                )
366            }
367            None => {
368                assert_lt!(start_index.index(), self.module.num_imported_functions);
369                let import = self.imported_function(start_index);
370                (import.body, import.environment)
371            }
372        };
373
374        let sig = self.module.functions[start_index];
375        let trampoline = self.function_call_trampolines[sig];
376        let values_vec = vec![].as_mut_ptr();
377
378        unsafe {
379            // Even though we already know the type of the function we need to call, in certain
380            // specific cases trampoline prepare callee arguments for specific optimizations, such
381            // as passing g0 and m0_base_ptr as paramters.
382            wasmer_call_trampoline(
383                trap_handler,
384                config,
385                callee_vmctx,
386                trampoline,
387                callee_address,
388                values_vec,
389            )
390        }
391    }
392
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        // `vmctx` is the last field (see the struct docs), so this is the
        // distance from the start of `Instance` to the `vmctx` region.
        offset_of!(Self, vmctx) as isize
    }
398
399    /// Return the table index for the given `VMTableDefinition`.
400    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
401        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
402        let end: *const VMTableDefinition = table;
403        // TODO: Use `offset_from` once it stablizes.
404        let index = LocalTableIndex::new(
405            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
406        );
407        assert_lt!(index.index(), self.tables.len());
408        index
409    }
410
411    /// Return the memory index for the given `VMMemoryDefinition`.
412    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
413        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
414        let end: *const VMMemoryDefinition = memory;
415        // TODO: Use `offset_from` once it stablizes.
416        let index = LocalMemoryIndex::new(
417            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
418        );
419        assert_lt!(index.index(), self.memories.len());
420        index
421    }
422
    /// Grow memory by the specified amount of pages.
    ///
    /// Returns a `MemoryError` if the memory can't be grown by the specified
    /// amount of pages. (The old doc said "Returns `None`", but this function
    /// returns a `Result`, not an `Option`.)
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        // Copy the handle out first so the borrow of `self.memories` does not
        // overlap the `context_mut()` call below.
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
441
    /// Grow imported memory by the specified amount of pages.
    ///
    /// Returns a `MemoryError` if the memory can't be grown by the specified
    /// amount of pages. (The old doc said "Returns `None`", but this function
    /// returns a `Result`, not an `Option`.)
    ///
    /// # Safety
    /// This and `imported_memory_size` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        // The handle is copied out so the import borrow ends before
        // `context_mut()` re-borrows `self`.
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
462
463    /// Returns the number of allocated wasm pages.
464    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
465        let mem = *self
466            .memories
467            .get(memory_index)
468            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
469        mem.get(self.context()).size()
470    }
471
472    /// Returns the number of allocated wasm pages in an imported memory.
473    ///
474    /// # Safety
475    /// This and `imported_memory_grow` are currently unsafe because they
476    /// dereference the memory import's pointers.
477    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
478        let import = self.imported_memory(memory_index);
479        let mem = import.handle;
480        mem.get(self.context()).size()
481    }
482
483    /// Returns the number of elements in a given table.
484    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
485        let table = self
486            .tables
487            .get(table_index)
488            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
489        table.get(self.context()).size()
490    }
491
492    /// Returns the number of elements in a given imported table.
493    ///
494    /// # Safety
495    /// `table_index` must be a valid, imported table index.
496    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
497        let import = self.imported_table(table_index);
498        let table = import.handle;
499        table.get(self.context()).size()
500    }
501
502    /// Grow table by the specified amount of elements.
503    ///
504    /// Returns `None` if table can't be grown by the specified amount
505    /// of elements.
506    pub(crate) fn table_grow(
507        &mut self,
508        table_index: LocalTableIndex,
509        delta: u32,
510        init_value: TableElement,
511    ) -> Option<u32> {
512        let table = *self
513            .tables
514            .get(table_index)
515            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
516        table.get_mut(self.context_mut()).grow(delta, init_value)
517    }
518
519    /// Grow table by the specified amount of elements.
520    ///
521    /// # Safety
522    /// `table_index` must be a valid, imported table index.
523    pub(crate) unsafe fn imported_table_grow(
524        &mut self,
525        table_index: TableIndex,
526        delta: u32,
527        init_value: TableElement,
528    ) -> Option<u32> {
529        let import = self.imported_table(table_index);
530        let table = import.handle;
531        table.get_mut(self.context_mut()).grow(delta, init_value)
532    }
533
534    /// Get table element by index.
535    pub(crate) fn table_get(
536        &self,
537        table_index: LocalTableIndex,
538        index: u32,
539    ) -> Option<TableElement> {
540        let table = self
541            .tables
542            .get(table_index)
543            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
544        table.get(self.context()).get(index)
545    }
546
547    /// Returns the element at the given index.
548    ///
549    /// # Safety
550    /// `table_index` must be a valid, imported table index.
551    pub(crate) unsafe fn imported_table_get(
552        &self,
553        table_index: TableIndex,
554        index: u32,
555    ) -> Option<TableElement> {
556        let import = self.imported_table(table_index);
557        let table = import.handle;
558        table.get(self.context()).get(index)
559    }
560
561    /// Set table element by index.
562    pub(crate) fn table_set(
563        &mut self,
564        table_index: LocalTableIndex,
565        index: u32,
566        val: TableElement,
567    ) -> Result<(), Trap> {
568        let table = *self
569            .tables
570            .get(table_index)
571            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
572        table.get_mut(self.context_mut()).set(index, val)
573    }
574
575    /// Set table element by index for an imported table.
576    ///
577    /// # Safety
578    /// `table_index` must be a valid, imported table index.
579    pub(crate) unsafe fn imported_table_set(
580        &mut self,
581        table_index: TableIndex,
582        index: u32,
583        val: TableElement,
584    ) -> Result<(), Trap> {
585        let import = self.imported_table(table_index);
586        let table = import.handle;
587        table.get_mut(self.context_mut()).set(index, val)
588    }
589
590    /// Get a `VMFuncRef` for the given `FunctionIndex`.
591    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
592        if function_index == FunctionIndex::reserved_value() {
593            None
594        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
595            Some(VMFuncRef(NonNull::from(
596                &self.funcrefs[local_function_index],
597            )))
598        } else {
599            Some(VMFuncRef(self.imported_funcrefs[function_index]))
600        }
601    }
602
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        // SAFETY: `self.context` is assumed to point at the owning store's
        // live `StoreObjects`.
        let table = unsafe { table.get_mut(&mut *self.context) };
        // A dropped (or never-passive) element segment behaves as an empty slice.
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds-check both the source segment and the destination table,
        // treating `u32` overflow as out of bounds.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        // The additions cannot overflow: `checked_add` above returned `Some`.
        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
643
644    /// The `table.fill` operation: fills a portion of a table with a given value.
645    ///
646    /// # Errors
647    ///
648    /// Returns a `Trap` error when the range within the table is out of bounds
649    pub(crate) fn table_fill(
650        &mut self,
651        table_index: TableIndex,
652        start_index: u32,
653        item: TableElement,
654        len: u32,
655    ) -> Result<(), Trap> {
656        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init
657
658        let table = self.get_table(table_index);
659        let table_size = table.size() as usize;
660
661        if start_index
662            .checked_add(len)
663            .map_or(true, |n| n as usize > table_size)
664        {
665            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
666        }
667
668        for i in start_index..(start_index + len) {
669            table
670                .set(i, item.clone())
671                .expect("should never panic because we already did the bounds check above");
672        }
673
674        Ok(())
675    }
676
677    /// Drop an element.
678    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
679        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
680
681        let mut passive_elements = self.passive_elements.borrow_mut();
682        passive_elements.remove(&elem_index);
683        // Note that we don't check that we actually removed an element because
684        // dropping a non-passive element is a no-op (not a trap).
685    }
686
    /// Do a `memory.copy` for a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        let memory = self.memory(memory_index);
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(&memory, dst, src, len) }
    }
706
    /// Perform a `memory.copy` on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // Read the memory definition through the import's pointer.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(memory, dst, src, len) }
    }
720
    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(&memory, dst, val, len) }
    }
737
    /// Perform the `memory.fill` operation on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // Read the memory definition through the import's pointer.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(memory, dst, val, len) }
    }
755
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        // A dropped (or never-passive) data segment behaves as an empty slice.
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds-check both the source segment and the destination memory,
        // treating `u32` overflow as out of bounds.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        // The addition cannot overflow: `checked_add` above returned `Some`.
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
790
791    /// Drop the given data segment, truncating its length to zero.
792    pub(crate) fn data_drop(&self, data_index: DataIndex) {
793        let mut passive_data = self.passive_data.borrow_mut();
794        passive_data.remove(&data_index);
795    }
796
797    /// Get a table by index regardless of whether it is locally-defined or an
798    /// imported, foreign table.
799    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
800        if let Some(local_table_index) = self.module.local_table_index(table_index) {
801            self.get_local_table(local_table_index)
802        } else {
803            self.get_foreign_table(table_index)
804        }
805    }
806
807    /// Get a locally-defined table.
808    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
809        let table = self.tables[index];
810        table.get_mut(self.context_mut())
811    }
812
813    /// Get an imported, foreign table.
814    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
815        let import = self.imported_table(index);
816        let table = import.handle;
817        table.get_mut(self.context_mut())
818    }
819
820    /// Get a table handle by index regardless of whether it is locally-defined
821    /// or an imported, foreign table.
822    pub(crate) fn get_table_handle(
823        &mut self,
824        table_index: TableIndex,
825    ) -> InternalStoreHandle<VMTable> {
826        if let Some(local_table_index) = self.module.local_table_index(table_index) {
827            self.tables[local_table_index]
828        } else {
829            self.imported_table(table_index).handle
830        }
831    }
832
833    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
834        let location = NotifyLocation { address: dst };
835        let timeout = if timeout < 0 {
836            None
837        } else {
838            Some(std::time::Duration::from_nanos(timeout as u64))
839        };
840        match memory.do_wait(location, timeout) {
841            Ok(count) => Ok(count),
842            Err(_err) => {
843                // ret is None if there is more than 2^32 waiter in queue or some other error
844                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
845            }
846        }
847    }
848
849    /// Perform an Atomic.Wait32
850    pub(crate) fn local_memory_wait32(
851        &mut self,
852        memory_index: LocalMemoryIndex,
853        dst: u32,
854        val: u32,
855        timeout: i64,
856    ) -> Result<u32, Trap> {
857        let memory = self.memory(memory_index);
858        //if ! memory.shared {
859        // We should trap according to spec, but official test rely on not trapping...
860        //}
861
862        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
863
864        if let Ok(mut ret) = ret {
865            if ret == 0 {
866                let memory = self.get_local_vmmemory_mut(memory_index);
867                ret = Self::memory_wait(memory, dst, timeout)?;
868            }
869            Ok(ret)
870        } else {
871            ret
872        }
873    }
874
875    /// Perform an Atomic.Wait32
876    pub(crate) fn imported_memory_wait32(
877        &mut self,
878        memory_index: MemoryIndex,
879        dst: u32,
880        val: u32,
881        timeout: i64,
882    ) -> Result<u32, Trap> {
883        let import = self.imported_memory(memory_index);
884        let memory = unsafe { import.definition.as_ref() };
885        //if ! memory.shared {
886        // We should trap according to spec, but official test rely on not trapping...
887        //}
888
889        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
890        if let Ok(mut ret) = ret {
891            if ret == 0 {
892                let memory = self.get_vmmemory_mut(memory_index);
893                ret = Self::memory_wait(memory, dst, timeout)?;
894            }
895            Ok(ret)
896        } else {
897            ret
898        }
899    }
900
901    /// Perform an Atomic.Wait64
902    pub(crate) fn local_memory_wait64(
903        &mut self,
904        memory_index: LocalMemoryIndex,
905        dst: u32,
906        val: u64,
907        timeout: i64,
908    ) -> Result<u32, Trap> {
909        let memory = self.memory(memory_index);
910        //if ! memory.shared {
911        // We should trap according to spec, but official test rely on not trapping...
912        //}
913
914        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
915
916        if let Ok(mut ret) = ret {
917            if ret == 0 {
918                let memory = self.get_local_vmmemory_mut(memory_index);
919                ret = Self::memory_wait(memory, dst, timeout)?;
920            }
921            Ok(ret)
922        } else {
923            ret
924        }
925    }
926
927    /// Perform an Atomic.Wait64
928    pub(crate) fn imported_memory_wait64(
929        &mut self,
930        memory_index: MemoryIndex,
931        dst: u32,
932        val: u64,
933        timeout: i64,
934    ) -> Result<u32, Trap> {
935        let import = self.imported_memory(memory_index);
936        let memory = unsafe { import.definition.as_ref() };
937        //if ! memory.shared {
938        // We should trap according to spec, but official test rely on not trapping...
939        //}
940
941        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
942
943        if let Ok(mut ret) = ret {
944            if ret == 0 {
945                let memory = self.get_vmmemory_mut(memory_index);
946                ret = Self::memory_wait(memory, dst, timeout)?;
947            }
948            Ok(ret)
949        } else {
950            ret
951        }
952    }
953
954    /// Perform an Atomic.Notify
955    pub(crate) fn local_memory_notify(
956        &mut self,
957        memory_index: LocalMemoryIndex,
958        dst: u32,
959        count: u32,
960    ) -> Result<u32, Trap> {
961        let memory = self.get_local_vmmemory_mut(memory_index);
962        // fetch the notifier
963        let location = NotifyLocation { address: dst };
964        Ok(memory.do_notify(location, count))
965    }
966
967    /// Perform an Atomic.Notify
968    pub(crate) fn imported_memory_notify(
969        &mut self,
970        memory_index: MemoryIndex,
971        dst: u32,
972        count: u32,
973    ) -> Result<u32, Trap> {
974        let memory = self.get_vmmemory_mut(memory_index);
975        // fetch the notifier
976        let location = NotifyLocation { address: dst };
977        Ok(memory.do_notify(location, count))
978    }
979}
980
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// Recorded at allocation time so the matching `dealloc` in `Drop`
    /// can free exactly what was allocated.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
1001
1002/// VMInstance are created with an InstanceAllocator
1003/// and it will "consume" the memory
1004/// So the Drop here actualy free it (else it would be leaked)
1005impl Drop for VMInstance {
1006    fn drop(&mut self) {
1007        let instance_ptr = self.instance.as_ptr();
1008
1009        unsafe {
1010            // Need to drop all the actual Instance members
1011            instance_ptr.drop_in_place();
1012            // And then free the memory allocated for the Instance itself
1013            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
1014        }
1015    }
1016}
1017
impl VMInstance {
    /// Create a new `VMInstance` pointing at a new [`Instance`].
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        // Pointers to every local global's definition; copied into the vmctx
        // region further down.
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.get(context).vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        // Passive data segments start as a copy of the module's segments; they
        // are removed individually by `data_drop`.
        let passive_data = RefCell::new(
            module
                .passive_data
                .clone()
                .into_iter()
                .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                .collect::<HashMap<_, _>>(),
        );

        let handle = {
            let offsets = allocator.offsets().clone();
            // use dummy value to create an instance so we can get the vmctx pointer
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
            // Create the `Instance`. The unique, the One.
            let instance = Instance {
                module,
                context,
                offsets,
                memories: finished_memories,
                tables: finished_tables,
                tags: finished_tags,
                globals: finished_globals,
                functions: finished_functions,
                function_call_trampolines: finished_function_call_trampolines,
                passive_elements: Default::default(),
                passive_data,
                funcrefs,
                imported_funcrefs,
                vmctx: VMContext {},
            };

            let mut instance_handle = allocator.into_vminstance(instance);

            // Set the funcrefs after we've built the instance
            // (the real funcrefs need the final vmctx pointer, which only
            // exists once the Instance has its permanent allocation).
            {
                let instance = instance_handle.instance_mut();
                let vmctx_ptr = instance.vmctx_ptr();
                (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                    &instance.module,
                    context,
                    &imports,
                    &instance.functions,
                    &vmshared_signatures,
                    &instance.function_call_trampolines,
                    vmctx_ptr,
                );
            }

            instance_handle
        };
        let instance = handle.instance();

        // Copy the signature ids and all import records into the vmctx region
        // so compiled code can reach them at the fixed `VMOffsets` offsets.
        ptr::copy(
            vmshared_signatures.values().as_slice().as_ptr(),
            instance.signature_ids_ptr(),
            vmshared_signatures.len(),
        );
        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr(),
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr(),
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr(),
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr(),
            imports.globals.len(),
        );
        // these should already be set, add asserts here? for:
        // - instance.tables_ptr() as *mut VMTableDefinition
        // - instance.memories_ptr() as *mut VMMemoryDefinition
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr(),
            VMBuiltinFunctionsArray::initialized(),
        );

        // Perform infallible initialization in this constructor, while fallible
        // initialization is deferred to the `initialize` method.
        initialize_passive_elements(instance);
        initialize_globals(instance);

        Ok(handle)
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        // SAFETY: `self.instance` is the unique, live pointer created in
        // `new` and freed only in `Drop`.
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        // SAFETY: same as `instance`; `&mut self` guarantees exclusivity.
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `Instance::new`.
    ///
    /// Applies table/memory initializers and runs the start function.
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module declares no export under `field`.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = if let Some(def_index) = instance.module.local_tag_index(index) {
                    instance.tags[def_index]
                } else {
                    let import = instance.imported_tag(index);
                    import.handle
                };
                VMExtern::Tag(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1368
1369/// Compute the offset for a memory data initializer.
1370fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1371    let mut start = init.location.offset;
1372
1373    if let Some(base) = init.location.base {
1374        let val = unsafe {
1375            if let Some(def_index) = instance.module.local_global_index(base) {
1376                instance.global(def_index).val.u32
1377            } else {
1378                instance.imported_global(base).definition.as_ref().val.u32
1379            }
1380        };
1381        start += usize::try_from(val).unwrap();
1382    }
1383
1384    start
1385}
1386
1387#[allow(clippy::mut_from_ref)]
1388#[allow(dead_code)]
1389/// Return a byte-slice view of a memory's data.
1390unsafe fn get_memory_slice<'instance>(
1391    init: &DataInitializer<'_>,
1392    instance: &'instance Instance,
1393) -> &'instance mut [u8] {
1394    let memory = if let Some(local_memory_index) = instance
1395        .module
1396        .local_memory_index(init.location.memory_index)
1397    {
1398        instance.memory(local_memory_index)
1399    } else {
1400        let import = instance.imported_memory(init.location.memory_index);
1401        *import.definition.as_ref()
1402    };
1403    slice::from_raw_parts_mut(memory.base, memory.current_length)
1404}
1405
1406/// Compute the offset for a table element initializer.
1407fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1408    let mut start = init.offset;
1409
1410    if let Some(base) = init.base {
1411        let val = unsafe {
1412            if let Some(def_index) = instance.module.local_global_index(base) {
1413                instance.global(def_index).val.u32
1414            } else {
1415                instance.imported_global(base).definition.as_ref().val.u32
1416            }
1417        };
1418        start += usize::try_from(val).unwrap();
1419    }
1420
1421    start
1422}
1423
1424/// Initialize the table memory from the provided initializers.
1425fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
1426    let module = Arc::clone(&instance.module);
1427    for init in &module.table_initializers {
1428        let start = get_table_init_start(init, instance);
1429        let table = instance.get_table_handle(init.table_index);
1430        let table = unsafe { table.get_mut(&mut *instance.context) };
1431
1432        if start
1433            .checked_add(init.elements.len())
1434            .map_or(true, |end| end > table.size() as usize)
1435        {
1436            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1437        }
1438
1439        if let wasmer_types::Type::FuncRef = table.ty().ty {
1440            for (i, func_idx) in init.elements.iter().enumerate() {
1441                let anyfunc = instance.func_ref(*func_idx);
1442                table
1443                    .set(
1444                        u32::try_from(start + i).unwrap(),
1445                        TableElement::FuncRef(anyfunc),
1446                    )
1447                    .unwrap();
1448            }
1449        } else {
1450            for i in 0..init.elements.len() {
1451                table
1452                    .set(
1453                        u32::try_from(start + i).unwrap(),
1454                        TableElement::ExternRef(None),
1455                    )
1456                    .unwrap();
1457            }
1458        }
1459    }
1460
1461    Ok(())
1462}
1463
1464/// Initialize the `Instance::passive_elements` map by resolving the
1465/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1466/// this instance.
1467fn initialize_passive_elements(instance: &Instance) {
1468    let mut passive_elements = instance.passive_elements.borrow_mut();
1469    debug_assert!(
1470        passive_elements.is_empty(),
1471        "should only be called once, at initialization time"
1472    );
1473
1474    passive_elements.extend(
1475        instance
1476            .module
1477            .passive_elements
1478            .iter()
1479            .filter(|(_, segments)| !segments.is_empty())
1480            .map(|(idx, segments)| {
1481                (
1482                    *idx,
1483                    segments.iter().map(|s| instance.func_ref(*s)).collect(),
1484                )
1485            }),
1486    );
1487}
1488
1489/// Initialize the table memory from the provided initializers.
1490fn initialize_memories(
1491    instance: &mut Instance,
1492    data_initializers: &[DataInitializer<'_>],
1493) -> Result<(), Trap> {
1494    for init in data_initializers {
1495        let memory = instance.get_vmmemory(init.location.memory_index);
1496
1497        let start = get_memory_init_start(init, instance);
1498        unsafe {
1499            let current_length = memory.vmmemory().as_ref().current_length;
1500            if start
1501                .checked_add(init.data.len())
1502                .map_or(true, |end| end > current_length)
1503            {
1504                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1505            }
1506            memory.initialize_with_data(start, init.data)?;
1507        }
1508    }
1509
1510    Ok(())
1511}
1512
/// Evaluate each global initializer from the module and write the resulting
/// value directly into this instance's `VMGlobalDefinition` storage.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // Raw pointer to the destination global definition.
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Initialize from another (local or imported) global's
                    // current value.
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // A null funcref is represented as 0.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}
1542
1543/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1544/// future funcref operations are just looking up this data.
1545fn build_funcrefs(
1546    module_info: &ModuleInfo,
1547    ctx: &StoreObjects,
1548    imports: &Imports,
1549    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1550    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1551    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1552    vmctx_ptr: *mut VMContext,
1553) -> (
1554    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1555    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1556) {
1557    let mut func_refs =
1558        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1559    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1560
1561    // do imported functions
1562    for import in imports.functions.values() {
1563        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1564    }
1565
1566    // do local functions
1567    for (local_index, func_ptr) in finished_functions.iter() {
1568        let index = module_info.func_index(local_index);
1569        let sig_index = module_info.functions[index];
1570        let type_index = vmshared_signatures[sig_index];
1571        let call_trampoline = function_call_trampolines[sig_index];
1572        let anyfunc = VMCallerCheckedAnyfunc {
1573            func_ptr: func_ptr.0,
1574            type_index,
1575            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1576            call_trampoline,
1577        };
1578        func_refs.push(anyfunc);
1579    }
1580    (
1581        func_refs.into_boxed_slice(),
1582        imported_func_refs.into_boxed_slice(),
1583    )
1584}