use crate::export::Export;
use crate::externref::{StackMapRegistry, VMExternRefActivationsTable};
use crate::imports::Imports;
use crate::memory::{DefaultMemoryCreator, RuntimeLinearMemory, RuntimeMemoryCreator};
use crate::table::{Table, TableElement};
use crate::traphandlers::Trap;
use crate::vmcontext::{
VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport,
VMGlobalDefinition, VMGlobalImport, VMInterrupts, VMMemoryDefinition, VMMemoryImport,
VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
};
use crate::{ExportFunction, ExportGlobal, ExportMemory, ExportTable};
use memoffset::offset_of;
use more_asserts::assert_lt;
use std::alloc::{self, Layout};
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::ptr::NonNull;
use std::sync::Arc;
use std::{mem, ptr, slice};
use thiserror::Error;
use wasmtime_environ::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
use wasmtime_environ::wasm::{
DataIndex, DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex,
ElemIndex, FuncIndex, GlobalIndex, GlobalInit, MemoryIndex, SignatureIndex, TableElementType,
TableIndex, WasmType,
};
use wasmtime_environ::{ir, DataInitializer, EntityIndex, Module, TableElements, VMOffsets};
/// A WebAssembly instance.
///
/// This is `#[repr(C)]` because its `vmctx` field must be the *last* field:
/// JIT-generated code addresses the `VMContext` plus a trailing,
/// dynamically-sized region laid out according to `offsets`, allocated
/// immediately after this struct (see `Instance::alloc_layout`).
#[repr(C)]
pub(crate) struct Instance {
    /// The compiled module this is an instantiation of.
    module: Arc<Module>,
    /// Keep-alive handle for the JIT code backing `module`'s functions.
    code: Arc<dyn Any>,
    /// Byte offsets of each vmctx section, computed from `module`.
    offsets: VMOffsets,
    /// Memories defined (not imported) by this instance.
    memories: BoxedSlice<DefinedMemoryIndex, Box<dyn RuntimeLinearMemory>>,
    /// Tables defined (not imported) by this instance.
    tables: BoxedSlice<DefinedTableIndex, Table>,
    /// Passive element segments, dropped by `elem.drop`; values are raw
    /// anyfunc pointers into this instance's vmctx anyfunc array.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[*mut VMCallerCheckedAnyfunc]>>>,
    /// Passive data segments, dropped by `data.drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,
    /// Arbitrary host-provided state associated with the instance.
    host_state: Box<dyn Any>,
    /// Zero-sized marker for the start of the JIT-visible context region.
    /// Must remain the last field (see struct-level comment).
    vmctx: VMContext,
}
#[allow(clippy::cast_ptr_alignment)]
impl Instance {
    /// Return a typed pointer `offset` bytes past the start of the vmctx.
    ///
    /// # Safety
    /// `offset` must come from `self.offsets` so the result stays inside the
    /// region allocated after this `Instance`, and `T` must match what the
    /// layout places there.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }
    /// Look up the shared (engine-wide) signature id for a module-local
    /// signature index.
    fn signature_id(&self, index: SignatureIndex) -> VMSharedSignatureIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { *self.signature_ids_ptr().add(index) }
    }
    /// The module this instance was created from.
    pub(crate) fn module(&self) -> &Module {
        &self.module
    }
    /// Base of the vmctx signature-id array.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
    /// The `VMFunctionImport` for imported function `index`.
    fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }
    /// Base of the vmctx imported-functions array.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }
    /// The `VMTableImport` for imported table `index`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }
    /// Base of the vmctx imported-tables array.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }
    /// The `VMMemoryImport` for imported memory `index`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }
    /// Base of the vmctx imported-memories array.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }
    /// The `VMGlobalImport` for imported global `index`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }
    /// Base of the vmctx imported-globals array.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }
    /// Copy of the `VMTableDefinition` for defined table `index`.
    #[allow(dead_code)]
    fn table(&self, index: DefinedTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index) }
    }
    /// Overwrite the vmctx `VMTableDefinition` for defined table `index`
    /// (used after the backing `Table` grows).
    fn set_table(&self, index: DefinedTableIndex, table: VMTableDefinition) {
        unsafe {
            *self.table_ptr(index) = table;
        }
    }
    /// Pointer to the vmctx `VMTableDefinition` for defined table `index`.
    fn table_ptr(&self, index: DefinedTableIndex) -> *mut VMTableDefinition {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { self.tables_ptr().add(index) }
    }
    /// Base of the vmctx table-definitions array.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
    /// The `VMMemoryDefinition` for memory `index`, whether it's defined
    /// locally or imported (in which case it is read from the exporting
    /// instance via the import record).
    pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(defined_index) = self.module.defined_memory_index(index) {
            self.memory(defined_index)
        } else {
            let import = self.imported_memory(index);
            *unsafe { import.from.as_ref().unwrap() }
        }
    }
    /// Copy of the `VMMemoryDefinition` for defined memory `index`.
    fn memory(&self, index: DefinedMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index) }
    }
    /// Overwrite the vmctx `VMMemoryDefinition` for defined memory `index`
    /// (used after the backing memory grows).
    fn set_memory(&self, index: DefinedMemoryIndex, mem: VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index) = mem;
        }
    }
    /// Pointer to the vmctx `VMMemoryDefinition` for defined memory `index`.
    fn memory_ptr(&self, index: DefinedMemoryIndex) -> *mut VMMemoryDefinition {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { self.memories_ptr().add(index) }
    }
    /// Base of the vmctx memory-definitions array.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
    /// Copy of the `VMGlobalDefinition` for defined global `index`.
    fn global(&self, index: DefinedGlobalIndex) -> VMGlobalDefinition {
        unsafe { *self.global_ptr(index) }
    }
    /// Overwrite the vmctx `VMGlobalDefinition` for defined global `index`.
    #[allow(dead_code)]
    fn set_global(&self, index: DefinedGlobalIndex, global: VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index) = global;
        }
    }
    /// Pointer to the vmctx `VMGlobalDefinition` for defined global `index`.
    fn global_ptr(&self, index: DefinedGlobalIndex) -> *mut VMGlobalDefinition {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { self.globals_ptr().add(index) }
    }
    /// Pointer to global `index`'s definition, resolving through the import
    /// record when the global is imported.
    pub(crate) fn defined_or_imported_global_ptr(
        &self,
        index: GlobalIndex,
    ) -> *mut VMGlobalDefinition {
        if let Some(index) = self.module.defined_global_index(index) {
            self.global_ptr(index)
        } else {
            self.imported_global(index).from
        }
    }
    /// Base of the vmctx global-definitions array.
    fn globals_ptr(&self) -> *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }
    /// Pointer to the vmctx builtin-functions table.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
    /// Pointer to the vmctx slot holding the interrupts pointer.
    pub fn interrupts(&self) -> *mut *const VMInterrupts {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_interrupts()) }
    }
    /// Pointer to the vmctx slot holding the externref activations table.
    pub fn externref_activations_table(&self) -> *mut *mut VMExternRefActivationsTable {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_externref_activations_table()) }
    }
    /// Pointer to the vmctx slot holding the stack map registry.
    pub fn stack_map_registry(&self) -> *mut *mut StackMapRegistry {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_stack_map_registry()) }
    }
    /// Shared reference to this instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }
    /// Raw mutable pointer to the `VMContext` (the pointer JIT code receives).
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
    /// Look up an export by name; `None` if the module has no such export.
    pub fn lookup(&self, field: &str) -> Option<Export> {
        let export = if let Some(export) = self.module.exports.get(field) {
            export.clone()
        } else {
            return None;
        };
        Some(self.lookup_by_declaration(&export))
    }
    /// Build the runtime `Export` for an export declaration, resolving
    /// imported entities to their defining instance's definitions.
    pub fn lookup_by_declaration(&self, export: &EntityIndex) -> Export {
        match export {
            EntityIndex::Function(index) => {
                let anyfunc = self.get_caller_checked_anyfunc(*index).unwrap();
                let anyfunc =
                    NonNull::new(anyfunc as *const VMCallerCheckedAnyfunc as *mut _).unwrap();
                ExportFunction { anyfunc }.into()
            }
            EntityIndex::Table(index) => {
                let (definition, vmctx) =
                    if let Some(def_index) = self.module.defined_table_index(*index) {
                        (self.table_ptr(def_index), self.vmctx_ptr())
                    } else {
                        let import = self.imported_table(*index);
                        (import.from, import.vmctx)
                    };
                ExportTable {
                    definition,
                    vmctx,
                    table: self.module.table_plans[*index].clone(),
                }
                .into()
            }
            EntityIndex::Memory(index) => {
                let (definition, vmctx) =
                    if let Some(def_index) = self.module.defined_memory_index(*index) {
                        (self.memory_ptr(def_index), self.vmctx_ptr())
                    } else {
                        let import = self.imported_memory(*index);
                        (import.from, import.vmctx)
                    };
                ExportMemory {
                    definition,
                    vmctx,
                    memory: self.module.memory_plans[*index].clone(),
                }
                .into()
            }
            EntityIndex::Global(index) => ExportGlobal {
                definition: if let Some(def_index) = self.module.defined_global_index(*index) {
                    self.global_ptr(def_index)
                } else {
                    self.imported_global(*index).from
                },
                vmctx: self.vmctx_ptr(),
                global: self.module.globals[*index],
            }
            .into(),
        }
    }
    /// Iterator over the module's exports (name, entity index).
    pub fn exports(&self) -> indexmap::map::Iter<String, EntityIndex> {
        self.module.exports.iter()
    }
    /// The host state provided at instantiation time.
    #[inline]
    pub fn host_state(&self) -> &dyn Any {
        &*self.host_state
    }
    /// Byte offset of the `vmctx` field within `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
    /// Recover the defined-table index from a pointer into this instance's
    /// vmctx table-definitions array (pointer subtraction).
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> DefinedTableIndex {
        let offsets = &self.offsets;
        let begin = unsafe {
            (&self.vmctx as *const VMContext as *const u8)
                .add(usize::try_from(offsets.vmctx_tables_begin()).unwrap())
        } as *const VMTableDefinition;
        let end: *const VMTableDefinition = table;
        let index = DefinedTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }
    /// Recover the defined-memory index from a pointer into this instance's
    /// vmctx memory-definitions array (pointer subtraction).
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> DefinedMemoryIndex {
        let offsets = &self.offsets;
        let begin = unsafe {
            (&self.vmctx as *const VMContext as *const u8)
                .add(usize::try_from(offsets.vmctx_memories_begin()).unwrap())
        } as *const VMMemoryDefinition;
        let end: *const VMMemoryDefinition = memory;
        let index = DefinedMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }
    /// Grow defined memory `memory_index` by `delta` pages; returns the old
    /// size in pages, or `None` on failure. The vmctx definition is refreshed
    /// even on failure, matching the backing memory's current state.
    pub(crate) fn memory_grow(&self, memory_index: DefinedMemoryIndex, delta: u32) -> Option<u32> {
        let result = self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()))
            .grow(delta);
        self.set_memory(memory_index, self.memories[memory_index].vmmemory());
        result
    }
    /// Grow an imported memory by delegating to its defining instance.
    ///
    /// # Safety
    /// The import's `vmctx`/`from` pointers must still be valid.
    pub(crate) unsafe fn imported_memory_grow(
        &self,
        memory_index: MemoryIndex,
        delta: u32,
    ) -> Option<u32> {
        let import = self.imported_memory(memory_index);
        let foreign_instance = (&*import.vmctx).instance();
        let foreign_memory = &*import.from;
        let foreign_index = foreign_instance.memory_index(foreign_memory);
        foreign_instance.memory_grow(foreign_index, delta)
    }
    /// Current size of defined memory `memory_index`, in pages.
    pub(crate) fn memory_size(&self, memory_index: DefinedMemoryIndex) -> u32 {
        self.memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()))
            .size()
    }
    /// Size of an imported memory, read from its defining instance.
    ///
    /// # Safety
    /// The import's `vmctx`/`from` pointers must still be valid.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> u32 {
        let import = self.imported_memory(memory_index);
        let foreign_instance = (&mut *import.vmctx).instance();
        let foreign_memory = &mut *import.from;
        let foreign_index = foreign_instance.memory_index(foreign_memory);
        foreign_instance.memory_size(foreign_index)
    }
    /// Element type of table `table_index` (defined or imported).
    pub(crate) fn table_element_type(&self, table_index: TableIndex) -> TableElementType {
        let table = self.get_table(table_index);
        table.element_type()
    }
    /// Grow table `table_index` (defined or imported) by `delta` entries,
    /// filling new entries with `init_value`; returns the old size or `None`.
    pub(crate) fn table_grow(
        &self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let (defined_table_index, instance) =
            self.get_defined_table_index_and_instance(table_index);
        instance.defined_table_grow(defined_table_index, delta, init_value)
    }
    /// Grow a locally-defined table, then refresh its vmctx definition so
    /// JIT code sees the new base/length.
    fn defined_table_grow(
        &self,
        table_index: DefinedTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        unsafe {
            let orig_size = self
                .tables
                .get(table_index)
                .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
                .grow(delta, init_value)?;
            self.set_table(table_index, self.tables[table_index].vmtable());
            Some(orig_size)
        }
    }
    /// `table.fill` on a locally-defined table; traps on out-of-bounds.
    pub(crate) fn defined_table_fill(
        &self,
        table_index: DefinedTableIndex,
        dst: u32,
        val: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        self.tables.get(table_index).unwrap().fill(dst, val, len)
    }
    /// Read element `index` of a locally-defined table; `None` if out of
    /// bounds.
    fn table_get(&self, table_index: DefinedTableIndex, index: u32) -> Option<TableElement> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .get(index)
    }
    /// Write element `index` of a locally-defined table; `Err(())` if out of
    /// bounds.
    fn table_set(
        &self,
        table_index: DefinedTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), ()> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .set(index, val)
    }
    /// Allocation layout for this instance: the `Instance` struct followed
    /// immediately by the vmctx region described by `offsets`.
    fn alloc_layout(&self) -> Layout {
        let size = mem::size_of_val(self)
            .checked_add(usize::try_from(self.offsets.size_of_vmctx()).unwrap())
            .unwrap();
        let align = mem::align_of_val(self);
        Layout::from_size_align(size, align).unwrap()
    }
    /// The `VMCallerCheckedAnyfunc` for function `index`, or `None` for the
    /// reserved (null funcref) index.
    pub(crate) fn get_caller_checked_anyfunc(
        &self,
        index: FuncIndex,
    ) -> Option<&VMCallerCheckedAnyfunc> {
        if index == FuncIndex::reserved_value() {
            return None;
        }
        Some(unsafe { &*self.anyfunc_ptr(index) })
    }
    /// Pointer to the vmctx anyfunc slot for function `index`.
    ///
    /// # Safety
    /// `index` must be a valid function index for this module.
    unsafe fn anyfunc_ptr(&self, index: FuncIndex) -> *mut VMCallerCheckedAnyfunc {
        self.vmctx_plus_offset(self.offsets.vmctx_anyfunc(index))
    }
    /// `table.init`: copy `len` entries of passive element segment
    /// `elem_index`, starting at `src`, into `table_index` at `dst`.
    /// A dropped/missing segment behaves as empty per the bulk-memory spec.
    pub(crate) fn table_init(
        &self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map(|e| &**e)
            .unwrap_or_else(|| &[]);
        // checked_add guards both overflow and out-of-range before copying.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::wasm(ir::TrapCode::TableOutOfBounds));
        }
        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }
        Ok(())
    }
    /// `elem.drop`: discard passive element segment `elem_index`.
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
    }
    /// `memory.copy` between (possibly the same) memories; overlapping
    /// ranges are handled by `ptr::copy` (memmove semantics).
    pub(crate) fn memory_copy(
        &self,
        dst_index: MemoryIndex,
        dst: u32,
        src_index: MemoryIndex,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let src_mem = self.get_memory(src_index);
        let dst_mem = self.get_memory(dst_index);
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > src_mem.current_length)
            || dst
                .checked_add(len)
                .map_or(true, |m| m as usize > dst_mem.current_length)
        {
            return Err(Trap::wasm(ir::TrapCode::HeapOutOfBounds));
        }
        let dst = usize::try_from(dst).unwrap();
        let src = usize::try_from(src).unwrap();
        unsafe {
            let dst = dst_mem.base.add(dst);
            let src = src_mem.base.add(src);
            ptr::copy(src, dst, len as usize);
        }
        Ok(())
    }
    /// `memory.fill` on a locally-defined memory: write `len` copies of the
    /// low byte of `val` starting at `dst`.
    pub(crate) fn defined_memory_fill(
        &self,
        memory_index: DefinedMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        if dst
            .checked_add(len)
            .map_or(true, |m| m as usize > memory.current_length)
        {
            return Err(Trap::wasm(ir::TrapCode::HeapOutOfBounds));
        }
        let dst = isize::try_from(dst).unwrap();
        let val = val as u8;
        unsafe {
            let dst = memory.base.offset(dst);
            ptr::write_bytes(dst, val, len as usize);
        }
        Ok(())
    }
    /// `memory.fill` on an imported memory, delegated to its defining
    /// instance.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe {
            let foreign_instance = (&*import.vmctx).instance();
            let foreign_memory = &*import.from;
            let foreign_index = foreign_instance.memory_index(foreign_memory);
            foreign_instance.defined_memory_fill(foreign_index, dst, val, len)
        }
    }
    /// `memory.init`: copy `len` bytes of passive data segment `data_index`,
    /// starting at `src`, into `memory_index` at `dst`. A dropped segment
    /// behaves as empty.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_memory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data
            .get(&data_index)
            .map_or(&[][..], |data| &**data);
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| m as usize > memory.current_length)
        {
            return Err(Trap::wasm(ir::TrapCode::HeapOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe {
            let dst_start = memory.base.add(dst as usize);
            let dst_slice = slice::from_raw_parts_mut(dst_start, len as usize);
            dst_slice.copy_from_slice(src_slice);
        }
        Ok(())
    }
    /// `data.drop`: discard passive data segment `data_index`.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }
    /// The `Table` backing table `table_index`, following the import chain
    /// if the table is imported.
    pub(crate) fn get_table(&self, table_index: TableIndex) -> &Table {
        if let Some(defined_table_index) = self.module.defined_table_index(table_index) {
            self.get_defined_table(defined_table_index)
        } else {
            self.get_foreign_table(table_index)
        }
    }
    /// The `Table` backing locally-defined table `index`.
    pub(crate) fn get_defined_table(&self, index: DefinedTableIndex) -> &Table {
        &self.tables[index]
    }
    /// The `Table` backing imported table `index`, looked up in the
    /// defining instance via the import record.
    pub(crate) fn get_foreign_table(&self, index: TableIndex) -> &Table {
        let import = self.imported_table(index);
        let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
        let foreign_table = unsafe { &mut *(import).from };
        let foreign_index = foreign_instance.table_index(foreign_table);
        &foreign_instance.tables[foreign_index]
    }
    /// Resolve `index` to (defined index, owning instance): `self` for a
    /// locally-defined table, otherwise the instance that exported it.
    pub(crate) fn get_defined_table_index_and_instance(
        &self,
        index: TableIndex,
    ) -> (DefinedTableIndex, &Instance) {
        if let Some(defined_table_index) = self.module.defined_table_index(index) {
            (defined_table_index, self)
        } else {
            let import = self.imported_table(index);
            let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
            let foreign_table_def = unsafe { &mut *(import).from };
            let foreign_table_index = foreign_instance.table_index(foreign_table_def);
            (foreign_table_index, foreign_instance)
        }
    }
}
/// A non-owning-by-type handle to an `Instance`.
///
/// Cloning copies the raw pointer only; there is no reference counting, so
/// the creator must call `dealloc` exactly once (and only after all clones
/// are done being used).
#[derive(Hash, PartialEq, Eq)]
pub struct InstanceHandle {
    // Points at the heap allocation created in `InstanceHandle::new`
    // (an `Instance` followed by its vmctx region).
    instance: *mut Instance,
}
impl InstanceHandle {
    /// Allocate and initialize a new instance.
    ///
    /// Allocates the `Instance` plus its trailing vmctx region in one
    /// allocation, then fills in the vmctx arrays (signature ids, imports,
    /// table/memory/global definitions, builtins, anyfuncs) and runs
    /// passive-element and global initialization. Data/table segment
    /// initialization is deferred to `initialize`.
    ///
    /// # Safety
    /// `imports`, `finished_functions`, and the raw pointers passed in must
    /// be valid and consistent with `module`; the returned handle must be
    /// `dealloc`'d exactly once.
    pub unsafe fn new(
        module: Arc<Module>,
        code: Arc<dyn Any>,
        finished_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
        imports: Imports,
        mem_creator: Option<&dyn RuntimeMemoryCreator>,
        lookup_shared_signature: &dyn Fn(SignatureIndex) -> VMSharedSignatureIndex,
        host_state: Box<dyn Any>,
        interrupts: *const VMInterrupts,
        externref_activations_table: *mut VMExternRefActivationsTable,
        stack_map_registry: *mut StackMapRegistry,
    ) -> Result<Self, InstantiationError> {
        debug_assert!(!externref_activations_table.is_null());
        debug_assert!(!stack_map_registry.is_null());
        let tables = create_tables(&module);
        let memories = create_memories(&module, mem_creator.unwrap_or(&DefaultMemoryCreator {}))?;
        // Snapshot the definitions before moving `tables`/`memories` into the
        // Instance; they are copied into the vmctx region below.
        let vmctx_tables = tables
            .values()
            .map(Table::vmtable)
            .collect::<PrimaryMap<DefinedTableIndex, _>>()
            .into_boxed_slice();
        let vmctx_memories = memories
            .values()
            .map(|a| a.vmmemory())
            .collect::<PrimaryMap<DefinedMemoryIndex, _>>()
            .into_boxed_slice();
        let vmctx_globals = create_globals(&module);
        let offsets = VMOffsets::new(mem::size_of::<*const u8>() as u8, &module);
        let passive_data = RefCell::new(module.passive_data.clone());
        let handle = {
            let instance = Instance {
                module,
                code,
                offsets,
                memories,
                tables,
                passive_elements: Default::default(),
                passive_data,
                host_state,
                vmctx: VMContext {},
            };
            // One allocation holds the Instance struct AND the vmctx region
            // laid out immediately after it (see `alloc_layout`).
            let layout = instance.alloc_layout();
            let instance_ptr = alloc::alloc(layout) as *mut Instance;
            if instance_ptr.is_null() {
                alloc::handle_alloc_error(layout);
            }
            ptr::write(instance_ptr, instance);
            InstanceHandle {
                instance: instance_ptr,
            }
        };
        let instance = handle.instance();
        // Populate shared signature ids, one per module signature.
        let mut ptr = instance.signature_ids_ptr();
        for (signature, _) in handle.module().signatures.iter() {
            *ptr = lookup_shared_signature(signature);
            ptr = ptr.add(1);
        }
        // Copy the import records and local definitions into the vmctx arrays.
        debug_assert_eq!(imports.functions.len(), handle.module().num_imported_funcs);
        ptr::copy(
            imports.functions.as_ptr(),
            instance.imported_functions_ptr() as *mut VMFunctionImport,
            imports.functions.len(),
        );
        debug_assert_eq!(imports.tables.len(), handle.module().num_imported_tables);
        ptr::copy(
            imports.tables.as_ptr(),
            instance.imported_tables_ptr() as *mut VMTableImport,
            imports.tables.len(),
        );
        debug_assert_eq!(
            imports.memories.len(),
            handle.module().num_imported_memories
        );
        ptr::copy(
            imports.memories.as_ptr(),
            instance.imported_memories_ptr() as *mut VMMemoryImport,
            imports.memories.len(),
        );
        debug_assert_eq!(imports.globals.len(), handle.module().num_imported_globals);
        ptr::copy(
            imports.globals.as_ptr(),
            instance.imported_globals_ptr() as *mut VMGlobalImport,
            imports.globals.len(),
        );
        ptr::copy(
            vmctx_tables.values().as_slice().as_ptr(),
            instance.tables_ptr() as *mut VMTableDefinition,
            vmctx_tables.len(),
        );
        ptr::copy(
            vmctx_memories.values().as_slice().as_ptr(),
            instance.memories_ptr() as *mut VMMemoryDefinition,
            vmctx_memories.len(),
        );
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut VMGlobalDefinition,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr() as *mut VMBuiltinFunctionsArray,
            VMBuiltinFunctionsArray::initialized(),
        );
        *instance.interrupts() = interrupts;
        *instance.externref_activations_table() = externref_activations_table;
        *instance.stack_map_registry() = stack_map_registry;
        // Build one VMCallerCheckedAnyfunc per function: locally-defined
        // functions point at their compiled body + this vmctx, imported ones
        // reuse the import record's body + foreign vmctx.
        for (index, sig) in instance.module.functions.iter() {
            let type_index = instance.signature_id(*sig);
            let (func_ptr, vmctx) =
                if let Some(def_index) = instance.module.defined_func_index(index) {
                    (
                        NonNull::new(finished_functions[def_index] as *mut _).unwrap(),
                        instance.vmctx_ptr(),
                    )
                } else {
                    let import = instance.imported_function(index);
                    (import.body, import.vmctx)
                };
            ptr::write(
                instance.anyfunc_ptr(index),
                VMCallerCheckedAnyfunc {
                    func_ptr,
                    type_index,
                    vmctx,
                },
            );
        }
        initialize_passive_elements(instance);
        initialize_globals(instance);
        Ok(handle)
    }
    /// Run active table/memory segment initialization.
    ///
    /// With bulk memory enabled, segments are applied in order and may trap
    /// partway through; otherwise all bounds are checked up front.
    ///
    /// # Safety
    /// `data_initializers` must reference memory ranges consistent with the
    /// instantiated module.
    pub unsafe fn initialize(
        &self,
        is_bulk_memory: bool,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), InstantiationError> {
        if !is_bulk_memory {
            check_table_init_bounds(self.instance())?;
            check_memory_init_bounds(self.instance(), data_initializers)?;
        }
        initialize_tables(self.instance())?;
        initialize_memories(self.instance(), data_initializers)?;
        Ok(())
    }
    /// Recover a handle from a raw `VMContext` pointer (e.g. inside a
    /// libcall).
    ///
    /// # Safety
    /// `vmctx` must point at a live `VMContext` created by this crate.
    pub unsafe fn from_vmctx(vmctx: *mut VMContext) -> Self {
        let instance = (&mut *vmctx).instance();
        Self {
            instance: instance as *const Instance as *mut Instance,
        }
    }
    /// Shared reference to the instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }
    /// Raw pointer to the instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }
    /// The module this instance was created from.
    pub fn module(&self) -> &Module {
        self.instance().module()
    }
    /// Look up an export by name.
    pub fn lookup(&self, field: &str) -> Option<Export> {
        self.instance().lookup(field)
    }
    /// Build the runtime export for an export declaration.
    pub fn lookup_by_declaration(&self, export: &EntityIndex) -> Export {
        self.instance().lookup_by_declaration(export)
    }
    /// Iterator over the module's exports.
    pub fn exports(&self) -> indexmap::map::Iter<String, EntityIndex> {
        self.instance().exports()
    }
    /// The host state provided at instantiation time.
    pub fn host_state(&self) -> &dyn Any {
        self.instance().host_state()
    }
    /// See [`Instance::memory_index`].
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> DefinedMemoryIndex {
        self.instance().memory_index(memory)
    }
    /// See [`Instance::memory_grow`].
    pub fn memory_grow(&self, memory_index: DefinedMemoryIndex, delta: u32) -> Option<u32> {
        self.instance().memory_grow(memory_index, delta)
    }
    /// See [`Instance::table_index`].
    pub fn table_index(&self, table: &VMTableDefinition) -> DefinedTableIndex {
        self.instance().table_index(table)
    }
    /// See [`Instance::table_grow`].
    pub fn table_grow(
        &self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance().table_grow(table_index, delta, init_value)
    }
    /// See [`Instance::defined_table_grow`].
    pub fn defined_table_grow(
        &self,
        table_index: DefinedTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance()
            .defined_table_grow(table_index, delta, init_value)
    }
    /// See [`Instance::table_get`].
    pub fn table_get(&self, table_index: DefinedTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }
    /// See [`Instance::table_set`].
    pub fn table_set(
        &self,
        table_index: DefinedTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), ()> {
        self.instance().table_set(table_index, index, val)
    }
    /// See [`Instance::defined_table_fill`].
    pub fn defined_table_fill(
        &self,
        table_index: DefinedTableIndex,
        dst: u32,
        val: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        self.instance()
            .defined_table_fill(table_index, dst, val, len)
    }
    /// See [`Instance::get_defined_table`].
    pub fn get_defined_table(&self, index: DefinedTableIndex) -> &Table {
        self.instance().get_defined_table(index)
    }
    /// Shared reference to the underlying instance.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { &*(self.instance as *const Instance) }
    }
    /// Duplicate the handle (raw pointer copy — no reference counting).
    ///
    /// # Safety
    /// The caller must ensure `dealloc` is still called exactly once overall.
    pub unsafe fn clone(&self) -> InstanceHandle {
        InstanceHandle {
            instance: self.instance,
        }
    }
    /// Drop and free the instance and its vmctx region.
    ///
    /// # Safety
    /// Must be called exactly once, after all clones of this handle are
    /// done being used.
    pub unsafe fn dealloc(&self) {
        let instance = self.instance();
        let layout = instance.alloc_layout();
        ptr::drop_in_place(self.instance);
        alloc::dealloc(self.instance.cast(), layout);
    }
}
/// Verify (pre-bulk-memory semantics) that every active element segment fits
/// entirely within its target table before any segment is applied.
///
/// Returns a `LinkError` if any segment would fall outside its table.
fn check_table_init_bounds(instance: &Instance) -> Result<(), InstantiationError> {
    for init in &instance.module().table_elements {
        // `start` may include a (module-controlled) imported-global base, so
        // it can be arbitrarily large.
        let start = get_table_init_start(init, instance);
        let table = instance.get_table(init.table_index);
        let size = usize::try_from(table.size()).unwrap();
        // checked_add: `start + elements.len()` could otherwise overflow
        // `usize` and wrap past the comparison, passing a bogus segment.
        if start
            .checked_add(init.elements.len())
            .map_or(true, |end| size < end)
        {
            return Err(InstantiationError::Link(LinkError(
                "table out of bounds: elements segment does not fit".to_owned(),
            )));
        }
    }
    Ok(())
}
/// Compute the absolute start offset of a data segment: the static offset
/// plus, if present, the current value of the segment's base global.
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;
    if let Some(base) = init.location.base {
        // SAFETY-relevant: reads the global's raw definition as a u32; the
        // global may be defined locally or resolved through an import.
        let val = unsafe {
            if let Some(def_index) = instance.module.defined_global_index(base) {
                *instance.global(def_index).as_u32()
            } else {
                *(*instance.imported_global(base).from).as_u32()
            }
        };
        start += usize::try_from(val).unwrap();
    }
    start
}
/// Return the target memory of a data segment as a mutable byte slice,
/// resolving through the import chain for imported memories.
///
/// # Safety
/// The caller must ensure no other aliasing access to the memory occurs
/// while the returned slice is alive; the import's raw pointers must be
/// valid.
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    let memory = if let Some(defined_memory_index) = instance
        .module
        .defined_memory_index(init.location.memory_index)
    {
        instance.memory(defined_memory_index)
    } else {
        // Imported memory: look up its definition in the exporting instance.
        let import = instance.imported_memory(init.location.memory_index);
        let foreign_instance = (&mut *(import).vmctx).instance();
        let foreign_memory = &mut *(import).from;
        let foreign_index = foreign_instance.memory_index(foreign_memory);
        foreign_instance.memory(foreign_index)
    };
    slice::from_raw_parts_mut(memory.base, memory.current_length)
}
/// Verify (pre-bulk-memory semantics) that every data segment fits entirely
/// within its target memory before any segment is applied.
///
/// Returns a `LinkError` if any segment would fall outside its memory.
fn check_memory_init_bounds(
    instance: &Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), InstantiationError> {
    for init in data_initializers {
        // `start` may include a (module-controlled) imported-global base.
        let start = get_memory_init_start(init, instance);
        unsafe {
            let mem_slice = get_memory_slice(init, instance);
            // checked_add: `start + data.len()` could otherwise overflow
            // `usize`, panicking (debug) or wrapping (release) instead of
            // reporting a link error.
            let out_of_bounds = start
                .checked_add(init.data.len())
                .map_or(true, |end| mem_slice.get_mut(start..end).is_none());
            if out_of_bounds {
                return Err(InstantiationError::Link(LinkError(
                    "memory out of bounds: data segment does not fit".into(),
                )));
            }
        }
    }
    Ok(())
}
/// Allocate a runtime `Table` for every table *defined* by `module`
/// (imported tables live in their exporting instance and are skipped).
fn create_tables(module: &Module) -> BoxedSlice<DefinedTableIndex, Table> {
    let num_imports = module.num_imported_tables;
    let defined_plans = &module.table_plans.values().as_slice()[num_imports..];
    let mut tables: PrimaryMap<DefinedTableIndex, _> =
        PrimaryMap::with_capacity(defined_plans.len());
    for plan in defined_plans {
        tables.push(Table::new(plan));
    }
    tables.into_boxed_slice()
}
/// Compute the absolute start offset of an element segment: the static
/// offset plus, if present, the current value of the segment's base global.
fn get_table_init_start(init: &TableElements, instance: &Instance) -> usize {
    let mut start = init.offset;
    if let Some(base) = init.base {
        // SAFETY-relevant: reads the global's raw definition as a u32; the
        // global may be defined locally or resolved through an import.
        let val = unsafe {
            if let Some(def_index) = instance.module.defined_global_index(base) {
                *instance.global(def_index).as_u32()
            } else {
                *(*instance.imported_global(base).from).as_u32()
            }
        };
        start += usize::try_from(val).unwrap();
    }
    start
}
/// Apply every active element segment to its target table.
///
/// Bounds are re-checked per segment (with overflow-safe `checked_add`) and
/// a `TableOutOfBounds` trap is reported for a segment that does not fit;
/// under bulk-memory semantics earlier segments may already have been
/// applied when a later one traps.
fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
    for init in &instance.module().table_elements {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table(init.table_index);
        if start
            .checked_add(init.elements.len())
            .map_or(true, |end| end > table.size() as usize)
        {
            return Err(InstantiationError::Trap(Trap::wasm(
                ir::TrapCode::TableOutOfBounds,
            )));
        }
        for (i, func_idx) in init.elements.iter().enumerate() {
            // The reserved function index encodes a null funcref.
            let anyfunc = instance.get_caller_checked_anyfunc(*func_idx).map_or(
                ptr::null_mut(),
                |f: &VMCallerCheckedAnyfunc| {
                    f as *const VMCallerCheckedAnyfunc as *mut VMCallerCheckedAnyfunc
                },
            );
            table
                .set(u32::try_from(start + i).unwrap(), anyfunc.into())
                .unwrap();
        }
    }
    Ok(())
}
/// Populate the instance's passive element segments, translating each
/// segment's function indices into raw anyfunc pointers (null for the
/// reserved index). Empty segments are skipped. Called once, during
/// `InstanceHandle::new`.
fn initialize_passive_elements(instance: &Instance) {
    let mut passive_elements = instance.passive_elements.borrow_mut();
    debug_assert!(
        passive_elements.is_empty(),
        "should only be called once, at initialization time"
    );
    for (idx, segments) in instance.module.passive_elements.iter() {
        if segments.is_empty() {
            continue;
        }
        let funcrefs: Box<[*mut VMCallerCheckedAnyfunc]> = segments
            .iter()
            .map(|func_index| {
                match instance.get_caller_checked_anyfunc(*func_index) {
                    Some(f) => f as *const VMCallerCheckedAnyfunc as *mut _,
                    None => ptr::null_mut(),
                }
            })
            .collect();
        passive_elements.insert(*idx, funcrefs);
    }
}
/// Allocate a linear memory (via `mem_creator`) for every memory *defined*
/// by `module`; imported memories live in their exporting instance and are
/// skipped. Allocation failures surface as `InstantiationError::Resource`.
fn create_memories(
    module: &Module,
    mem_creator: &dyn RuntimeMemoryCreator,
) -> Result<BoxedSlice<DefinedMemoryIndex, Box<dyn RuntimeLinearMemory>>, InstantiationError> {
    let num_imports = module.num_imported_memories;
    let defined_plans = &module.memory_plans.values().as_slice()[num_imports..];
    let mut memories: PrimaryMap<DefinedMemoryIndex, _> =
        PrimaryMap::with_capacity(defined_plans.len());
    for plan in defined_plans {
        let memory = mem_creator
            .new_memory(plan)
            .map_err(InstantiationError::Resource)?;
        memories.push(memory);
    }
    Ok(memories.into_boxed_slice())
}
/// Apply every active data segment to its target memory.
///
/// Bounds are re-checked per segment (overflow-safe `checked_add`) and a
/// `HeapOutOfBounds` trap is reported for a segment that does not fit; under
/// bulk-memory semantics earlier segments may already have been copied when
/// a later one traps.
fn initialize_memories(
    instance: &Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), InstantiationError> {
    for init in data_initializers {
        let memory = instance.get_memory(init.location.memory_index);
        let start = get_memory_init_start(init, instance);
        if start
            .checked_add(init.data.len())
            .map_or(true, |end| end > memory.current_length)
        {
            return Err(InstantiationError::Trap(Trap::wasm(
                ir::TrapCode::HeapOutOfBounds,
            )));
        }
        // SAFETY-relevant: the range was bounds-checked above, so the slice
        // copy stays within the memory.
        unsafe {
            let mem_slice = get_memory_slice(init, instance);
            let end = start + init.data.len();
            let to_init = &mut mem_slice[start..end];
            to_init.copy_from_slice(init.data);
        }
    }
    Ok(())
}
/// Create a zeroed `VMGlobalDefinition` for every global *defined* by
/// `module` (imported globals are excluded). Real initial values are
/// written later by `initialize_globals`.
fn create_globals(module: &Module) -> BoxedSlice<DefinedGlobalIndex, VMGlobalDefinition> {
    let num_defined = module.globals.len() - module.num_imported_globals;
    let mut vmctx_globals = PrimaryMap::with_capacity(num_defined);
    for _ in 0..num_defined {
        vmctx_globals.push(VMGlobalDefinition::new());
    }
    vmctx_globals.into_boxed_slice()
}
/// Write each locally-defined global's initial value into its vmctx slot.
///
/// Imported globals are skipped (they are initialized by their defining
/// instance). Panics on `GlobalInit::Import` for a defined global, which
/// would indicate a broken module translation.
fn initialize_globals(instance: &Instance) {
    let module = instance.module();
    let num_imports = module.num_imported_globals;
    for (index, global) in module.globals.iter().skip(num_imports) {
        let def_index = module.defined_global_index(index).unwrap();
        unsafe {
            let to = instance.global_ptr(def_index);
            match global.initializer {
                GlobalInit::I32Const(x) => *(*to).as_i32_mut() = x,
                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = x,
                GlobalInit::F32Const(x) => *(*to).as_f32_bits_mut() = x,
                GlobalInit::F64Const(x) => *(*to).as_f64_bits_mut() = x,
                GlobalInit::V128Const(x) => *(*to).as_u128_bits_mut() = x.0,
                // get_global: copy the current value of another (earlier)
                // global, defined or imported.
                GlobalInit::GetGlobal(x) => {
                    let from = if let Some(def_x) = module.defined_global_index(x) {
                        instance.global(def_x)
                    } else {
                        *instance.imported_global(x).from
                    };
                    *to = from;
                }
                // ref.func: store the raw anyfunc pointer for the function.
                GlobalInit::RefFunc(f) => {
                    *(*to).as_anyfunc_mut() = instance.get_caller_checked_anyfunc(f).unwrap()
                        as *const VMCallerCheckedAnyfunc;
                }
                // ref.null: representation depends on the reference type.
                GlobalInit::RefNullConst => match global.wasm_ty {
                    WasmType::FuncRef => *(*to).as_anyfunc_mut() = ptr::null(),
                    WasmType::ExternRef => *(*to).as_externref_mut() = None,
                    ty => panic!("unsupported reference type for global: {:?}", ty),
                },
                GlobalInit::Import => panic!("locally-defined global initialized as import"),
            }
        }
    }
}
/// An error during import resolution / segment bounds checking at link time.
#[derive(Error, Debug)]
#[error("Link error: {0}")]
pub struct LinkError(pub String);
/// An error while instantiating a module.
#[derive(Error, Debug)]
pub enum InstantiationError {
    /// Host resource exhaustion (e.g. a linear memory could not be
    /// allocated).
    #[error("Insufficient resources: {0}")]
    Resource(String),
    /// A link-time failure; see [`LinkError`].
    #[error("Failed to link module")]
    Link(#[from] LinkError),
    /// A wasm trap raised while running segment initializers.
    #[error("Trap occurred during instantiation")]
    Trap(Trap),
}