use crate::prelude::*;
use crate::runtime::vm::const_expr::{ConstEvalContext, ConstExprEvaluator};
use crate::runtime::vm::export::Export;
use crate::runtime::vm::memory::{Memory, RuntimeMemoryCreator};
use crate::runtime::vm::table::{Table, TableElement, TableElementType};
use crate::runtime::vm::vmcontext::{
VMBuiltinFunctionsArray, VMContext, VMFuncRef, VMFunctionImport, VMGlobalDefinition,
VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMOpaqueContext, VMStoreContext,
VMTableDefinition, VMTableImport, VMTagDefinition, VMTagImport,
};
use crate::runtime::vm::{
ExportFunction, ExportGlobal, ExportGlobalKind, ExportMemory, ExportTable, ExportTag, GcStore,
Imports, ModuleRuntimeInfo, SendSyncPtr, VMGcRef, VMStore, VMStoreRawPtr, VmPtr, VmSafe,
WasmFault,
};
use crate::store::{InstanceId, StoreOpaque};
use alloc::sync::Arc;
use core::alloc::Layout;
use core::marker;
use core::ops::Range;
use core::pin::Pin;
use core::ptr::NonNull;
#[cfg(target_has_atomic = "64")]
use core::sync::atomic::AtomicU64;
use core::{mem, ptr};
#[cfg(feature = "gc")]
use wasmtime_environ::ModuleInternedTypeIndex;
use wasmtime_environ::{
DataIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex,
ElemIndex, EntityIndex, EntityRef, EntitySet, FuncIndex, GlobalIndex, HostPtr, MemoryIndex,
Module, PrimaryMap, PtrSize, TableIndex, TableInitialValue, TableSegmentElements, TagIndex,
Trap, VMCONTEXT_MAGIC, VMOffsets, VMSharedTypeIndex, WasmHeapTopType,
packed_option::ReservedValue,
};
#[cfg(feature = "wmemcheck")]
use wasmtime_wmemcheck::Wmemcheck;
mod allocator;
pub use allocator::*;
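/// A pairing of an `Instance` and the raw pointer to the store that owns it,
/// both recoverable from a single `VMContext` pointer.
///
/// The `#[repr(transparent)]` wrapper is layout-compatible with `Instance`
/// itself (asserted in `from_vmctx` below), so a pointer to one is a pointer
/// to the other.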
#[repr(transparent)]
pub struct InstanceAndStore {
instance: Instance,
}
impl InstanceAndStore {
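    /// Converts the provided `vmctx` pointer into a temporary borrow of the
    /// `InstanceAndStore` allocated just before it and passes that to `f`.
    ///
    /// # Safety
    ///
    /// `vmctx` must point to a live `VMContext` that was allocated
    /// immediately after an `Instance`, and the borrow must not escape `f`.
    ///
    /// A minimal sketch of how a host libcall might enter through this
    /// (`index` and `delta` are hypothetical arguments supplied by the
    /// caller):
    ///
    /// ```ignore
    /// InstanceAndStore::from_vmctx(vmctx, |pair| {
    ///     let (instance, store) = pair.unpack_mut();
    ///     instance.memory_grow(store, index, delta)
    /// })
    /// ```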
#[inline]
pub(crate) unsafe fn from_vmctx<R>(
vmctx: NonNull<VMContext>,
f: impl for<'a> FnOnce(&'a mut Self) -> R,
) -> R {
const _: () = assert!(mem::size_of::<InstanceAndStore>() == mem::size_of::<Instance>());
let mut ptr = vmctx
.byte_sub(mem::size_of::<Instance>())
.cast::<InstanceAndStore>();
f(ptr.as_mut())
}
#[inline]
pub(crate) fn unpack_mut(&mut self) -> (Pin<&mut Instance>, &mut dyn VMStore) {
unsafe {
let store = &mut *self.store_ptr();
(Pin::new_unchecked(&mut self.instance), store)
}
}
#[inline]
fn store_ptr(&self) -> *mut dyn VMStore {
self.instance.store.unwrap().0.as_ptr()
}
}
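/// A wasm instance's runtime state.
///
/// An `Instance` is always allocated with a variable-sized `VMContext`
/// placed directly after it in memory (see `OwnedInstance::new`), which is
/// why the `vmctx` marker below must remain the last field.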
#[repr(C)]
pub struct Instance {
id: InstanceId,
runtime_info: ModuleRuntimeInfo,
memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
dropped_elements: EntitySet<ElemIndex>,
dropped_data: EntitySet<DataIndex>,
#[cfg(feature = "wmemcheck")]
pub(crate) wmemcheck_state: Option<Wmemcheck>,
store: Option<VMStoreRawPtr>,
vmctx: OwnedVMContext<VMContext>,
}
impl Instance {
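    /// Creates the combined `Instance`-plus-`VMContext` allocation from an
    /// allocation request and pre-created memories and tables, then fills in
    /// the vmctx contents via `initialize_vmctx`.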
fn new(
req: InstanceAllocationRequest,
memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
memory_tys: &PrimaryMap<MemoryIndex, wasmtime_environ::Memory>,
) -> InstanceHandle {
let module = req.runtime_info.env_module();
let dropped_elements = EntitySet::with_capacity(module.passive_elements.len());
let dropped_data = EntitySet::with_capacity(module.passive_data_map.len());
#[cfg(not(feature = "wmemcheck"))]
let _ = memory_tys;
let mut ret = OwnedInstance::new(Instance {
id: req.id,
runtime_info: req.runtime_info.clone(),
memories,
tables,
dropped_elements,
dropped_data,
#[cfg(feature = "wmemcheck")]
wmemcheck_state: {
if req.wmemcheck {
let size = memory_tys
.iter()
.next()
.map(|memory| memory.1.limits.min)
.unwrap_or(0)
* 64
* 1024;
Some(Wmemcheck::new(size as usize))
} else {
None
}
},
store: None,
vmctx: OwnedVMContext::new(),
});
unsafe {
ret.get_mut().initialize_vmctx(
module,
req.runtime_info.offsets(),
req.store,
req.imports,
);
}
ret
}
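    /// Converts the provided `vmctx` pointer into a pinned mutable borrow of
    /// the `Instance` allocated just before it and passes that to `f`.
    ///
    /// # Safety
    ///
    /// `vmctx` must be a valid, live `VMContext` pointer allocated directly
    /// after an `Instance`, as arranged by `OwnedInstance::new`.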
#[inline]
pub unsafe fn from_vmctx<R>(
vmctx: NonNull<VMContext>,
f: impl FnOnce(Pin<&mut Instance>) -> R,
) -> R {
let mut ptr = vmctx
.byte_sub(mem::size_of::<Instance>())
.cast::<Instance>();
f(Pin::new_unchecked(ptr.as_mut()))
}
pub(crate) fn env_module(&self) -> &Arc<wasmtime_environ::Module> {
self.runtime_info.env_module()
}
#[cfg(feature = "gc")]
pub(crate) fn runtime_module(&self) -> Option<&crate::Module> {
match &self.runtime_info {
ModuleRuntimeInfo::Module(m) => Some(m),
ModuleRuntimeInfo::Bare(_) => None,
}
}
#[cfg(feature = "gc")]
pub fn engine_type_index(&self, module_index: ModuleInternedTypeIndex) -> VMSharedTypeIndex {
self.runtime_info.engine_type_index(module_index)
}
#[inline]
fn offsets(&self) -> &VMOffsets<HostPtr> {
self.runtime_info.offsets()
}
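    // Accessors for the `VM*Import` structs that `initialize_vmctx` copied
    // into this instance's vmctx area.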
fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
unsafe { self.vmctx_plus_offset(self.offsets().vmctx_vmfunction_import(index)) }
}
fn imported_table(&self, index: TableIndex) -> &VMTableImport {
unsafe { self.vmctx_plus_offset(self.offsets().vmctx_vmtable_import(index)) }
}
fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
unsafe { self.vmctx_plus_offset(self.offsets().vmctx_vmmemory_import(index)) }
}
fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
unsafe { self.vmctx_plus_offset(self.offsets().vmctx_vmglobal_import(index)) }
}
fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
unsafe { self.vmctx_plus_offset(self.offsets().vmctx_vmtag_import(index)) }
}
pub fn tag_ptr(&self, index: DefinedTagIndex) -> NonNull<VMTagDefinition> {
unsafe { self.vmctx_plus_offset_raw(self.offsets().vmctx_vmtag_definition(index)) }
}
pub fn table(&self, index: DefinedTableIndex) -> VMTableDefinition {
unsafe { self.table_ptr(index).read() }
}
fn set_table(self: Pin<&mut Self>, index: DefinedTableIndex, table: VMTableDefinition) {
unsafe {
self.table_ptr(index).write(table);
}
}
pub fn table_ptr(&self, index: DefinedTableIndex) -> NonNull<VMTableDefinition> {
unsafe { self.vmctx_plus_offset_raw(self.offsets().vmctx_vmtable_definition(index)) }
}
pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
if let Some(defined_index) = self.env_module().defined_memory_index(index) {
self.memory(defined_index)
} else {
let import = self.imported_memory(index);
unsafe { VMMemoryDefinition::load(import.from.as_ptr()) }
}
}
#[cfg(feature = "threads")]
pub(crate) fn get_runtime_memory(self: Pin<&mut Self>, index: MemoryIndex) -> &mut Memory {
if let Some(defined_index) = self.env_module().defined_memory_index(index) {
unsafe { &mut *self.get_defined_memory(defined_index) }
} else {
let import = self.imported_memory(index);
unsafe {
let ptr = Instance::from_vmctx(import.vmctx.as_non_null(), |i| {
i.get_defined_memory(import.index)
});
&mut *ptr
}
}
}
#[inline]
pub fn memory(&self, index: DefinedMemoryIndex) -> VMMemoryDefinition {
unsafe { VMMemoryDefinition::load(self.memory_ptr(index).as_ptr()) }
}
fn set_memory(&self, index: DefinedMemoryIndex, mem: VMMemoryDefinition) {
unsafe {
self.memory_ptr(index).write(mem);
}
}
#[inline]
pub fn memory_ptr(&self, index: DefinedMemoryIndex) -> NonNull<VMMemoryDefinition> {
unsafe {
self.vmctx_plus_offset::<VmPtr<_>>(self.offsets().vmctx_vmmemory_pointer(index))
.as_non_null()
}
}
pub fn global_ptr(&self, index: DefinedGlobalIndex) -> NonNull<VMGlobalDefinition> {
unsafe { self.vmctx_plus_offset_raw(self.offsets().vmctx_vmglobal_definition(index)) }
}
pub(crate) fn defined_or_imported_global_ptr(
self: Pin<&mut Self>,
index: GlobalIndex,
) -> NonNull<VMGlobalDefinition> {
if let Some(index) = self.env_module().defined_global_index(index) {
self.global_ptr(index)
} else {
self.imported_global(index).from.as_non_null()
}
}
pub fn all_globals(&self) -> impl ExactSizeIterator<Item = (GlobalIndex, ExportGlobal)> + '_ {
let module = self.env_module().clone();
module
.globals
.keys()
.map(move |idx| (idx, self.get_exported_global(idx)))
}
pub fn defined_globals(
&self,
) -> impl ExactSizeIterator<Item = (DefinedGlobalIndex, ExportGlobal)> + '_ {
let module = self.env_module().clone();
module
.globals
.keys()
.skip(module.num_imported_globals)
.map(move |global_idx| {
let def_idx = module.defined_global_index(global_idx).unwrap();
let global = ExportGlobal {
definition: self.global_ptr(def_idx),
kind: ExportGlobalKind::Instance(self.vmctx(), def_idx),
global: self.env_module().globals[global_idx],
};
(def_idx, global)
})
}
#[inline]
pub fn vm_store_context(&self) -> NonNull<Option<VmPtr<VMStoreContext>>> {
unsafe { self.vmctx_plus_offset_raw(self.offsets().ptr.vmctx_store_context()) }
}
#[cfg(target_has_atomic = "64")]
pub fn epoch_ptr(self: Pin<&mut Self>) -> &mut Option<VmPtr<AtomicU64>> {
let offset = self.offsets().ptr.vmctx_epoch_ptr();
unsafe { self.vmctx_plus_offset_mut(offset) }
}
pub fn gc_heap_data(self: Pin<&mut Self>) -> &mut Option<VmPtr<u8>> {
let offset = self.offsets().ptr.vmctx_gc_heap_data();
unsafe { self.vmctx_plus_offset_mut(offset) }
}
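    /// Records the store that owns this instance and refreshes the derived
    /// vmctx fields: the store context pointer, the epoch counter pointer,
    /// and the GC heap data pointer.
    ///
    /// # Safety
    ///
    /// If `store` is `Some`, it must point to the store that actually owns
    /// this instance and must outlive the instance's use of it.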
pub(crate) unsafe fn set_store(mut self: Pin<&mut Self>, store: Option<NonNull<dyn VMStore>>) {
*self.as_mut().store_mut() = store.map(VMStoreRawPtr);
if let Some(mut store) = store {
let store = store.as_mut();
self.vm_store_context()
.write(Some(store.vm_store_context_ptr().into()));
#[cfg(target_has_atomic = "64")]
{
*self.as_mut().epoch_ptr() =
Some(NonNull::from(store.engine().epoch_counter()).into());
}
if self.env_module().needs_gc_heap {
self.as_mut().set_gc_heap(Some(store.gc_store().expect(
"if we need a GC heap, then `Instance::new_raw` should have already \
allocated it for us",
)));
} else {
self.as_mut().set_gc_heap(None);
}
} else {
self.vm_store_context().write(None);
#[cfg(target_has_atomic = "64")]
{
*self.as_mut().epoch_ptr() = None;
}
self.as_mut().set_gc_heap(None);
}
}
unsafe fn set_gc_heap(self: Pin<&mut Self>, gc_store: Option<&GcStore>) {
if let Some(gc_store) = gc_store {
*self.gc_heap_data() = Some(gc_store.gc_heap.vmctx_gc_heap_data().into());
} else {
*self.gc_heap_data() = None;
}
}
#[inline]
pub fn vmctx(&self) -> NonNull<VMContext> {
InstanceLayout::vmctx(self)
}
pub fn get_exported_func(self: Pin<&mut Self>, index: FuncIndex) -> ExportFunction {
let func_ref = self.get_func_ref(index).unwrap();
ExportFunction { func_ref }
}
pub fn get_exported_table(&self, index: TableIndex) -> ExportTable {
let ty = self.env_module().tables[index];
let (definition, vmctx, index) =
if let Some(def_index) = self.env_module().defined_table_index(index) {
(self.table_ptr(def_index), self.vmctx(), def_index)
} else {
let import = self.imported_table(index);
(
import.from.as_non_null(),
import.vmctx.as_non_null(),
import.index,
)
};
ExportTable {
definition,
vmctx,
table: ty,
index,
}
}
pub fn get_exported_memory(&self, index: MemoryIndex) -> ExportMemory {
let (definition, vmctx, def_index) =
if let Some(def_index) = self.env_module().defined_memory_index(index) {
(self.memory_ptr(def_index), self.vmctx(), def_index)
} else {
let import = self.imported_memory(index);
(
import.from.as_non_null(),
import.vmctx.as_non_null(),
import.index,
)
};
ExportMemory {
definition,
vmctx,
memory: self.env_module().memories[index],
index: def_index,
}
}
fn get_exported_global(&self, index: GlobalIndex) -> ExportGlobal {
let global = self.env_module().globals[index];
if let Some(def_index) = self.env_module().defined_global_index(index) {
ExportGlobal {
definition: self.global_ptr(def_index),
kind: ExportGlobalKind::Instance(self.vmctx(), def_index),
global,
}
} else {
ExportGlobal::from_vmimport(self.imported_global(index), global)
}
}
fn get_exported_tag(&self, index: TagIndex) -> ExportTag {
let tag = self.env_module().tags[index];
let (vmctx, definition, index) =
if let Some(def_index) = self.env_module().defined_tag_index(index) {
(self.vmctx(), self.tag_ptr(def_index), def_index)
} else {
let import = self.imported_tag(index);
(
import.vmctx.as_non_null(),
import.from.as_non_null(),
import.index,
)
};
ExportTag {
definition,
vmctx,
index,
tag,
}
}
pub fn exports(&self) -> wasmparser::collections::index_map::Iter<'_, String, EntityIndex> {
self.env_module().exports.iter()
}
pub unsafe fn table_index(&self, table: &VMTableDefinition) -> DefinedTableIndex {
let index = DefinedTableIndex::new(
usize::try_from(
(table as *const VMTableDefinition)
.offset_from(self.table_ptr(DefinedTableIndex::new(0)).as_ptr()),
)
.unwrap(),
);
assert!(index.index() < self.tables.len());
index
}
pub(crate) fn memory_page_size(&self, index: MemoryIndex) -> usize {
usize::try_from(self.env_module().memories[index].page_size()).unwrap()
}
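    /// Grows memory `index` by `delta` pages.
    ///
    /// Returns the previous size of the memory in bytes on success, or
    /// `Ok(None)` if the memory cannot be grown by `delta` pages. Imported
    /// memories are grown by delegating to their defining instance.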
pub(crate) fn memory_grow(
self: Pin<&mut Self>,
store: &mut dyn VMStore,
index: MemoryIndex,
delta: u64,
) -> Result<Option<usize>, Error> {
match self.env_module().defined_memory_index(index) {
Some(idx) => self.defined_memory_grow(store, idx, delta),
None => {
let import = self.imported_memory(index);
unsafe {
Instance::from_vmctx(import.vmctx.as_non_null(), |i| {
i.defined_memory_grow(store, import.index, delta)
})
}
}
}
}
fn defined_memory_grow(
mut self: Pin<&mut Self>,
store: &mut dyn VMStore,
idx: DefinedMemoryIndex,
delta: u64,
) -> Result<Option<usize>, Error> {
let memory = &mut self.as_mut().memories_mut()[idx].1;
let result = unsafe { memory.grow(delta, Some(store)) };
if memory.as_shared_memory().is_none() {
let vmmemory = memory.vmmemory();
self.set_memory(idx, vmmemory);
}
result
}
pub(crate) fn table_element_type(
self: Pin<&mut Self>,
table_index: TableIndex,
) -> TableElementType {
unsafe { (*self.get_table(table_index)).element_type() }
}
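    /// Grows table `table_index` by `delta` elements, filling the new slots
    /// with `init_value`. As with `memory_grow`, imported tables delegate to
    /// the instance that defines them.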
pub(crate) fn table_grow(
self: Pin<&mut Self>,
store: &mut dyn VMStore,
table_index: TableIndex,
delta: u64,
init_value: TableElement,
) -> Result<Option<usize>, Error> {
self.with_defined_table_index_and_instance(table_index, |i, instance| {
instance.defined_table_grow(store, i, delta, init_value)
})
}
fn defined_table_grow(
mut self: Pin<&mut Self>,
store: &mut dyn VMStore,
table_index: DefinedTableIndex,
delta: u64,
init_value: TableElement,
) -> Result<Option<usize>, Error> {
let table = &mut self
.as_mut()
.tables_mut()
.get_mut(table_index)
.unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
.1;
let result = unsafe { table.grow(delta, init_value, store) };
let element = table.vmtable();
self.set_table(table_index, element);
result
}
fn alloc_layout(offsets: &VMOffsets<HostPtr>) -> Layout {
let size = mem::size_of::<Self>()
.checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
.unwrap();
let align = mem::align_of::<Self>();
Layout::from_size_align(size, align).unwrap()
}
fn type_ids_array(&self) -> NonNull<VmPtr<VMSharedTypeIndex>> {
unsafe { self.vmctx_plus_offset_raw(self.offsets().ptr.vmctx_type_ids_array()) }
}
unsafe fn construct_func_ref(
self: Pin<&mut Self>,
index: FuncIndex,
type_index: VMSharedTypeIndex,
into: *mut VMFuncRef,
) {
let func_ref = if let Some(def_index) = self.env_module().defined_func_index(index) {
VMFuncRef {
array_call: self
.runtime_info
.array_to_wasm_trampoline(def_index)
.expect("should have array-to-Wasm trampoline for escaping function")
.into(),
wasm_call: Some(self.runtime_info.function(def_index).into()),
vmctx: VMOpaqueContext::from_vmcontext(self.vmctx()).into(),
type_index,
}
} else {
let import = self.imported_function(index);
VMFuncRef {
array_call: import.array_call,
wasm_call: Some(import.wasm_call),
vmctx: import.vmctx,
type_index,
}
};
unsafe {
ptr::write(into, func_ref);
}
}
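    /// Returns a pointer to the `VMFuncRef` for `index`, first writing its
    /// contents into the vmctx via `construct_func_ref`; returns `None` for
    /// the reserved sentinel index.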
pub(crate) fn get_func_ref(
self: Pin<&mut Self>,
index: FuncIndex,
) -> Option<NonNull<VMFuncRef>> {
if index == FuncIndex::reserved_value() {
return None;
}
let func = &self.env_module().functions[index];
let sig = func.signature.unwrap_engine_type_index();
let func_ref = unsafe {
self.vmctx_plus_offset_raw::<VMFuncRef>(self.offsets().vmctx_func_ref(func.func_ref))
};
unsafe {
self.construct_func_ref(index, sig, func_ref.as_ptr());
}
Some(func_ref)
}
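    /// Looks up the passive element segment `elem_index`, yielding an empty
    /// segment if it was dropped or never existed. The caller-provided
    /// `storage` keeps the cloned module (and the empty fallback segment)
    /// alive for the lifetime of the returned borrow.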
pub(crate) fn passive_element_segment<'a>(
&self,
storage: &'a mut Option<(Arc<wasmtime_environ::Module>, TableSegmentElements)>,
elem_index: ElemIndex,
) -> &'a TableSegmentElements {
debug_assert!(storage.is_none());
*storage = Some((
self.env_module().clone(),
TableSegmentElements::Expressions(Box::new([])),
));
let (module, empty) = storage.as_ref().unwrap();
match module.passive_elements_map.get(&elem_index) {
Some(index) if !self.dropped_elements.contains(elem_index) => {
&module.passive_elements[*index]
}
_ => empty,
}
}
pub(crate) fn table_init(
self: Pin<&mut Self>,
store: &mut StoreOpaque,
table_index: TableIndex,
elem_index: ElemIndex,
dst: u64,
src: u64,
len: u64,
) -> Result<(), Trap> {
let mut storage = None;
let elements = self.passive_element_segment(&mut storage, elem_index);
let mut const_evaluator = ConstExprEvaluator::default();
Self::table_init_segment(
store,
self.id,
&mut const_evaluator,
table_index,
elements,
dst,
src,
len,
)
}
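    /// The `table.init` operation: copies `len` entries of the passive
    /// element segment `elements`, starting at `src`, into the table at
    /// `dst`, evaluating each constant expression for function or GC
    /// references as required by the table's heap type.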
pub(crate) fn table_init_segment(
store: &mut StoreOpaque,
id: InstanceId,
const_evaluator: &mut ConstExprEvaluator,
table_index: TableIndex,
elements: &TableSegmentElements,
dst: u64,
src: u64,
len: u64,
) -> Result<(), Trap> {
let mut instance = store.instance_mut(id);
let table = unsafe { &mut *instance.as_mut().get_table(table_index) };
let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;
let module = instance.env_module().clone();
match elements {
TableSegmentElements::Functions(funcs) => {
let elements = funcs
.get(src..)
.and_then(|s| s.get(..len))
.ok_or(Trap::TableOutOfBounds)?;
table.init_func(
dst,
elements
.iter()
.map(|idx| instance.as_mut().get_func_ref(*idx)),
)?;
}
TableSegmentElements::Expressions(exprs) => {
let exprs = exprs
.get(src..)
.and_then(|s| s.get(..len))
.ok_or(Trap::TableOutOfBounds)?;
let top = module.tables[table_index].ref_type.heap_type.top();
let mut context = ConstEvalContext::new(id);
match top {
WasmHeapTopType::Extern => table.init_gc_refs(
dst,
exprs.iter().map(|expr| unsafe {
let raw = const_evaluator
.eval(store, &mut context, expr)
.expect("const expr should be valid");
VMGcRef::from_raw_u32(raw.get_externref())
}),
)?,
WasmHeapTopType::Any => table.init_gc_refs(
dst,
exprs.iter().map(|expr| unsafe {
let raw = const_evaluator
.eval(store, &mut context, expr)
.expect("const expr should be valid");
VMGcRef::from_raw_u32(raw.get_anyref())
}),
)?,
WasmHeapTopType::Func => table.init_func(
dst,
exprs.iter().map(|expr| unsafe {
NonNull::new(
const_evaluator
.eval(store, &mut context, expr)
.expect("const expr should be valid")
.get_funcref()
.cast(),
)
}),
)?,
                WasmHeapTopType::Cont => todo!(),
            }
}
}
Ok(())
}
pub(crate) fn elem_drop(self: Pin<&mut Self>, elem_index: ElemIndex) {
self.dropped_elements_mut().insert(elem_index);
}
pub fn get_defined_memory(self: Pin<&mut Self>, index: DefinedMemoryIndex) -> *mut Memory {
unsafe { &raw mut (*self.memories_mut().get_raw_mut(index).unwrap()).1 }
}
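    /// The `memory.copy` operation: copies `len` bytes between (possibly
    /// the same) linear memories. `ptr::copy` has `memmove` semantics, so
    /// overlapping source and destination ranges are handled correctly.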
pub(crate) fn memory_copy(
self: Pin<&mut Self>,
dst_index: MemoryIndex,
dst: u64,
src_index: MemoryIndex,
src: u64,
len: u64,
) -> Result<(), Trap> {
let src_mem = self.get_memory(src_index);
let dst_mem = self.get_memory(dst_index);
let src = self.validate_inbounds(src_mem.current_length(), src, len)?;
let dst = self.validate_inbounds(dst_mem.current_length(), dst, len)?;
let len = usize::try_from(len).unwrap();
unsafe {
let dst = dst_mem.base.as_ptr().add(dst);
let src = src_mem.base.as_ptr().add(src);
ptr::copy(src, dst, len);
}
Ok(())
}
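    /// Bounds-checks the range `ptr..ptr + len` against `max`, returning
    /// `ptr` as a `usize` or a `MemoryOutOfBounds` trap on overflow or an
    /// out-of-range access.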
fn validate_inbounds(&self, max: usize, ptr: u64, len: u64) -> Result<usize, Trap> {
let oob = || Trap::MemoryOutOfBounds;
let end = ptr
.checked_add(len)
.and_then(|i| usize::try_from(i).ok())
.ok_or_else(oob)?;
if end > max {
Err(oob())
} else {
Ok(ptr.try_into().unwrap())
}
}
pub(crate) fn memory_fill(
self: Pin<&mut Self>,
memory_index: MemoryIndex,
dst: u64,
val: u8,
len: u64,
) -> Result<(), Trap> {
let memory = self.get_memory(memory_index);
let dst = self.validate_inbounds(memory.current_length(), dst, len)?;
let len = usize::try_from(len).unwrap();
unsafe {
let dst = memory.base.as_ptr().add(dst);
ptr::write_bytes(dst, val, len);
}
Ok(())
}
pub(crate) fn wasm_data_range(&self, index: DataIndex) -> Range<u32> {
match self.env_module().passive_data_map.get(&index) {
Some(range) if !self.dropped_data.contains(index) => range.clone(),
_ => 0..0,
}
}
pub(crate) fn wasm_data(&self, range: Range<u32>) -> &[u8] {
let start = usize::try_from(range.start).unwrap();
let end = usize::try_from(range.end).unwrap();
&self.runtime_info.wasm_data()[start..end]
}
pub(crate) fn memory_init(
self: Pin<&mut Self>,
memory_index: MemoryIndex,
data_index: DataIndex,
dst: u64,
src: u32,
len: u32,
) -> Result<(), Trap> {
let range = self.wasm_data_range(data_index);
self.memory_init_segment(memory_index, range, dst, src, len)
}
pub(crate) fn memory_init_segment(
self: Pin<&mut Self>,
memory_index: MemoryIndex,
range: Range<u32>,
dst: u64,
src: u32,
len: u32,
) -> Result<(), Trap> {
let memory = self.get_memory(memory_index);
let data = self.wasm_data(range);
let dst = self.validate_inbounds(memory.current_length(), dst, len.into())?;
let src = self.validate_inbounds(data.len(), src.into(), len.into())?;
let len = len as usize;
unsafe {
let src_start = data.as_ptr().add(src);
let dst_start = memory.base.as_ptr().add(dst);
ptr::copy_nonoverlapping(src_start, dst_start, len);
}
Ok(())
}
pub(crate) fn data_drop(self: Pin<&mut Self>, data_index: DataIndex) {
self.dropped_data_mut().insert(data_index);
}
pub(crate) fn get_table_with_lazy_init(
self: Pin<&mut Self>,
table_index: TableIndex,
range: impl Iterator<Item = u64>,
) -> *mut Table {
self.with_defined_table_index_and_instance(table_index, |idx, instance| {
instance.get_defined_table_with_lazy_init(idx, range)
})
}
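    /// Returns the defined table at `idx`, lazily initializing any
    /// still-uninitialized func-ref elements within `range` from the
    /// module's precomputed initial values. `range` should cover the
    /// indices the caller is about to access.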
pub fn get_defined_table_with_lazy_init(
mut self: Pin<&mut Self>,
idx: DefinedTableIndex,
range: impl Iterator<Item = u64>,
) -> *mut Table {
let elt_ty = self.tables[idx].1.element_type();
if elt_ty == TableElementType::Func {
for i in range {
let value = match self.tables[idx].1.get(None, i) {
Some(value) => value,
None => {
break;
}
};
if !value.is_uninit() {
continue;
}
let module = self.env_module();
let precomputed = match &module.table_initialization.initial_values[idx] {
TableInitialValue::Null { precomputed } => precomputed,
TableInitialValue::Expr(_) => unreachable!(),
};
let func_index = precomputed.get(usize::try_from(i).unwrap()).cloned();
let func_ref =
func_index.and_then(|func_index| self.as_mut().get_func_ref(func_index));
self.as_mut().tables_mut()[idx]
.1
.set(i, TableElement::FuncRef(func_ref))
.expect("Table type should match and index should be in-bounds");
}
}
unsafe { &raw mut (*self.tables_mut().get_raw_mut(idx).unwrap()).1 }
}
pub(crate) fn get_table(self: Pin<&mut Self>, table_index: TableIndex) -> *mut Table {
self.with_defined_table_index_and_instance(table_index, |idx, instance| unsafe {
&raw mut (*instance.tables_mut().get_raw_mut(idx).unwrap()).1
})
}
pub(crate) fn get_defined_table(self: Pin<&mut Self>, index: DefinedTableIndex) -> *mut Table {
unsafe { &raw mut (*self.tables_mut().get_raw_mut(index).unwrap()).1 }
}
pub(crate) fn with_defined_table_index_and_instance<R>(
self: Pin<&mut Self>,
index: TableIndex,
f: impl FnOnce(DefinedTableIndex, Pin<&mut Instance>) -> R,
) -> R {
if let Some(defined_table_index) = self.env_module().defined_table_index(index) {
f(defined_table_index, self)
} else {
let import = self.imported_table(index);
unsafe {
Instance::from_vmctx(import.vmctx.as_non_null(), |foreign_instance| {
let foreign_table_def = import.from.as_ptr();
let foreign_table_index = foreign_instance.table_index(&*foreign_table_def);
f(foreign_table_index, foreign_instance)
})
}
}
}
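    /// Initializes the vmctx trailing this instance: the magic value, the
    /// store/type/builtin pointers, the imported entity arrays, and the
    /// definitions for defined tables, memories, globals, and tags.
    ///
    /// # Safety
    ///
    /// Must be called exactly once, before the vmctx is exposed to any wasm
    /// code, with `module` and `offsets` matching this instance's module.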
unsafe fn initialize_vmctx(
mut self: Pin<&mut Self>,
module: &Module,
offsets: &VMOffsets<HostPtr>,
store: StorePtr,
imports: Imports,
) {
assert!(ptr::eq(module, self.env_module().as_ref()));
self.vmctx_plus_offset_raw::<u32>(offsets.ptr.vmctx_magic())
.write(VMCONTEXT_MAGIC);
self.as_mut().set_store(store.as_raw());
let types = NonNull::from(self.runtime_info.type_ids());
self.type_ids_array().write(types.cast().into());
static BUILTINS: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray::INIT;
let ptr = BUILTINS.expose_provenance();
self.vmctx_plus_offset_raw(offsets.ptr.vmctx_builtin_functions())
.write(VmPtr::from(ptr));
debug_assert_eq!(imports.functions.len(), module.num_imported_funcs);
ptr::copy_nonoverlapping(
imports.functions.as_ptr(),
self.vmctx_plus_offset_raw(offsets.vmctx_imported_functions_begin())
.as_ptr(),
imports.functions.len(),
);
debug_assert_eq!(imports.tables.len(), module.num_imported_tables);
ptr::copy_nonoverlapping(
imports.tables.as_ptr(),
self.vmctx_plus_offset_raw(offsets.vmctx_imported_tables_begin())
.as_ptr(),
imports.tables.len(),
);
debug_assert_eq!(imports.memories.len(), module.num_imported_memories);
ptr::copy_nonoverlapping(
imports.memories.as_ptr(),
self.vmctx_plus_offset_raw(offsets.vmctx_imported_memories_begin())
.as_ptr(),
imports.memories.len(),
);
debug_assert_eq!(imports.globals.len(), module.num_imported_globals);
ptr::copy_nonoverlapping(
imports.globals.as_ptr(),
self.vmctx_plus_offset_raw(offsets.vmctx_imported_globals_begin())
.as_ptr(),
imports.globals.len(),
);
debug_assert_eq!(imports.tags.len(), module.num_imported_tags);
ptr::copy_nonoverlapping(
imports.tags.as_ptr(),
self.vmctx_plus_offset_raw(offsets.vmctx_imported_tags_begin())
.as_ptr(),
imports.tags.len(),
);
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_tables_begin());
let tables = self.as_mut().tables_mut();
for i in 0..module.num_defined_tables() {
ptr.write(tables[DefinedTableIndex::new(i)].1.vmtable());
ptr = ptr.add(1);
}
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_memories_begin());
let mut owned_ptr = self.vmctx_plus_offset_raw(offsets.vmctx_owned_memories_begin());
let memories = self.as_mut().memories_mut();
for i in 0..module.num_defined_memories() {
let defined_memory_index = DefinedMemoryIndex::new(i);
let memory_index = module.memory_index(defined_memory_index);
if module.memories[memory_index].shared {
let def_ptr = memories[defined_memory_index]
.1
.as_shared_memory()
.unwrap()
.vmmemory_ptr();
ptr.write(VmPtr::from(def_ptr));
} else {
owned_ptr.write(memories[defined_memory_index].1.vmmemory());
ptr.write(VmPtr::from(owned_ptr));
owned_ptr = owned_ptr.add(1);
}
ptr = ptr.add(1);
}
for (index, _init) in module.global_initializers.iter() {
self.global_ptr(index).write(VMGlobalDefinition::new());
}
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_tags_begin());
for i in 0..module.num_defined_tags() {
let defined_index = DefinedTagIndex::new(i);
let tag_index = module.tag_index(defined_index);
let tag = module.tags[tag_index];
ptr.write(VMTagDefinition::new(
tag.signature.unwrap_engine_type_index(),
));
ptr = ptr.add(1);
}
}
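    /// Attempts to attribute the faulting address `addr` to one of this
    /// instance's linear memories, returning fault metadata if `addr` falls
    /// within a memory's accessible range.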
pub fn wasm_fault(&self, addr: usize) -> Option<WasmFault> {
let mut fault = None;
for (_, (_, memory)) in self.memories.iter() {
let accessible = memory.wasm_accessible();
if accessible.start <= addr && addr < accessible.end {
assert!(fault.is_none());
fault = Some(WasmFault {
memory_size: memory.byte_size(),
wasm_address: u64::try_from(addr - accessible.start).unwrap(),
});
}
}
fault
}
pub fn id(&self) -> InstanceId {
self.id
}
pub fn all_memories<'a>(
&'a self,
) -> impl ExactSizeIterator<Item = (MemoryIndex, ExportMemory)> + 'a {
let indices = (0..self.env_module().memories.len())
.map(|i| MemoryIndex::new(i))
.collect::<Vec<_>>();
indices
.into_iter()
.map(|i| (i, self.get_exported_memory(i)))
}
pub fn defined_memories<'a>(&'a self) -> impl ExactSizeIterator<Item = ExportMemory> + 'a {
let num_imported = self.env_module().num_imported_memories;
self.all_memories()
.skip(num_imported)
.map(|(_i, memory)| memory)
}
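    /// Translates an `EntityIndex` from the module's export map into a
    /// runtime `Export` value.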
pub fn get_export_by_index_mut(self: Pin<&mut Self>, export: EntityIndex) -> Export {
match export {
EntityIndex::Function(i) => Export::Function(self.get_exported_func(i)),
EntityIndex::Global(i) => Export::Global(self.get_exported_global(i)),
EntityIndex::Table(i) => Export::Table(self.get_exported_table(i)),
EntityIndex::Memory(i) => Export::Memory(self.get_exported_memory(i)),
EntityIndex::Tag(i) => Export::Tag(self.get_exported_tag(i)),
}
}
fn store_mut(self: Pin<&mut Self>) -> &mut Option<VMStoreRawPtr> {
unsafe { &mut self.get_unchecked_mut().store }
}
fn dropped_elements_mut(self: Pin<&mut Self>) -> &mut EntitySet<ElemIndex> {
unsafe { &mut self.get_unchecked_mut().dropped_elements }
}
fn dropped_data_mut(self: Pin<&mut Self>) -> &mut EntitySet<DataIndex> {
unsafe { &mut self.get_unchecked_mut().dropped_data }
}
fn memories_mut(
self: Pin<&mut Self>,
) -> &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)> {
unsafe { &mut self.get_unchecked_mut().memories }
}
fn tables_mut(
self: Pin<&mut Self>,
) -> &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)> {
unsafe { &mut self.get_unchecked_mut().tables }
}
#[cfg(feature = "wmemcheck")]
pub(super) fn wmemcheck_state_mut(self: Pin<&mut Self>) -> &mut Option<Wmemcheck> {
unsafe { &mut self.get_unchecked_mut().wmemcheck_state }
}
}
unsafe impl InstanceLayout for Instance {
const INIT_ZEROED: bool = false;
type VMContext = VMContext;
fn layout(&self) -> Layout {
Self::alloc_layout(self.runtime_info.offsets())
}
fn owned_vmctx(&self) -> &OwnedVMContext<VMContext> {
&self.vmctx
}
fn owned_vmctx_mut(&mut self) -> &mut OwnedVMContext<VMContext> {
&mut self.vmctx
}
}
pub type InstanceHandle = OwnedInstance<Instance>;
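/// An owning pointer to an `InstanceLayout` allocation: the `T` header plus
/// its trailing, variable-sized vmctx area, allocated and freed as a single
/// block.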
#[derive(Debug)]
#[repr(transparent)]
pub struct OwnedInstance<T: InstanceLayout> {
instance: SendSyncPtr<T>,
_marker: marker::PhantomData<Box<(T, OwnedVMContext<T::VMContext>)>>,
}
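/// Header for the variable-sized vmctx area that trails an instance
/// allocation.
///
/// `vmctx_self_reference` is written by `OwnedInstance::new` and carries
/// pointer provenance for the entire trailing allocation;
/// `InstanceLayout::vmctx` recombines that provenance with this field's own
/// address.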
#[repr(align(16))]
pub struct OwnedVMContext<T> {
vmctx_self_reference: SendSyncPtr<T>,
_marker: core::marker::PhantomPinned,
}
impl<T> OwnedVMContext<T> {
pub fn new() -> OwnedVMContext<T> {
OwnedVMContext {
vmctx_self_reference: SendSyncPtr::new(NonNull::dangling()),
_marker: core::marker::PhantomPinned,
}
}
}
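/// A contract for types allocated with a trailing vmctx area, tying together
/// the allocation's size and alignment with access to the vmctx header.
///
/// # Safety
///
/// Implementations must report a `layout()` large enough for `Self` plus the
/// vmctx data, and the `OwnedVMContext` field must be the last field of
/// `Self`.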
pub unsafe trait InstanceLayout {
const INIT_ZEROED: bool;
type VMContext;
fn layout(&self) -> Layout;
fn owned_vmctx(&self) -> &OwnedVMContext<Self::VMContext>;
fn owned_vmctx_mut(&mut self) -> &mut OwnedVMContext<Self::VMContext>;
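    /// Returns the address just past the `OwnedVMContext` header, i.e. the
    /// start of the vmctx data, reconstructed from `vmctx_self_reference` so
    /// that the returned pointer retains provenance over the whole trailing
    /// allocation rather than just this header field.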
#[inline]
fn vmctx(&self) -> NonNull<Self::VMContext> {
let owned_vmctx = self.owned_vmctx();
let owned_vmctx_raw = NonNull::from(owned_vmctx);
let addr = unsafe { owned_vmctx_raw.add(1) };
owned_vmctx
.vmctx_self_reference
.as_non_null()
.with_addr(addr.addr())
}
unsafe fn vmctx_plus_offset_raw<T: VmSafe>(&self, offset: impl Into<u32>) -> NonNull<T> {
unsafe {
self.vmctx()
.byte_add(usize::try_from(offset.into()).unwrap())
.cast()
}
}
unsafe fn vmctx_plus_offset<T: VmSafe>(&self, offset: impl Into<u32>) -> &T {
unsafe { self.vmctx_plus_offset_raw(offset).as_ref() }
}
unsafe fn vmctx_plus_offset_mut<T: VmSafe>(
self: Pin<&mut Self>,
offset: impl Into<u32>,
) -> &mut T {
unsafe { self.vmctx_plus_offset_raw(offset).as_mut() }
}
}
impl<T: InstanceLayout> OwnedInstance<T> {
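    /// Allocates `instance.layout()` bytes, seeds the vmctx self-reference
    /// pointer (whose provenance must cover the whole block), and moves
    /// `instance` into the head of the allocation.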
pub(super) fn new(mut instance: T) -> OwnedInstance<T> {
let layout = instance.layout();
debug_assert!(layout.size() >= size_of_val(&instance));
debug_assert!(layout.align() >= align_of_val(&instance));
let ptr = unsafe {
assert!(layout.size() > 0);
if T::INIT_ZEROED {
alloc::alloc::alloc_zeroed(layout)
} else {
alloc::alloc::alloc(layout)
}
};
if ptr.is_null() {
alloc::alloc::handle_alloc_error(layout);
}
let instance_ptr = NonNull::new(ptr.cast::<T>()).unwrap();
let vmctx_self_reference = unsafe { instance_ptr.add(1).cast() };
instance.owned_vmctx_mut().vmctx_self_reference = vmctx_self_reference.into();
unsafe {
instance_ptr.write(instance);
}
let ret = OwnedInstance {
instance: SendSyncPtr::new(instance_ptr),
_marker: marker::PhantomData,
};
debug_assert_eq!(
vmctx_self_reference.addr(),
unsafe { NonNull::from(ret.get().owned_vmctx()).add(1).addr() }
);
debug_assert_eq!(vmctx_self_reference.addr(), ret.get().vmctx().addr());
ret
}
pub fn get(&self) -> &T {
unsafe { self.instance.as_non_null().as_ref() }
}
pub fn get_mut(&mut self) -> Pin<&mut T> {
unsafe { Pin::new_unchecked(self.instance.as_non_null().as_mut()) }
}
}
impl<T: InstanceLayout> Drop for OwnedInstance<T> {
fn drop(&mut self) {
unsafe {
let layout = self.get().layout();
ptr::drop_in_place(self.instance.as_ptr());
alloc::alloc::dealloc(self.instance.as_ptr().cast(), layout);
}
}
}