use crate::prelude::*;
use crate::runtime::vm::const_expr::{ConstEvalContext, ConstExprEvaluator};
use crate::runtime::vm::imports::Imports;
use crate::runtime::vm::instance::{Instance, InstanceHandle};
use crate::runtime::vm::memory::Memory;
use crate::runtime::vm::mpk::ProtectionKey;
use crate::runtime::vm::table::Table;
use crate::runtime::vm::{CompiledModuleId, ModuleRuntimeInfo, VMFuncRef, VMGcRef, VMStore};
use crate::store::{AutoAssertNoGc, InstanceId, StoreOpaque};
use crate::vm::VMGlobalDefinition;
use core::ptr::NonNull;
use core::{mem, ptr};
use wasmtime_environ::{
DefinedMemoryIndex, DefinedTableIndex, HostPtr, InitMemory, MemoryInitialization,
MemoryInitializer, Module, PrimaryMap, SizeOverflow, TableInitialValue, Trap, Tunables,
VMOffsets, WasmHeapTopType,
};
#[cfg(feature = "gc")]
use crate::runtime::vm::{GcHeap, GcRuntime};
#[cfg(feature = "component-model")]
use wasmtime_environ::{
StaticModuleIndex,
component::{Component, VMComponentOffsets},
};
// The default allocation strategy: resources are created on demand per instance.
mod on_demand;
pub use self::on_demand::OnDemandInstanceAllocator;

// Optional pooling strategy: instances, memories, and tables are served from
// pre-reserved pools (enabled via the `pooling-allocator` feature).
#[cfg(feature = "pooling-allocator")]
mod pooling;
#[cfg(feature = "pooling-allocator")]
pub use self::pooling::{
    InstanceLimits, PoolConcurrencyLimitError, PoolingInstanceAllocator,
    PoolingInstanceAllocatorConfig,
};
/// The set of arguments describing one core-module instance to allocate.
pub struct InstanceAllocationRequest<'a> {
    /// The store-assigned id of the instance being created.
    pub id: InstanceId,

    /// Compiled-module runtime metadata (env module, `VMOffsets`, etc.) that
    /// allocation and validation consult.
    pub runtime_info: &'a ModuleRuntimeInfo,

    /// The resolved imports the instance will be instantiated with.
    pub imports: Imports<'a>,

    /// Possibly-null raw pointer back to the owning store; see [`StorePtr`].
    pub store: StorePtr,

    /// Whether wmemcheck instrumentation is enabled for this instance.
    #[cfg_attr(not(feature = "wmemcheck"), allow(dead_code))]
    pub wmemcheck: bool,

    /// Optional MPK protection key associated with this request; only
    /// consulted by the pooling allocator (hence the `dead_code` expectation
    /// without that feature).
    #[cfg_attr(
        not(feature = "pooling-allocator"),
        expect(
            dead_code,
            reason = "easier to keep this field than remove it, not perf-critical to remove"
        )
    )]
    pub pkey: Option<ProtectionKey>,

    /// Engine tunables governing memory/table configuration for this request.
    pub tunables: &'a Tunables,
}
/// A wrapper around an optional raw pointer to the `dyn VMStore` that owns the
/// instance being allocated; `None` when no store is available yet.
pub struct StorePtr(Option<NonNull<dyn VMStore>>);

// SAFETY: NOTE(review): sending/sharing this raw pointer across threads is only
// sound if it is dereferenced (via `StorePtr::get`) while the caller has
// exclusive access to the store — TODO confirm against all users of `get`.
unsafe impl Send for StorePtr {}
unsafe impl Sync for StorePtr {}
impl StorePtr {
    /// Creates a `StorePtr` holding no store pointer.
    pub fn empty() -> Self {
        Self(None)
    }

    /// Creates a `StorePtr` from a non-null pointer to a store.
    pub fn new(ptr: NonNull<dyn VMStore>) -> Self {
        Self(Some(ptr))
    }

    /// Returns the raw pointer, if any, without dereferencing it.
    pub fn as_raw(&self) -> Option<NonNull<dyn VMStore>> {
        self.0
    }

    /// Dereferences the stored pointer, if any, yielding a mutable store
    /// reference.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the pointer is still valid and that no
    /// other reference to the store is live for the duration of the returned
    /// borrow.
    pub(crate) unsafe fn get(&mut self) -> Option<&mut dyn VMStore> {
        let ptr = self.0?.as_mut();
        Some(ptr)
    }
}
/// An allocator-internal index identifying which slot a linear memory was
/// allocated from.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct MemoryAllocationIndex(u32);

impl Default for MemoryAllocationIndex {
    /// The default index is `u32::MAX`, used when no particular slot applies
    /// (e.g. on-demand allocation).
    fn default() -> Self {
        Self(u32::MAX)
    }
}

impl MemoryAllocationIndex {
    /// Returns this index as a `usize`, suitable for indexing into a pool.
    #[cfg(feature = "pooling-allocator")]
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}
/// An allocator-internal index identifying which slot a table was allocated
/// from.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct TableAllocationIndex(u32);

impl Default for TableAllocationIndex {
    /// The default index is `u32::MAX`, used when no particular slot applies
    /// (e.g. on-demand allocation).
    fn default() -> Self {
        Self(u32::MAX)
    }
}

impl TableAllocationIndex {
    /// Returns this index as a `usize`, suitable for indexing into a pool.
    #[cfg(feature = "pooling-allocator")]
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}
/// An allocator-internal index identifying which slot a GC heap was allocated
/// from.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct GcHeapAllocationIndex(u32);

impl Default for GcHeapAllocationIndex {
    /// The default index is `u32::MAX`, used when no particular slot applies.
    fn default() -> Self {
        Self(u32::MAX)
    }
}

impl GcHeapAllocationIndex {
    /// Returns this index as a `usize`, suitable for indexing into a pool.
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}
/// The object-safe, low-level interface implemented by every instance
/// allocation strategy (on-demand, and the pooling allocator when enabled).
///
/// Callers should prefer the [`InstanceAllocator`] extension trait, whose
/// default methods combine these primitives and unwind correctly on failure;
/// the `*_impl` suffixes keep these from colliding with those wrappers.
///
/// # Safety
///
/// NOTE(review): this trait is `unsafe` to implement; implementations must
/// uphold the pairing contract between each `allocate_*` and `deallocate_*`
/// method below — TODO confirm the precise obligations against the
/// `on_demand`/`pooling` implementors.
pub unsafe trait InstanceAllocatorImpl {
    /// Validates that `component`, and each of its modules (resolved through
    /// `get_module`), can be allocated by this allocator.
    #[cfg(feature = "component-model")]
    fn validate_component_impl<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()>;

    /// Validates that `module` can be allocated by this allocator.
    fn validate_module_impl(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()>;

    /// Validates that a memory of type `memory` (used when allocating GC
    /// heaps) can be allocated by this allocator.
    #[cfg(feature = "gc")]
    fn validate_memory_impl(&self, memory: &wasmtime_environ::Memory) -> Result<()>;

    /// Increments the count of live component instances; may fail (e.g. if an
    /// allocator-imposed limit would be exceeded).
    #[cfg(feature = "component-model")]
    fn increment_component_instance_count(&self) -> Result<()>;

    /// Decrements the count of live component instances.
    #[cfg(feature = "component-model")]
    fn decrement_component_instance_count(&self);

    /// Increments the count of live core instances; may fail (e.g. if an
    /// allocator-imposed limit would be exceeded).
    fn increment_core_instance_count(&self) -> Result<()>;

    /// Decrements the count of live core instances.
    fn decrement_core_instance_count(&self);

    /// Allocates one linear memory of type `ty`, returning the
    /// allocator-internal slot index alongside the memory itself.
    ///
    /// # Safety
    ///
    /// NOTE(review): caller obligations (validity of `request`'s raw store
    /// pointer, eventual paired deallocation) are implied by the unsafe
    /// marker — TODO confirm against implementors.
    unsafe fn allocate_memory(
        &self,
        request: &mut InstanceAllocationRequest,
        ty: &wasmtime_environ::Memory,
        tunables: &Tunables,
        memory_index: Option<DefinedMemoryIndex>,
    ) -> Result<(MemoryAllocationIndex, Memory)>;

    /// Returns `memory` to this allocator.
    ///
    /// # Safety
    ///
    /// `memory` and `allocation_index` must have come from a prior
    /// `allocate_memory` call on this same allocator.
    unsafe fn deallocate_memory(
        &self,
        memory_index: Option<DefinedMemoryIndex>,
        allocation_index: MemoryAllocationIndex,
        memory: Memory,
    );

    /// Allocates one table of type `table`, returning the allocator-internal
    /// slot index alongside the table itself.
    ///
    /// # Safety
    ///
    /// Same pairing contract as [`InstanceAllocatorImpl::allocate_memory`].
    unsafe fn allocate_table(
        &self,
        req: &mut InstanceAllocationRequest,
        table: &wasmtime_environ::Table,
        tunables: &Tunables,
        table_index: DefinedTableIndex,
    ) -> Result<(TableAllocationIndex, Table)>;

    /// Returns `table` to this allocator.
    ///
    /// # Safety
    ///
    /// `table` and `allocation_index` must have come from a prior
    /// `allocate_table` call on this same allocator.
    unsafe fn deallocate_table(
        &self,
        table_index: DefinedTableIndex,
        allocation_index: TableAllocationIndex,
        table: Table,
    );

    /// Allocates a stack for running async fibers on.
    #[cfg(feature = "async")]
    fn allocate_fiber_stack(&self) -> Result<wasmtime_fiber::FiberStack>;

    /// Returns `stack` to this allocator.
    ///
    /// # Safety
    ///
    /// `stack` must have come from `allocate_fiber_stack` on this same
    /// allocator and must no longer be in use.
    #[cfg(feature = "async")]
    unsafe fn deallocate_fiber_stack(&self, stack: wasmtime_fiber::FiberStack);

    /// Wraps `memory` (previously allocated via `allocate_memory`) into a GC
    /// heap managed by `gc_runtime`.
    #[cfg(feature = "gc")]
    fn allocate_gc_heap(
        &self,
        engine: &crate::Engine,
        gc_runtime: &dyn GcRuntime,
        memory_alloc_index: MemoryAllocationIndex,
        memory: Memory,
    ) -> Result<(GcHeapAllocationIndex, Box<dyn GcHeap>)>;

    /// Tears down `gc_heap`, handing back the underlying memory (and its slot
    /// index) so the caller can deallocate it separately.
    #[cfg(feature = "gc")]
    #[must_use = "it is the caller's responsibility to deallocate the GC heap's underlying memory \
                  storage after the GC heap is deallocated"]
    fn deallocate_gc_heap(
        &self,
        allocation_index: GcHeapAllocationIndex,
        gc_heap: Box<dyn GcHeap>,
    ) -> (MemoryAllocationIndex, Memory);

    /// Drops any allocator-internal state associated with `module`.
    fn purge_module(&self, module: CompiledModuleId);

    /// Returns the next MPK protection key available for use, if any.
    fn next_available_pkey(&self) -> Option<ProtectionKey>;

    /// Restricts the current thread's memory accesses to regions tagged with
    /// `pkey`.
    fn restrict_to_pkey(&self, pkey: ProtectionKey);

    /// Lifts any MPK restrictions on the current thread.
    fn allow_all_pkeys(&self);
}
/// The high-level allocation interface, provided for every
/// [`InstanceAllocatorImpl`] via a blanket impl.
///
/// The default methods here pair the low-level `allocate_*`/`deallocate_*`
/// primitives so that partially-successful allocations are unwound correctly.
pub trait InstanceAllocator: InstanceAllocatorImpl {
    /// See [`InstanceAllocatorImpl::validate_component_impl`].
    #[cfg(feature = "component-model")]
    fn validate_component<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()> {
        InstanceAllocatorImpl::validate_component_impl(self, component, offsets, get_module)
    }

    /// See [`InstanceAllocatorImpl::validate_module_impl`].
    fn validate_module(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()> {
        InstanceAllocatorImpl::validate_module_impl(self, module, offsets)
    }

    /// See [`InstanceAllocatorImpl::validate_memory_impl`].
    #[cfg(feature = "gc")]
    fn validate_memory(&self, memory: &wasmtime_environ::Memory) -> Result<()> {
        InstanceAllocatorImpl::validate_memory_impl(self, memory)
    }

    /// Allocates everything a core-module instance needs (instance count,
    /// memories, tables) and constructs the instance.
    ///
    /// On any failure, resources allocated so far are returned to the
    /// allocator and the core instance count is decremented before the error
    /// propagates.
    ///
    /// # Safety
    ///
    /// NOTE(review): inherits the contracts of the underlying unsafe
    /// `allocate_memory`/`allocate_table` primitives — TODO confirm.
    unsafe fn allocate_module(
        &self,
        mut request: InstanceAllocationRequest,
    ) -> Result<InstanceHandle> {
        let module = request.runtime_info.env_module();

        // Validation is a precondition; re-check it in debug builds only.
        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");

        self.increment_core_instance_count()?;

        let num_defined_memories = module.num_defined_memories();
        let mut memories = PrimaryMap::with_capacity(num_defined_memories);

        let num_defined_tables = module.num_defined_tables();
        let mut tables = PrimaryMap::with_capacity(num_defined_tables);

        // Immediately-invoked closure so `?` can short-circuit into the
        // unwinding `Err` arm below.
        match (|| {
            self.allocate_memories(&mut request, &mut memories)?;
            self.allocate_tables(&mut request, &mut tables)?;
            Ok(())
        })() {
            Ok(_) => Ok(Instance::new(request, memories, tables, &module.memories)),
            Err(e) => {
                // Unwind: return whatever was allocated before the failure.
                self.deallocate_memories(&mut memories);
                self.deallocate_tables(&mut tables);
                self.decrement_core_instance_count();
                Err(e)
            }
        }
    }

    /// Deallocates everything `allocate_module` created for this handle.
    ///
    /// # Safety
    ///
    /// `handle` must have been produced by `allocate_module` on this same
    /// allocator and must not be used afterwards.
    unsafe fn deallocate_module(&self, handle: &mut InstanceHandle) {
        self.deallocate_memories(handle.get_mut().memories_mut());
        self.deallocate_tables(handle.get_mut().tables_mut());
        self.decrement_core_instance_count();
    }

    /// Allocates every memory defined by the module (imported memories are
    /// skipped), pushing each into `memories` in index order.
    ///
    /// Entries already pushed are not unwound here on error; `allocate_module`
    /// performs that cleanup.
    ///
    /// # Safety
    ///
    /// Same contract as [`InstanceAllocator::allocate_module`].
    unsafe fn allocate_memories(
        &self,
        request: &mut InstanceAllocationRequest,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) -> Result<()> {
        let module = request.runtime_info.env_module();

        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");

        for (memory_index, ty) in module.memories.iter().skip(module.num_imported_memories) {
            let memory_index = module
                .defined_memory_index(memory_index)
                .expect("should be a defined memory since we skipped imported ones");

            memories.push(self.allocate_memory(
                request,
                ty,
                request.tunables,
                Some(memory_index),
            )?);
        }

        Ok(())
    }

    /// Returns every memory in `memories` to the allocator, draining the map.
    ///
    /// # Safety
    ///
    /// Each entry must have come from `allocate_memories` on this allocator.
    unsafe fn deallocate_memories(
        &self,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) {
        for (memory_index, (allocation_index, memory)) in mem::take(memories) {
            self.deallocate_memory(Some(memory_index), allocation_index, memory);
        }
    }

    /// Allocates every table defined by the module (imported tables are
    /// skipped), pushing each into `tables` in index order.
    ///
    /// As with `allocate_memories`, cleanup on error happens in
    /// `allocate_module`.
    ///
    /// # Safety
    ///
    /// Same contract as [`InstanceAllocator::allocate_module`].
    unsafe fn allocate_tables(
        &self,
        request: &mut InstanceAllocationRequest,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) -> Result<()> {
        let module = request.runtime_info.env_module();

        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");

        for (index, table) in module.tables.iter().skip(module.num_imported_tables) {
            let def_index = module
                .defined_table_index(index)
                .expect("should be a defined table since we skipped imported ones");

            tables.push(self.allocate_table(request, table, request.tunables, def_index)?);
        }

        Ok(())
    }

    /// Returns every table in `tables` to the allocator, draining the map.
    ///
    /// # Safety
    ///
    /// Each entry must have come from `allocate_tables` on this allocator.
    unsafe fn deallocate_tables(
        &self,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) {
        for (table_index, (allocation_index, table)) in mem::take(tables) {
            self.deallocate_table(table_index, allocation_index, table);
        }
    }
}
impl<T: InstanceAllocatorImpl> InstanceAllocator for T {}
/// Verifies that every active table-element segment in `module` fits within
/// the bounds of its target table in `instance`, without writing anything.
fn check_table_init_bounds(
    store: &mut StoreOpaque,
    instance: InstanceId,
    module: &Module,
) -> Result<()> {
    let mut const_evaluator = ConstExprEvaluator::default();

    for segment in module.table_initialization.segments.iter() {
        // NOTE(review): the raw-pointer deref sidesteps borrowing `store`
        // across the const-expr evaluation below; assumes `eval` cannot
        // invalidate the table reference — TODO confirm.
        let table = unsafe { &*store.instance_mut(instance).get_table(segment.table_index) };
        let mut context = ConstEvalContext::new(instance);
        // Offsets of validated modules are const expressions, so failure to
        // evaluate indicates an engine bug rather than a user error.
        let start = unsafe {
            const_evaluator
                .eval(store, &mut context, &segment.offset)
                .expect("const expression should be valid")
        };
        // NOTE(review): only the low 32 bits of the evaluated offset are used
        // here; presumably 64-bit tables are handled elsewhere — TODO confirm.
        let start = usize::try_from(start.get_u32()).unwrap();
        let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap());

        match end {
            // No overflow and the segment fits within the current table size.
            Some(end) if end <= table.size() => {}
            _ => {
                bail!("table out of bounds: elements segment does not fit")
            }
        }
    }

    Ok(())
}
/// Initializes the instance's defined tables: first the per-table initial
/// values (null or an evaluated const expression splatted across the table),
/// then the active element segments on top.
fn initialize_tables(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    for (table, init) in module.table_initialization.initial_values.iter() {
        match init {
            // A `Null` initial value requires no writes here.
            TableInitialValue::Null { precomputed: _ } => {}

            TableInitialValue::Expr(expr) => {
                // Evaluate the initializer once, then fill every slot with it.
                let raw = unsafe {
                    const_evaluator
                        .eval(store, context, expr)
                        .expect("const expression should be valid")
                };
                let idx = module.table_index(table);
                let table = unsafe {
                    store
                        .instance_mut(context.instance)
                        .get_defined_table(table)
                        .as_mut()
                        .unwrap()
                };
                // Dispatch on the table's top heap type to pick the right
                // representation of the evaluated value.
                match module.tables[idx].ref_type.heap_type.top() {
                    WasmHeapTopType::Extern => {
                        let gc_ref = VMGcRef::from_raw_u32(raw.get_externref());
                        let gc_store = store.gc_store_mut()?;
                        // Clone the evaluated reference into each slot.
                        let items = (0..table.size())
                            .map(|_| gc_ref.as_ref().map(|r| gc_store.clone_gc_ref(r)));
                        table.init_gc_refs(0, items)?;
                    }

                    WasmHeapTopType::Any => {
                        let gc_ref = VMGcRef::from_raw_u32(raw.get_anyref());
                        let gc_store = store.gc_store_mut()?;
                        let items = (0..table.size())
                            .map(|_| gc_ref.as_ref().map(|r| gc_store.clone_gc_ref(r)));
                        table.init_gc_refs(0, items)?;
                    }

                    WasmHeapTopType::Func => {
                        let funcref = NonNull::new(raw.get_funcref().cast::<VMFuncRef>());
                        let items = (0..table.size()).map(|_| funcref);
                        table.init_func(0, items)?;
                    }

                    // Continuation tables are not yet supported.
                    WasmHeapTopType::Cont => todo!(),
                }
            }
        }
    }

    // Apply the active element segments after the table-wide initial values.
    for segment in module.table_initialization.segments.iter() {
        let start = unsafe {
            const_evaluator
                .eval(store, context, &segment.offset)
                .expect("const expression should be valid")
        };
        Instance::table_init_segment(
            store,
            context.instance,
            const_evaluator,
            segment.table_index,
            &segment.elements,
            start.get_u64(),
            0,
            segment.elements.len(),
        )?;
    }

    Ok(())
}
/// Evaluates a data segment's offset expression for `instance`, returning it
/// widened to `u64` according to the target memory's index type.
fn get_memory_init_start(
    store: &mut StoreOpaque,
    init: &MemoryInitializer,
    instance: InstanceId,
) -> Result<u64> {
    let mut context = ConstEvalContext::new(instance);
    let mut const_evaluator = ConstExprEvaluator::default();
    let value = unsafe { const_evaluator.eval(store, &mut context, &init.offset) }?;
    // A 32-bit indexed memory yields a u32 offset; widen it losslessly.
    let idx_type = store.instance(instance).env_module().memories[init.memory_index].idx_type;
    let start = match idx_type {
        wasmtime_environ::IndexType::I32 => u64::from(value.get_u32()),
        wasmtime_environ::IndexType::I64 => value.get_u64(),
    };
    Ok(start)
}
/// Verifies that every data segment in `initializers` fits within the current
/// length of its target memory in `instance`, without writing anything.
fn check_memory_init_bounds(
    store: &mut StoreOpaque,
    instance: InstanceId,
    initializers: &[MemoryInitializer],
) -> Result<()> {
    for init in initializers {
        let memory = store.instance_mut(instance).get_memory(init.memory_index);
        let start = get_memory_init_start(store, init, instance)?;
        // The segment fits iff the start converts to usize, the end doesn't
        // overflow, and the end stays within the memory's current length.
        let fits = usize::try_from(start)
            .ok()
            .and_then(|start| start.checked_add(init.data.len()))
            .is_some_and(|end| end <= memory.current_length());
        if !fits {
            bail!("memory out of bounds: data segment does not fit")
        }
    }
    Ok(())
}
/// Runs `module`'s memory initializers against `context.instance`'s memories,
/// returning a `MemoryOutOfBounds` trap if any segment is out of bounds.
fn initialize_memories(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    // Adapter connecting wasmtime-environ's generic `InitMemory` walk to this
    // particular store/instance.
    struct InitMemoryAtInstantiation<'a> {
        module: &'a Module,
        store: &'a mut StoreOpaque,
        context: &'a mut ConstEvalContext,
        const_evaluator: &'a mut ConstExprEvaluator,
    }

    impl InitMemory for InitMemoryAtInstantiation<'_> {
        /// Reports the current byte length of `memory`.
        fn memory_size_in_bytes(
            &mut self,
            memory: wasmtime_environ::MemoryIndex,
        ) -> Result<u64, SizeOverflow> {
            let len = self
                .store
                .instance(self.context.instance)
                .get_memory(memory)
                .current_length();
            let len = u64::try_from(len).unwrap();
            Ok(len)
        }

        /// Evaluates a segment's offset expression, widened to `u64` per the
        /// memory's index type. Offsets of validated modules are const
        /// expressions, so evaluation failure indicates an engine bug.
        fn eval_offset(
            &mut self,
            memory: wasmtime_environ::MemoryIndex,
            expr: &wasmtime_environ::ConstExpr,
        ) -> Option<u64> {
            let val = unsafe { self.const_evaluator.eval(self.store, self.context, expr) }
                .expect("const expression should be valid");
            Some(
                match self
                    .store
                    .instance(self.context.instance)
                    .env_module()
                    .memories[memory]
                    .idx_type
                {
                    wasmtime_environ::IndexType::I32 => val.get_u32().into(),
                    wasmtime_environ::IndexType::I64 => val.get_u64(),
                },
            )
        }

        /// Copies one static initializer into the instance's memory, returning
        /// `true` so the walk continues.
        fn write(
            &mut self,
            memory_index: wasmtime_environ::MemoryIndex,
            init: &wasmtime_environ::StaticMemoryInitializer,
        ) -> bool {
            let instance = self.store.instance_mut(self.context.instance);
            // Skip defined memories that don't need initialization
            // (NOTE(review): presumably already populated, e.g. from a
            // pre-built image — confirm what `needs_init` covers).
            if let Some(memory_index) = self.module.defined_memory_index(memory_index) {
                if !instance.memories[memory_index].1.needs_init() {
                    return true;
                }
            }
            let memory = instance.get_memory(memory_index);
            unsafe {
                let src = instance.wasm_data(init.data.clone());
                let offset = usize::try_from(init.offset).unwrap();
                let dst = memory.base.as_ptr().add(offset);
                // Defense-in-depth before the raw copy: the destination range
                // must lie within the memory's current length.
                assert!(offset + src.len() <= memory.current_length());
                ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len())
            }
            true
        }
    }

    let ok = module
        .memory_initialization
        .init_memory(&mut InitMemoryAtInstantiation {
            module,
            store,
            context,
            const_evaluator,
        });
    if !ok {
        return Err(Trap::MemoryOutOfBounds.into());
    }

    Ok(())
}
/// Runs the up-front bounds checks for both table-element segments and, for
/// segmented memory initialization, data segments.
fn check_init_bounds(store: &mut StoreOpaque, instance: InstanceId, module: &Module) -> Result<()> {
    check_table_init_bounds(store, instance, module)?;

    // Static memory initialization has no per-segment list to validate here;
    // only segmented initialization carries explicit initializers.
    if let MemoryInitialization::Segmented(initializers) = &module.memory_initialization {
        check_memory_init_bounds(store, instance, initializers)?;
    }

    Ok(())
}
/// Evaluates `module`'s global initializers and writes the results into the
/// instance's global storage.
fn initialize_globals(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    // Caller contract: `module` must be the exact module this instance was
    // created from (pointer equality, not just structural equality).
    assert!(core::ptr::eq(
        &**store.instance(context.instance).env_module(),
        module
    ));

    // NOTE(review): raw values are held across these writes, so GC must not
    // run; `AutoAssertNoGc` appears to enforce that — confirm its guarantee.
    let mut store = AutoAssertNoGc::new(store);

    for (index, init) in module.global_initializers.iter() {
        let raw = unsafe {
            const_evaluator
                .eval(&mut store, context, init)
                .expect("should be a valid const expr")
        };

        let instance = store.instance_mut(context.instance);
        let to = instance.global_ptr(index);
        let wasm_ty = module.globals[module.global_index(index)].wasm_ty;

        // NOTE(review): wmemcheck treats the first i32 global's initial value
        // as the stack size — presumably the conventional stack-pointer
        // global; confirm against the wmemcheck implementation.
        #[cfg(feature = "wmemcheck")]
        if index.as_u32() == 0 && wasm_ty == wasmtime_environ::WasmValType::I32 {
            if let Some(wmemcheck) = instance.wmemcheck_state_mut() {
                let size = usize::try_from(raw.get_i32()).unwrap();
                wmemcheck.set_stack_size(size);
            }
        }

        unsafe {
            to.write(VMGlobalDefinition::from_val_raw(&mut store, wasm_ty, raw)?);
        };
    }
    Ok(())
}
/// Runs all of `module`'s initializers — globals, then tables, then memories —
/// against the already-allocated `instance` within `store`.
///
/// When `is_bulk_memory` is `false`, every table-element and data segment is
/// bounds-checked up front so initialization is all-or-nothing; when `true`,
/// an out-of-bounds segment instead traps partway through initialization.
pub fn initialize_instance(
    store: &mut StoreOpaque,
    instance: InstanceId,
    module: &Module,
    is_bulk_memory: bool,
) -> Result<()> {
    // Pre-flight checks preserve pre-bulk-memory instantiation semantics:
    // nothing is written unless every segment fits.
    if !is_bulk_memory {
        check_init_bounds(store, instance, module)?;
    }

    let mut context = ConstEvalContext::new(instance);
    let mut const_evaluator = ConstExprEvaluator::default();

    initialize_globals(store, &mut context, &mut const_evaluator, module)?;
    initialize_tables(store, &mut context, &mut const_evaluator, module)?;
    // Fix: pass `module` directly instead of the needless `&module` (a
    // `&&Module` that only auto-deref made work), matching the sibling calls.
    initialize_memories(store, &mut context, &mut const_evaluator, module)?;

    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Compile-time check that both allocator traits are object-safe: these
    /// function signatures only type-check if `dyn Trait` is permitted.
    #[test]
    fn allocator_traits_are_object_safe() {
        fn _instance_allocator(_: &dyn InstanceAllocatorImpl) {}
        fn _instance_allocator_ext(_: &dyn InstanceAllocator) {}
    }
}