use crate::imports::Imports;
use crate::instance::{Instance, InstanceHandle};
use crate::memory::Memory;
use crate::mpk::ProtectionKey;
use crate::table::{Table, TableElementType};
use crate::{CompiledModuleId, ModuleRuntimeInfo, Store, VMGcRef, I31};
use anyhow::{anyhow, bail, Result};
use std::{alloc, any::Any, mem, ptr, sync::Arc};
use wasmtime_environ::{
DefinedMemoryIndex, DefinedTableIndex, HostPtr, InitMemory, MemoryInitialization,
MemoryInitializer, MemoryPlan, Module, PrimaryMap, TableInitialValue, TablePlan, TableSegment,
Trap, VMOffsets, WasmValType, WASM_PAGE_SIZE,
};
#[cfg(feature = "gc")]
use crate::{GcHeap, GcRuntime};
#[cfg(feature = "component-model")]
use wasmtime_environ::{
component::{Component, VMComponentOffsets},
StaticModuleIndex,
};
mod on_demand;
pub use self::on_demand::OnDemandInstanceAllocator;
#[cfg(feature = "pooling-allocator")]
mod pooling;
#[cfg(feature = "pooling-allocator")]
pub use self::pooling::{InstanceLimits, PoolingInstanceAllocator, PoolingInstanceAllocatorConfig};
/// A request to allocate a new core-wasm instance.
pub struct InstanceAllocationRequest<'a> {
    /// Runtime information about the module being instantiated: its
    /// `Module`, `VMOffsets`, etc. (see the `module()`/`offsets()` accessors
    /// used by `allocate_module` below).
    pub runtime_info: &'a Arc<dyn ModuleRuntimeInfo>,
    /// The resolved imports this instance will be instantiated with.
    pub imports: Imports<'a>,
    /// Arbitrary host state to associate with the created instance.
    pub host_state: Box<dyn Any + Send + Sync>,
    /// Possibly-empty pointer back to the owning store; see `StorePtr`.
    pub store: StorePtr,
    /// Whether wmemcheck instrumentation is enabled.
    /// NOTE(review): not consumed in this file — confirm semantics at the
    /// allocator implementations that read it.
    pub wmemcheck: bool,
    /// Optional MPK protection key to associate with this request.
    /// NOTE(review): presumably consumed by the pooling allocator — confirm.
    pub pkey: Option<ProtectionKey>,
}
/// A nullable raw pointer to the `Store` an instance is being allocated
/// into. Kept as a raw pointer (rather than a borrow) because the store's
/// lifetime cannot be expressed here.
pub struct StorePtr(Option<*mut dyn Store>);

impl StorePtr {
    /// A `StorePtr` that points at no store.
    pub fn empty() -> Self {
        Self(None)
    }

    /// A `StorePtr` wrapping the given raw store pointer.
    pub fn new(ptr: *mut dyn Store) -> Self {
        Self(Some(ptr))
    }

    /// The raw pointer, if any.
    pub fn as_raw(&self) -> Option<*mut dyn Store> {
        // `Option<*mut _>` is `Copy`; the previous `.clone()` was redundant
        // (clippy::clone_on_copy).
        self.0
    }

    /// Borrow the store, if any.
    ///
    /// # Safety
    ///
    /// The wrapped pointer, if present, must still point to a live `Store`
    /// and the caller must guarantee exclusive access for the returned
    /// borrow's lifetime.
    pub(crate) unsafe fn get(&mut self) -> Option<&mut dyn Store> {
        self.0.map(|ptr| &mut *ptr)
    }
}
/// Index into an allocator's pool of memory allocations.
///
/// The `Default` value is a `u32::MAX` sentinel for "no pool slot"
/// (NOTE(review): presumably used by non-pooling allocators — confirm).
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct MemoryAllocationIndex(u32);

impl Default for MemoryAllocationIndex {
    fn default() -> Self {
        // All-ones sentinel value.
        Self(u32::MAX)
    }
}

impl MemoryAllocationIndex {
    /// Get this allocation index as a plain `usize`.
    pub fn index(&self) -> usize {
        usize::try_from(self.0).expect("u32 fits in usize on supported targets")
    }
}
/// Index into an allocator's pool of table allocations.
///
/// The `Default` value is a `u32::MAX` sentinel for "no pool slot"
/// (NOTE(review): presumably used by non-pooling allocators — confirm).
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct TableAllocationIndex(u32);

impl Default for TableAllocationIndex {
    fn default() -> Self {
        // All-ones sentinel value.
        Self(u32::MAX)
    }
}

impl TableAllocationIndex {
    /// Get this allocation index as a plain `usize`.
    pub fn index(&self) -> usize {
        usize::try_from(self.0).expect("u32 fits in usize on supported targets")
    }
}
/// Index into an allocator's pool of GC heap allocations.
///
/// The `Default` value is a `u32::MAX` sentinel for "no pool slot"
/// (NOTE(review): presumably used by non-pooling allocators — confirm).
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct GcHeapAllocationIndex(u32);

impl Default for GcHeapAllocationIndex {
    fn default() -> Self {
        // All-ones sentinel value.
        Self(u32::MAX)
    }
}

impl GcHeapAllocationIndex {
    /// Get this allocation index as a plain `usize`.
    pub fn index(&self) -> usize {
        usize::try_from(self.0).expect("u32 fits in usize on supported targets")
    }
}
/// The raw hook interface implemented by low-level instance allocators.
///
/// Consumers should use the `InstanceAllocator` extension trait below,
/// which layers whole-module allocation/deallocation on top of these
/// per-resource hooks via a blanket impl.
///
/// # Safety
///
/// NOTE(review): the trait is `unsafe` to implement; the exact invariants
/// are not spelled out in this file but presumably mirror the
/// allocate/deallocate pairing contracts noted on the methods below —
/// confirm against crate-level docs.
pub unsafe trait InstanceAllocatorImpl {
    /// Validate that the given component (and its modules, looked up via
    /// `get_module`) can be allocated by this allocator.
    #[cfg(feature = "component-model")]
    fn validate_component_impl<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()>;
    /// Validate that the given module can be allocated by this allocator.
    fn validate_module_impl(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()>;
    /// Increment the count of live component instances; may fail (e.g. when
    /// an allocator-imposed limit is reached — TODO confirm in impls).
    fn increment_component_instance_count(&self) -> Result<()>;
    /// Decrement the count of live component instances.
    fn decrement_component_instance_count(&self);
    /// Increment the count of live core instances; may fail (e.g. when an
    /// allocator-imposed limit is reached — TODO confirm in impls).
    fn increment_core_instance_count(&self) -> Result<()>;
    /// Decrement the count of live core instances.
    fn decrement_core_instance_count(&self);
    /// Allocate a memory for defined memory `memory_index` of the module in
    /// `request`.
    ///
    /// # Safety
    ///
    /// The returned index/`Memory` pair must eventually be passed back to
    /// `deallocate_memory` together (see `deallocate_memories` below).
    unsafe fn allocate_memory(
        &self,
        request: &mut InstanceAllocationRequest,
        memory_plan: &MemoryPlan,
        memory_index: DefinedMemoryIndex,
    ) -> Result<(MemoryAllocationIndex, Memory)>;
    /// Deallocate a memory previously returned by `allocate_memory`.
    ///
    /// # Safety
    ///
    /// `allocation_index` and `memory` must be the pair produced by the
    /// corresponding `allocate_memory` call.
    unsafe fn deallocate_memory(
        &self,
        memory_index: DefinedMemoryIndex,
        allocation_index: MemoryAllocationIndex,
        memory: Memory,
    );
    /// Allocate a table for defined table `table_index` of the module in
    /// `req`.
    ///
    /// # Safety
    ///
    /// Same pairing contract as `allocate_memory`/`deallocate_memory`.
    unsafe fn allocate_table(
        &self,
        req: &mut InstanceAllocationRequest,
        table_plan: &TablePlan,
        table_index: DefinedTableIndex,
    ) -> Result<(TableAllocationIndex, Table)>;
    /// Deallocate a table previously returned by `allocate_table`.
    ///
    /// # Safety
    ///
    /// `allocation_index` and `table` must be the pair produced by the
    /// corresponding `allocate_table` call.
    unsafe fn deallocate_table(
        &self,
        table_index: DefinedTableIndex,
        allocation_index: TableAllocationIndex,
        table: Table,
    );
    /// Allocate a fiber stack for async execution.
    #[cfg(feature = "async")]
    fn allocate_fiber_stack(&self) -> Result<wasmtime_fiber::FiberStack>;
    /// Deallocate a fiber stack previously returned by
    /// `allocate_fiber_stack`.
    #[cfg(feature = "async")]
    unsafe fn deallocate_fiber_stack(&self, stack: &wasmtime_fiber::FiberStack);
    /// Allocate a GC heap backed by the given `gc_runtime`.
    #[cfg(feature = "gc")]
    fn allocate_gc_heap(
        &self,
        gc_runtime: &dyn GcRuntime,
    ) -> Result<(GcHeapAllocationIndex, Box<dyn GcHeap>)>;
    /// Deallocate a GC heap previously returned by `allocate_gc_heap`.
    #[cfg(feature = "gc")]
    fn deallocate_gc_heap(&self, allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>);
    /// Drop allocator-internal state associated with the given module.
    /// NOTE(review): exact semantics live in the implementations — confirm.
    fn purge_module(&self, module: CompiledModuleId);
    /// Return the next available MPK protection key, if any
    /// (see `crate::mpk`).
    fn next_available_pkey(&self) -> Option<ProtectionKey>;
    /// Restrict this allocator's accesses to the given protection key.
    /// NOTE(review): presumed MPK semantics — confirm in `crate::mpk`.
    fn restrict_to_pkey(&self, pkey: ProtectionKey);
    /// Undo `restrict_to_pkey`: allow access under all protection keys.
    fn allow_all_pkeys(&self);
}
/// The high-level interface for allocating whole instances.
///
/// Don't implement this trait directly: it is blanket-implemented for every
/// `InstanceAllocatorImpl` (see the `impl` below). Its provided methods
/// compose the raw per-resource hooks into allocate/deallocate-a-module
/// operations with error cleanup.
pub trait InstanceAllocator: InstanceAllocatorImpl {
    /// Validate whether a component is allocatable by this allocator.
    #[cfg(feature = "component-model")]
    fn validate_component<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()> {
        InstanceAllocatorImpl::validate_component_impl(self, component, offsets, get_module)
    }
    /// Validate whether a module is allocatable by this allocator.
    fn validate_module(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()> {
        InstanceAllocatorImpl::validate_module_impl(self, module, offsets)
    }
    /// Allocate everything a core instance needs (defined memories, defined
    /// tables, and the `Instance` itself), undoing partial work on failure.
    ///
    /// # Safety
    ///
    /// Inherits the contracts of `allocate_memories`/`allocate_tables`; the
    /// returned handle must eventually be passed to `deallocate_module`.
    unsafe fn allocate_module(
        &self,
        mut request: InstanceAllocationRequest,
    ) -> Result<InstanceHandle> {
        let module = request.runtime_info.module();
        // Debug builds re-check what release builds trust was validated
        // before allocation was requested.
        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");
        self.increment_core_instance_count()?;
        // Only *defined* (non-imported) memories/tables are allocated here;
        // imported ones arrive through `request.imports`.
        let num_defined_memories = module.memory_plans.len() - module.num_imported_memories;
        let mut memories = PrimaryMap::with_capacity(num_defined_memories);
        let num_defined_tables = module.table_plans.len() - module.num_imported_tables;
        let mut tables = PrimaryMap::with_capacity(num_defined_tables);
        // Run the fallible steps inside a closure so the single `Err` arm
        // below can unwind whatever subset succeeded.
        match (|| {
            self.allocate_memories(&mut request, &mut memories)?;
            self.allocate_tables(&mut request, &mut tables)?;
            Ok(())
        })() {
            Ok(_) => Ok(Instance::new(
                request,
                memories,
                tables,
                &module.memory_plans,
            )),
            Err(e) => {
                // Release partially-allocated resources and restore the
                // instance count before propagating the error.
                self.deallocate_memories(&mut memories);
                self.deallocate_tables(&mut tables);
                self.decrement_core_instance_count();
                Err(e)
            }
        }
    }
    /// Deallocate an instance created by `allocate_module`: its memories,
    /// tables, and finally the `Instance` allocation itself.
    ///
    /// # Safety
    ///
    /// `handle` must have come from `allocate_module` on this same
    /// allocator, and must not be used afterwards.
    unsafe fn deallocate_module(&self, handle: &mut InstanceHandle) {
        self.deallocate_memories(&mut handle.instance_mut().memories);
        self.deallocate_tables(&mut handle.instance_mut().tables);
        // Drop and free the `Instance` using the same layout it was
        // allocated with (derived from its `VMOffsets`).
        let layout = Instance::alloc_layout(handle.instance().offsets());
        let ptr = handle.instance.take().unwrap();
        ptr::drop_in_place(ptr.as_ptr());
        alloc::dealloc(ptr.as_ptr().cast(), layout);
        self.decrement_core_instance_count();
    }
    /// Allocate every defined memory of the module, pushing results into
    /// `memories`. On error, entries already pushed are left in the map for
    /// the caller to clean up via `deallocate_memories`.
    unsafe fn allocate_memories(
        &self,
        request: &mut InstanceAllocationRequest,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) -> Result<()> {
        let module = request.runtime_info.module();
        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");
        // Skip imported memories: only defined ones are allocated here.
        for (memory_index, memory_plan) in module
            .memory_plans
            .iter()
            .skip(module.num_imported_memories)
        {
            let memory_index = module
                .defined_memory_index(memory_index)
                .expect("should be a defined memory since we skipped imported ones");
            memories.push(self.allocate_memory(request, memory_plan, memory_index)?);
        }
        Ok(())
    }
    /// Deallocate (and empty) the given map of memories, pairing each entry
    /// back with its allocation index.
    unsafe fn deallocate_memories(
        &self,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) {
        // `mem::take` leaves an empty map behind so this is safe to call
        // on an already-drained map.
        for (memory_index, (allocation_index, memory)) in mem::take(memories) {
            self.deallocate_memory(memory_index, allocation_index, memory);
        }
    }
    /// Allocate every defined table of the module, pushing results into
    /// `tables`. Same partial-failure contract as `allocate_memories`.
    unsafe fn allocate_tables(
        &self,
        request: &mut InstanceAllocationRequest,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) -> Result<()> {
        let module = request.runtime_info.module();
        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");
        // Skip imported tables: only defined ones are allocated here.
        for (index, plan) in module.table_plans.iter().skip(module.num_imported_tables) {
            let def_index = module
                .defined_table_index(index)
                .expect("should be a defined table since we skipped imported ones");
            tables.push(self.allocate_table(request, plan, def_index)?);
        }
        Ok(())
    }
    /// Deallocate (and empty) the given map of tables, pairing each entry
    /// back with its allocation index.
    unsafe fn deallocate_tables(
        &self,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) {
        for (table_index, (allocation_index, table)) in mem::take(tables) {
            self.deallocate_table(table_index, allocation_index, table);
        }
    }
}
// Blanket impl: every raw allocator automatically gets the composite
// `InstanceAllocator` methods; only `InstanceAllocatorImpl` is ever
// implemented by hand.
impl<T: InstanceAllocatorImpl> InstanceAllocator for T {}
/// Compute the starting element index of a table segment, adding the
/// current value of its base global (if any) to the static offset.
fn get_table_init_start(init: &TableSegment, instance: &mut Instance) -> Result<u32> {
    // Without a global base the static offset is the answer.
    let Some(base) = init.base else {
        return Ok(init.offset);
    };
    // Read the base global's current value as a u32.
    let base_val = unsafe { *(*instance.defined_or_imported_global_ptr(base)).as_u32() };
    init.offset
        .checked_add(base_val)
        .ok_or_else(|| anyhow!("element segment global base overflows"))
}
/// Check that every element segment of `module` fits within the bounds of
/// its target table in `instance`; error on the first that does not.
fn check_table_init_bounds(instance: &mut Instance, module: &Module) -> Result<()> {
    for segment in module.table_initialization.segments.iter() {
        // `get_table` yields a raw pointer, so this shared borrow doesn't
        // conflict with the `&mut Instance` passed to the helper below.
        let table = unsafe { &*instance.get_table(segment.table_index) };
        let start = get_table_init_start(segment, instance)?;
        let start = usize::try_from(start).unwrap();
        let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap());
        match end {
            Some(end) if end <= table.size() as usize => {
                // Segment is in bounds; nothing to do.
            }
            _ => {
                // Either the addition overflowed or the segment extends
                // past the current table size.
                bail!("table out of bounds: elements segment does not fit")
            }
        }
    }
    Ok(())
}
/// Fill `instance`'s defined tables with their initial values, then apply
/// the module's active element segments.
fn initialize_tables(instance: &mut Instance, module: &Module) -> Result<()> {
    // Phase 1: per-table initial fill values.
    for (table, init) in module.table_initialization.initial_values.iter() {
        match init {
            // Null-initialized tables need no work here.
            // NOTE(review): presumably zeroed at allocation time — confirm.
            TableInitialValue::Null { precomputed: _ } => {}
            TableInitialValue::FuncRef(idx) => {
                // Fill the entire table with one function reference.
                let funcref = instance.get_func_ref(*idx).unwrap();
                let table = unsafe { &mut *instance.get_defined_table(table) };
                let init = (0..table.size()).map(|_| funcref);
                table.init_func(0, init)?;
            }
            TableInitialValue::GlobalGet(idx) => unsafe {
                // Fill the table with the current value of a global, which
                // is either a funcref or a GC reference depending on the
                // table's element type.
                let global = instance.defined_or_imported_global_ptr(*idx);
                let table = &mut *instance.get_defined_table(table);
                match table.element_type() {
                    TableElementType::Func => {
                        let funcref = (*global).as_func_ref();
                        let init = (0..table.size()).map(|_| funcref);
                        table.init_func(0, init)?;
                    }
                    TableElementType::GcRef => {
                        let gc_ref = (*global).as_gc_ref();
                        let gc_ref = gc_ref.map(|r| r.unchecked_copy());
                        // Each slot gets its own GC-store-tracked clone of
                        // the reference.
                        let init = (0..table.size()).map(|_| {
                            gc_ref
                                .as_ref()
                                .map(|r| (*instance.store()).gc_store().clone_gc_ref(r))
                        });
                        table.init_gc_refs(0, init)?;
                    }
                }
            },
            TableInitialValue::I31Ref(value) => {
                // NOTE(review): `unchecked_copy` here is presumably fine
                // because i31 refs are not heap-allocated — confirm.
                let value = VMGcRef::from_i31(I31::wrapping_i32(*value));
                let table = unsafe { &mut *instance.get_defined_table(table) };
                let init = (0..table.size()).map(|_| {
                    Some(value.unchecked_copy())
                });
                table.init_gc_refs(0, init)?;
            }
        }
    }
    // Phase 2: active element segments at their (possibly global-relative)
    // start offsets.
    for segment in module.table_initialization.segments.iter() {
        let start = get_table_init_start(segment, instance)?;
        instance.table_init_segment(
            segment.table_index,
            &segment.elements,
            start,
            0,
            segment.elements.len(),
        )?;
    }
    Ok(())
}
/// Compute the starting byte offset of a data segment, adding the current
/// value of its base global (if any) to the static offset.
fn get_memory_init_start(init: &MemoryInitializer, instance: &mut Instance) -> Result<u64> {
    // Without a global base the static offset is the answer.
    let Some(base) = init.base else {
        return Ok(init.offset);
    };
    // The base global's width depends on whether this is a 64-bit memory.
    let is_mem64 = instance.module().memory_plans[init.memory_index]
        .memory
        .memory64;
    let base_val = unsafe {
        let global = instance.defined_or_imported_global_ptr(base);
        if is_mem64 {
            *(*global).as_u64()
        } else {
            u64::from(*(*global).as_u32())
        }
    };
    init.offset
        .checked_add(base_val)
        .ok_or_else(|| anyhow!("data segment global base overflows"))
}
/// Check that each data segment fits within the current bounds of its
/// target memory in `instance`; error on the first that does not.
fn check_memory_init_bounds(
    instance: &mut Instance,
    initializers: &[MemoryInitializer],
) -> Result<()> {
    for init in initializers {
        let memory = instance.get_memory(init.memory_index);
        let start = get_memory_init_start(init, instance)?;
        // The segment fits iff start converts to usize, start + len doesn't
        // overflow, and the end stays within the memory's current length.
        let fits = usize::try_from(start)
            .ok()
            .and_then(|start| start.checked_add(init.data.len()))
            .map_or(false, |end| end <= memory.current_length());
        if !fits {
            bail!("memory out of bounds: data segment does not fit");
        }
    }
    Ok(())
}
/// Run the module's memory-initialization plan against `instance`'s
/// memories, returning a `MemoryOutOfBounds` trap if the plan reports
/// failure.
fn initialize_memories(instance: &mut Instance, module: &Module) -> Result<()> {
    // Callbacks the environ-side initializer uses to query runtime state.
    let memory_size_in_pages = &|instance: &mut Instance, memory| {
        (instance.get_memory(memory).current_length() as u64) / u64::from(WASM_PAGE_SIZE)
    };
    // Read a global used as a segment base, widening i32 values to u64.
    let get_global_as_u64 = &mut |instance: &mut Instance, global| unsafe {
        let def = instance.defined_or_imported_global_ptr(global);
        if module.globals[global].wasm_ty == WasmValType::I64 {
            *(*def).as_u64()
        } else {
            u64::from(*(*def).as_u32())
        }
    };
    let ok = module.memory_initialization.init_memory(
        instance,
        InitMemory::Runtime {
            memory_size_in_pages,
            get_global_as_u64,
        },
        |instance, memory_index, init| {
            // Defined memories that don't need eager initialization are
            // skipped but still count as success.
            // NOTE(review): presumably already initialized via CoW images —
            // confirm against `Memory::needs_init`.
            if let Some(memory_index) = module.defined_memory_index(memory_index) {
                if !instance.memories[memory_index].1.needs_init() {
                    return true;
                }
            }
            let memory = instance.get_memory(memory_index);
            unsafe {
                // Copy the segment bytes into linear memory.
                // NOTE(review): bounds are assumed validated by
                // `init_memory` before this callback runs — confirm.
                let src = instance.wasm_data(init.data.clone());
                let dst = memory.base.add(usize::try_from(init.offset).unwrap());
                ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len())
            }
            true
        },
    );
    // `init_memory` returns `false` when a segment was out of bounds.
    if !ok {
        return Err(Trap::MemoryOutOfBounds.into());
    }
    Ok(())
}
/// Eagerly validate all table and memory segment bounds before any
/// initialization takes place.
fn check_init_bounds(instance: &mut Instance, module: &Module) -> Result<()> {
    check_table_init_bounds(instance, module)?;
    // Only the `Segmented` form carries initializers to check here; the
    // `Static` form has nothing for this pass to validate.
    if let MemoryInitialization::Segmented(initializers) = &module.memory_initialization {
        check_memory_init_bounds(instance, initializers)?;
    }
    Ok(())
}
/// Initialize a freshly-allocated instance's tables and memories.
///
/// When `is_bulk_memory` is false, all segment bounds are validated up
/// front so initialization is all-or-nothing; when true, that eager check
/// is skipped and out-of-bounds segments surface as a trap from the
/// per-segment initialization instead (see `initialize_memories`).
pub(super) fn initialize_instance(
    instance: &mut Instance,
    module: &Module,
    is_bulk_memory: bool,
) -> Result<()> {
    if !is_bulk_memory {
        check_init_bounds(instance, module)?;
    }
    initialize_tables(instance, module)?;
    // `module` is already a `&Module`; the previous `&module` was a
    // redundant double-borrow (clippy::needless_borrow).
    initialize_memories(instance, module)?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn allocator_traits_are_object_safe() {
        // Compile-time check: both traits must remain usable as `dyn`
        // trait objects. If a non-object-safe method is ever added, these
        // coercions stop compiling and this test fails to build.
        fn _instance_allocator(_: &dyn InstanceAllocatorImpl) {}
        fn _instance_allocator_ext(_: &dyn InstanceAllocator) {}
    }
}