use crate::prelude::*;
use crate::runtime::vm::{
self, InterpreterRef, SendSyncPtr, StoreBox, VMArrayCallHostFuncContext,
VMCommonStackInformation, VMContext, VMFuncRef, VMFunctionImport, VMOpaqueContext,
VMStoreContext,
};
use crate::store::{Asyncness, AutoAssertNoGc, InstanceId, StoreId, StoreOpaque};
use crate::type_registry::RegisteredType;
use crate::{
AsContext, AsContextMut, CallHook, Engine, Extern, FuncType, Instance, ModuleExport, Ref,
StoreContext, StoreContextMut, Val, ValRaw, ValType,
};
use alloc::sync::Arc;
use core::convert::Infallible;
use core::ffi::c_void;
#[cfg(feature = "async")]
use core::future::Future;
use core::mem::{self, MaybeUninit};
use core::ptr::NonNull;
use wasmtime_environ::VMSharedTypeIndex;
/// An uninhabited function-reference type.
///
/// `NoFunc` wraps [`Infallible`] and therefore can never be constructed; any
/// `Option<NoFunc>` is statically `None`. It exists so typed APIs can express
/// a function reference that is guaranteed to be null.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct NoFunc {
    // Uninhabited payload: prevents construction of `NoFunc` values entirely.
    _inner: Infallible,
}
impl NoFunc {
#[inline]
pub fn null() -> Option<NoFunc> {
None
}
#[inline]
pub fn null_ref() -> Ref {
Ref::Func(None)
}
#[inline]
pub fn null_val() -> Val {
Val::FuncRef(None)
}
}
/// A compact, copyable handle to a WebAssembly function.
///
/// A `Func` is only a (store id, funcref pointer) pair; all operations assert
/// that the `Func` is used with the store it was created in (see
/// `vm_func_ref`). The `#[repr(C)]` layout is asserted below, so keep the
/// field order stable.
#[derive(Copy, Clone, Debug)]
#[repr(C)] pub struct Func {
    // Identity of the owning store; checked on every access.
    store: StoreId,
    // Raw pointer into store-owned funcref storage. Only safe to dereference
    // after `store` has been validated against the current store.
    unsafe_func_ref: SendSyncPtr<VMFuncRef>,
}
// Compile-time proof that `Func` has the layout of `(u64, *mut u8)` with
// `store` at offset 0 — presumably relied upon by external (C API) consumers
// of this type; the assertions pin the layout so accidental field changes
// fail to compile. NOTE(review): confirm the exact external consumer.
const _: () = {
    #[repr(C)]
    struct C(u64, *mut u8);
    assert!(core::mem::size_of::<C>() == core::mem::size_of::<Func>());
    assert!(core::mem::align_of::<C>() == core::mem::align_of::<Func>());
    assert!(core::mem::offset_of!(Func, store) == 0);
};
// Invokes `$mac!` once per supported host-function arity (0 through 17
// parameters), passing the arity followed by that many type-parameter
// identifiers. Used below to generate `WasmRet`/`IntoFunc`/`WasmTyList`
// impls for every tuple size.
macro_rules! for_each_function_signature {
    ($mac:ident) => {
        $mac!(0);
        $mac!(1 A1);
        $mac!(2 A1 A2);
        $mac!(3 A1 A2 A3);
        $mac!(4 A1 A2 A3 A4);
        $mac!(5 A1 A2 A3 A4 A5);
        $mac!(6 A1 A2 A3 A4 A5 A6);
        $mac!(7 A1 A2 A3 A4 A5 A6 A7);
        $mac!(8 A1 A2 A3 A4 A5 A6 A7 A8);
        $mac!(9 A1 A2 A3 A4 A5 A6 A7 A8 A9);
        $mac!(10 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10);
        $mac!(11 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11);
        $mac!(12 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12);
        $mac!(13 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13);
        $mac!(14 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13 A14);
        $mac!(15 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13 A14 A15);
        $mac!(16 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13 A14 A15 A16);
        $mac!(17 A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13 A14 A15 A16 A17);
    };
}
mod typed;
use crate::runtime::vm::VMStackChain;
pub use typed::*;
impl Func {
    /// Creates a new host function from a dynamically-typed closure.
    ///
    /// Arguments/results are passed as `&[Val]` slices and checked against
    /// `ty` at call time by the wrapper installed in `HostFunc::new`.
    pub fn new<T: 'static>(
        mut store: impl AsContextMut<Data = T>,
        ty: FuncType,
        func: impl Fn(Caller<'_, T>, &[Val], &mut [Val]) -> Result<()> + Send + Sync + 'static,
    ) -> Self {
        let store = store.as_context_mut().0;
        let host = HostFunc::new(store.engine(), ty, func);
        unsafe { host.into_func(store) }
    }
    /// Like [`Func::new`] but the closure receives raw `ValRaw` storage.
    ///
    /// # Safety
    /// The closure must read/write values consistently with `ty`; no dynamic
    /// type checking is performed on the raw slots.
    pub unsafe fn new_unchecked<T: 'static>(
        mut store: impl AsContextMut<Data = T>,
        ty: FuncType,
        func: impl Fn(Caller<'_, T>, &mut [MaybeUninit<ValRaw>]) -> Result<()> + Send + Sync + 'static,
    ) -> Self {
        let store = store.as_context_mut().0;
        let host = unsafe { HostFunc::new_unchecked(store.engine(), ty, func) };
        unsafe { host.into_func(store) }
    }
    /// Async variant of [`Func::new`]: the closure returns a boxed future
    /// which is driven on the store's async machinery.
    #[cfg(feature = "async")]
    pub fn new_async<T, F>(mut store: impl AsContextMut<Data = T>, ty: FuncType, func: F) -> Func
    where
        F: for<'a> Fn(
                Caller<'a, T>,
                &'a [Val],
                &'a mut [Val],
            ) -> Box<dyn Future<Output = Result<()>> + Send + 'a>
            + Send
            + Sync
            + 'static,
        T: Send + 'static,
    {
        let store = store.as_context_mut().0;
        let host = HostFunc::new_async(store.engine(), ty, func);
        unsafe { host.into_func(store) }
    }
    /// Builds a `Func` from a raw funcref pointer plus its owning store's id.
    ///
    /// The debug assertion checks the funcref's type index has been
    /// initialized (i.e. it is not a default/blank entry).
    pub(crate) unsafe fn from_vm_func_ref(store: StoreId, func_ref: NonNull<VMFuncRef>) -> Func {
        unsafe {
            debug_assert!(func_ref.as_ref().type_index != VMSharedTypeIndex::default());
        }
        Func {
            store,
            unsafe_func_ref: func_ref.into(),
        }
    }
    /// Creates a host function whose wasm type is inferred from the Rust
    /// closure's parameter/result types (statically typed fast path).
    pub fn wrap<T, Params, Results>(
        mut store: impl AsContextMut<Data = T>,
        func: impl IntoFunc<T, Params, Results>,
    ) -> Func
    where
        T: 'static,
    {
        let store = store.as_context_mut().0;
        let engine = store.engine();
        let host = func.into_func(engine);
        unsafe { host.into_func(store) }
    }
    /// Async variant of [`Func::wrap`].
    #[cfg(feature = "async")]
    pub fn wrap_async<T, F, P, R>(mut store: impl AsContextMut<Data = T>, func: F) -> Func
    where
        F: for<'a> Fn(Caller<'a, T>, P) -> Box<dyn Future<Output = R> + Send + 'a>
            + Send
            + Sync
            + 'static,
        P: WasmTyList,
        R: WasmRet,
        T: Send + 'static,
    {
        let store = store.as_context_mut().0;
        let host = HostFunc::wrap_async(store.engine(), func);
        unsafe { host.into_func(store) }
    }
    /// Returns the wasm type of this function.
    pub fn ty(&self, store: impl AsContext) -> FuncType {
        self.load_ty(&store.as_context().0)
    }
    /// Internal: reconstructs the `FuncType` from the engine-registered
    /// shared type index stored in the funcref.
    pub(crate) fn load_ty(&self, store: &StoreOpaque) -> FuncType {
        FuncType::from_shared_type_index(store.engine(), self.type_index(store))
    }
    /// Returns whether this function's type is a subtype of `func_ty`.
    pub fn matches_ty(&self, store: impl AsContext, func_ty: &FuncType) -> bool {
        self._matches_ty(store.as_context().0, func_ty)
    }
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, func_ty: &FuncType) -> bool {
        let actual_ty = self.load_ty(store);
        actual_ty.matches(func_ty)
    }
    /// Validates both store ownership and type compatibility, returning a
    /// descriptive error on mismatch.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, func_ty: &FuncType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, func_ty) {
            Ok(())
        } else {
            let actual_ty = self.load_ty(store);
            bail!("type mismatch: expected {func_ty}, found {actual_ty}")
        }
    }
    /// Shared type index of this function within the engine's type registry.
    pub(crate) fn type_index(&self, data: &StoreOpaque) -> VMSharedTypeIndex {
        unsafe { self.vm_func_ref(data).as_ref().type_index }
    }
    /// Invokes this function with dynamically-typed values.
    ///
    /// Checks arity and argument types before transferring control to wasm;
    /// `results` must have exactly as many slots as the function has results.
    pub fn call(
        &self,
        mut store: impl AsContextMut,
        params: &[Val],
        results: &mut [Val],
    ) -> Result<()> {
        let mut store = store.as_context_mut();
        // Reject synchronous calls where the store requires async execution.
        store.0.validate_sync_call()?;
        self.call_impl_check_args(&mut store, params, results)?;
        unsafe { self.call_impl_do_call(&mut store, params, results) }
    }
    /// Invokes this function over raw `ValRaw` storage with no type checks.
    ///
    /// # Safety
    /// `params_and_returns` must be valid storage of the correct length and
    /// hold values matching the function's actual signature.
    pub unsafe fn call_unchecked(
        &self,
        mut store: impl AsContextMut,
        params_and_returns: *mut [ValRaw],
    ) -> Result<()> {
        let mut store = store.as_context_mut();
        let func_ref = self.vm_func_ref(store.0);
        // Normalize a null pointer into a valid (dangling, zero-length) slice
        // so the callee always sees a `NonNull<[ValRaw]>`.
        let params_and_returns = NonNull::new(params_and_returns).unwrap_or(NonNull::from(&mut []));
        unsafe { Self::call_unchecked_raw(&mut store, func_ref, params_and_returns) }
    }
    /// Lowest-level call path: enters wasm via the array-call ABI inside the
    /// trap-catching/entry-state bracket.
    pub(crate) unsafe fn call_unchecked_raw<T>(
        store: &mut StoreContextMut<'_, T>,
        func_ref: NonNull<VMFuncRef>,
        params_and_returns: NonNull<[ValRaw]>,
    ) -> Result<()> {
        invoke_wasm_and_catch_traps(store, |caller, vm| unsafe {
            VMFuncRef::array_call(func_ref, vm, caller, params_and_returns)
        })
    }
    /// Recreates a `Func` from a pointer previously produced by
    /// [`Func::to_raw`]; returns `None` for a null pointer.
    ///
    /// # Safety
    /// `raw` must originate from `to_raw` on the same store.
    pub unsafe fn from_raw(mut store: impl AsContextMut, raw: *mut c_void) -> Option<Func> {
        unsafe { Self::_from_raw(store.as_context_mut().0, raw) }
    }
    pub(crate) unsafe fn _from_raw(store: &mut StoreOpaque, raw: *mut c_void) -> Option<Func> {
        unsafe {
            Some(Func::from_vm_func_ref(
                store.id(),
                NonNull::new(raw.cast())?,
            ))
        }
    }
    /// Converts this function into a raw pointer suitable for `ValRaw`
    /// funcref representation; invert with [`Func::from_raw`].
    pub fn to_raw(&self, mut store: impl AsContextMut) -> *mut c_void {
        self.vm_func_ref(store.as_context_mut().0).as_ptr().cast()
    }
    /// Async variant of [`Func::call`]: argument checks happen eagerly, the
    /// actual call runs on a fiber so host futures can suspend.
    #[cfg(feature = "async")]
    pub async fn call_async(
        &self,
        mut store: impl AsContextMut<Data: Send>,
        params: &[Val],
        results: &mut [Val],
    ) -> Result<()> {
        let mut store = store.as_context_mut();
        self.call_impl_check_args(&mut store, params, results)?;
        let result = store
            .on_fiber(|store| unsafe { self.call_impl_do_call(store, params, results) })
            .await??;
        Ok(result)
    }
    /// Verifies arity of `params`/`results` and that each argument matches
    /// the expected parameter type and belongs to this store.
    fn call_impl_check_args<T>(
        &self,
        store: &mut StoreContextMut<'_, T>,
        params: &[Val],
        results: &mut [Val],
    ) -> Result<()> {
        let ty = self.load_ty(store.0);
        if ty.params().len() != params.len() {
            bail!(
                "expected {} arguments, got {}",
                ty.params().len(),
                params.len()
            );
        }
        if ty.results().len() != results.len() {
            bail!(
                "expected {} results, got {}",
                ty.results().len(),
                results.len()
            );
        }
        for (ty, arg) in ty.params().zip(params) {
            arg.ensure_matches_ty(store.0, &ty)
                .context("argument type mismatch")?;
            if !arg.comes_from_same_store(store.0) {
                bail!("cross-`Store` values are not currently supported");
            }
        }
        Ok(())
    }
    /// Marshals `params` into the store's reusable `ValRaw` scratch buffer,
    /// performs the unchecked call, then lifts results back into `results`.
    ///
    /// # Safety
    /// Caller must have already validated arity/types via
    /// `call_impl_check_args`.
    unsafe fn call_impl_do_call<T>(
        &self,
        store: &mut StoreContextMut<'_, T>,
        params: &[Val],
        results: &mut [Val],
    ) -> Result<()> {
        let ty = self.load_ty(store.0);
        // Params and results share one buffer (array-call ABI), so size it to
        // the larger of the two counts.
        let values_vec_size = params.len().max(ty.results().len());
        let mut values_vec = store.0.take_wasm_val_raw_storage();
        debug_assert!(values_vec.is_empty());
        values_vec.resize_with(values_vec_size, || ValRaw::v128(0));
        for (arg, slot) in params.iter().cloned().zip(&mut values_vec) {
            *slot = arg.to_raw(&mut *store)?;
        }
        unsafe {
            self.call_unchecked(
                &mut *store,
                core::ptr::slice_from_raw_parts_mut(values_vec.as_mut_ptr(), values_vec_size),
            )?;
        }
        for ((i, slot), val) in results.iter_mut().enumerate().zip(&values_vec) {
            let ty = ty.results().nth(i).unwrap();
            *slot = unsafe { Val::from_raw(&mut *store, *val, ty) };
        }
        // Return the (emptied) buffer to the store for reuse by later calls.
        values_vec.truncate(0);
        store.0.save_wasm_val_raw_storage(values_vec);
        Ok(())
    }
    /// Returns the raw funcref, panicking if `store` is not this function's
    /// owning store — this assertion is what makes later derefs sound.
    #[inline]
    pub(crate) fn vm_func_ref(&self, store: &StoreOpaque) -> NonNull<VMFuncRef> {
        self.store.assert_belongs_to(store.id());
        self.unsafe_func_ref.as_non_null()
    }
    /// Builds the import record used when this function is imported into an
    /// instance. `wasm_call` must already be populated (hence the `unwrap`).
    pub(crate) fn vmimport(&self, store: &StoreOpaque) -> VMFunctionImport {
        unsafe {
            let f = self.vm_func_ref(store);
            VMFunctionImport {
                wasm_call: f.as_ref().wasm_call.unwrap(),
                array_call: f.as_ref().array_call,
                vmctx: f.as_ref().vmctx,
            }
        }
    }
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.store == store.id()
    }
    /// Statically typechecks this function against `Params`/`Results` and
    /// returns a zero-overhead typed view on success.
    pub fn typed<Params, Results>(
        &self,
        store: impl AsContext,
    ) -> Result<TypedFunc<Params, Results>>
    where
        Params: WasmParams,
        Results: WasmResults,
    {
        let store = store.as_context().0;
        let ty = self.load_ty(store);
        Params::typecheck(store.engine(), ty.params(), TypeCheckPosition::Param)
            .context("type mismatch with parameters")?;
        Results::typecheck(store.engine(), ty.results(), TypeCheckPosition::Result)
            .context("type mismatch with results")?;
        unsafe { Ok(TypedFunc::_new_unchecked(store, *self)) }
    }
    /// Hash key based on the funcref address — stable for a given `Func`
    /// within its store's lifetime.
    #[cfg_attr(
        not(test),
        expect(dead_code, reason = "Not used yet, but added for consistency")
    )]
    pub(crate) fn hash_key(&self, store: &mut StoreOpaque) -> impl core::hash::Hash + Eq + use<> {
        self.vm_func_ref(store).as_ptr().addr()
    }
}
/// Brackets a transition into wasm: saves/installs per-store entry state,
/// fires call hooks, runs `closure` under the trap handler, and restores the
/// saved state before returning.
///
/// Ordering matters here: `previous_runtime_state` must be dropped (which
/// restores the saved VM state) *before* the `ReturningFromWasm` hook runs.
pub(crate) fn invoke_wasm_and_catch_traps<T>(
    store: &mut StoreContextMut<'_, T>,
    closure: impl FnMut(NonNull<VMContext>, Option<InterpreterRef<'_>>) -> bool,
) -> Result<()> {
    let mut initial_stack_csi = VMCommonStackInformation::running_default();
    // Snapshot previous state; restored via `Drop` on every exit path.
    let mut previous_runtime_state = EntryStoreContext::enter_wasm(store, &mut initial_stack_csi);
    if let Err(trap) = store.0.call_hook(CallHook::CallingWasm) {
        return Err(trap);
    }
    let result = crate::runtime::vm::catch_traps(store, &mut previous_runtime_state, closure);
    #[cfg(feature = "component-model")]
    if result.is_err() {
        store.0.set_trapped();
    }
    core::mem::drop(previous_runtime_state);
    store.0.call_hook(CallHook::ReturningFromWasm)?;
    result
}
/// Snapshot of the store's stack-related VM state taken when entering wasm;
/// restored on drop so that nested/re-entrant wasm calls unwind correctly.
pub(crate) struct EntryStoreContext {
    // `Some(prev)` if a fresh stack limit was installed and `prev` must be
    // restored; `None` if the existing limit was left in place.
    pub stack_limit: Option<usize>,
    pub last_wasm_exit_pc: usize,
    pub last_wasm_exit_trampoline_fp: usize,
    pub last_wasm_entry_fp: usize,
    pub last_wasm_entry_sp: usize,
    pub last_wasm_entry_trap_handler: usize,
    pub stack_chain: VMStackChain,
    // Pointer back to the store's context so `Drop` can restore the fields
    // above without borrowing the store.
    vm_store_context: *const VMStoreContext,
}
impl EntryStoreContext {
    /// Saves the store's stack-related VM state and installs the state needed
    /// for this wasm entry (stack limit and initial stack-chain entry).
    ///
    /// The returned guard restores everything saved here when dropped (see
    /// `exit_wasm`), which makes entry/exit balanced on trap paths too.
    pub fn enter_wasm<T>(
        store: &mut StoreContextMut<'_, T>,
        initial_stack_information: *mut VMCommonStackInformation,
    ) -> Self {
        let stack_limit;
        // A limit of `usize::MAX` means "not yet configured". If a limit is
        // already installed and the store cannot block, this is a re-entrant
        // call and the existing limit is kept. Under Miri native stack
        // addresses are not meaningful, so skip limit installation as well.
        if unsafe { *store.0.vm_store_context().stack_limit.get() } != usize::MAX
            && !store.0.can_block()
        {
            stack_limit = None;
        } else if cfg!(miri) {
            stack_limit = None;
        } else {
            #[cfg(has_host_compiler_backend)]
            let stack_pointer = crate::runtime::vm::get_stack_pointer();
            #[cfg(not(has_host_compiler_backend))]
            let stack_pointer = {
                use wasmtime_environ::TripleExt;
                debug_assert!(store.engine().target().is_pulley());
                usize::MAX
            };
            // Lowest address wasm is allowed to grow the stack to; install it
            // and remember the previous limit so it can be restored on exit.
            let wasm_stack_limit = stack_pointer
                .checked_sub(store.engine().config().max_wasm_stack)
                .unwrap();
            let prev_stack = unsafe {
                mem::replace(
                    &mut *store.0.vm_store_context().stack_limit.get(),
                    wasm_stack_limit,
                )
            };
            stack_limit = Some(prev_stack);
        }
        unsafe {
            let vm_store_context = store.0.vm_store_context();
            // Capture the caller's stack chain *before* overwriting it below.
            // Reading it after the store would snapshot the freshly-installed
            // `InitialStack` entry, and `exit_wasm` would then fail to restore
            // the previous chain on the way out.
            let previous_stack_chain = (*vm_store_context.stack_chain.get()).clone();
            let new_stack_chain = VMStackChain::InitialStack(initial_stack_information);
            *vm_store_context.stack_chain.get() = new_stack_chain;
            Self {
                stack_limit,
                last_wasm_exit_pc: *(*vm_store_context).last_wasm_exit_pc.get(),
                last_wasm_exit_trampoline_fp: *(*vm_store_context)
                    .last_wasm_exit_trampoline_fp
                    .get(),
                last_wasm_entry_fp: *(*vm_store_context).last_wasm_entry_fp.get(),
                last_wasm_entry_sp: *(*vm_store_context).last_wasm_entry_sp.get(),
                last_wasm_entry_trap_handler: *(*vm_store_context)
                    .last_wasm_entry_trap_handler
                    .get(),
                stack_chain: previous_stack_chain,
                vm_store_context,
            }
        }
    }
    /// Writes every saved field back into the store's `VMStoreContext`.
    /// Invoked exactly once, from `Drop`.
    #[inline]
    fn exit_wasm(&mut self) {
        unsafe {
            if let Some(limit) = self.stack_limit {
                *(&*self.vm_store_context).stack_limit.get() = limit;
            }
            *(*self.vm_store_context).last_wasm_exit_trampoline_fp.get() =
                self.last_wasm_exit_trampoline_fp;
            *(*self.vm_store_context).last_wasm_exit_pc.get() = self.last_wasm_exit_pc;
            *(*self.vm_store_context).last_wasm_entry_fp.get() = self.last_wasm_entry_fp;
            *(*self.vm_store_context).last_wasm_entry_sp.get() = self.last_wasm_entry_sp;
            *(*self.vm_store_context).last_wasm_entry_trap_handler.get() =
                self.last_wasm_entry_trap_handler;
            *(*self.vm_store_context).stack_chain.get() = self.stack_chain.clone();
        }
    }
}
impl Drop for EntryStoreContext {
    /// Restores the saved VM state on every exit path from wasm, including
    /// unwinds caused by traps/panics.
    #[inline]
    fn drop(&mut self) {
        self.exit_wasm();
    }
}
/// Internal trait describing host-function return values: single values,
/// tuples, and `Result`-wrapped versions thereof.
///
/// # Safety
/// Implementations must store results consistently with the `FuncType`
/// reported by `func_type`; the raw-slot writes are not re-checked.
pub unsafe trait WasmRet {
    #[doc(hidden)]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool;
    #[doc(hidden)]
    unsafe fn store(
        self,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut [MaybeUninit<ValRaw>],
    ) -> Result<()>;
    #[doc(hidden)]
    fn func_type(engine: &Engine, params: impl Iterator<Item = ValType>) -> FuncType;
    #[doc(hidden)]
    fn may_gc() -> bool;
    // `Self` with fallibility added (`T` -> `Result<T>`); used so generated
    // trampolines can handle both plain and `Result` returns uniformly.
    #[doc(hidden)]
    type Fallible: WasmRet;
    #[doc(hidden)]
    fn into_fallible(self) -> Self::Fallible;
    #[doc(hidden)]
    fn fallible_from_error(error: Error) -> Self::Fallible;
}
// A single `WasmTy` value is a one-result return: it occupies exactly the
// first raw slot.
unsafe impl<T> WasmRet for T
where
    T: WasmTy,
{
    type Fallible = Result<T>;
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        <Self as WasmTy>::compatible_with_store(self, store)
    }
    unsafe fn store(
        self,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut [MaybeUninit<ValRaw>],
    ) -> Result<()> {
        // SAFETY: the trampoline sizes `ptr` from the function type, which
        // has exactly one result here; the debug assert double-checks.
        debug_assert!(ptr.len() > 0);
        unsafe { <Self as WasmTy>::store(self, store, ptr.get_unchecked_mut(0)) }
    }
    fn may_gc() -> bool {
        T::may_gc()
    }
    fn func_type(engine: &Engine, params: impl Iterator<Item = ValType>) -> FuncType {
        FuncType::new(engine, params, Some(<Self as WasmTy>::valtype()))
    }
    fn into_fallible(self) -> Result<T> {
        Ok(self)
    }
    fn fallible_from_error(error: Error) -> Result<T> {
        Err(error)
    }
}
// `Result<T>` returns delegate to the inner `T` on success and propagate the
// error (which becomes a host trap) on failure.
unsafe impl<T> WasmRet for Result<T>
where
    T: WasmRet,
{
    type Fallible = Self;
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        match self {
            Ok(x) => <T as WasmRet>::compatible_with_store(x, store),
            // Errors carry no store-bound values, so always compatible.
            Err(_) => true,
        }
    }
    unsafe fn store(
        self,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut [MaybeUninit<ValRaw>],
    ) -> Result<()> {
        unsafe { self.and_then(|val| val.store(store, ptr)) }
    }
    fn may_gc() -> bool {
        T::may_gc()
    }
    fn func_type(engine: &Engine, params: impl Iterator<Item = ValType>) -> FuncType {
        T::func_type(engine, params)
    }
    fn into_fallible(self) -> Result<T> {
        self
    }
    fn fallible_from_error(error: Error) -> Result<T> {
        Err(error)
    }
}
// Implements `WasmRet` for tuples of `WasmTy` values of every supported
// arity: each element is stored into consecutive raw result slots.
macro_rules! impl_wasm_host_results {
    ($n:tt $($t:ident)*) => (
        #[allow(non_snake_case, reason = "macro-generated code")]
        unsafe impl<$($t),*> WasmRet for ($($t,)*)
        where
            $($t: WasmTy,)*
        {
            type Fallible = Result<Self>;
            #[inline]
            fn compatible_with_store(&self, _store: &StoreOpaque) -> bool {
                let ($($t,)*) = self;
                $( $t.compatible_with_store(_store) && )* true
            }
            #[inline]
            unsafe fn store(
                self,
                _store: &mut AutoAssertNoGc<'_>,
                _ptr: &mut [MaybeUninit<ValRaw>],
            ) -> Result<()> {
                let ($($t,)*) = self;
                let mut _cur = 0;
                $(
                    // SAFETY: `_ptr` is sized from the function type, which
                    // has one slot per tuple element.
                    debug_assert!(_cur < _ptr.len());
                    unsafe {
                        let val = _ptr.get_unchecked_mut(_cur);
                        _cur += 1;
                        WasmTy::store($t, _store, val)?;
                    }
                )*
                Ok(())
            }
            #[doc(hidden)]
            fn may_gc() -> bool {
                $( $t::may_gc() || )* false
            }
            fn func_type(engine: &Engine, params: impl Iterator<Item = ValType>) -> FuncType {
                FuncType::new(
                    engine,
                    params,
                    IntoIterator::into_iter([$($t::valtype(),)*]),
                )
            }
            #[inline]
            fn into_fallible(self) -> Result<Self> {
                Ok(self)
            }
            #[inline]
            fn fallible_from_error(error: Error) -> Result<Self> {
                Err(error)
            }
        }
    )
}
for_each_function_signature!(impl_wasm_host_results);
/// Conversion from Rust closures (with or without a leading `Caller`
/// parameter) into a [`HostFunc`]; implemented for every supported arity by
/// the macro below.
pub trait IntoFunc<T, Params, Results>: Send + Sync + 'static {
    #[doc(hidden)]
    fn into_func(self, engine: &Engine) -> HostFunc;
}
// Implements `IntoFunc` for closures of each arity, in two flavors: without
// a `Caller` (wrapped to ignore the caller) and with a leading `Caller`.
// The single-argument arity needs its own arm because a 1-tuple pattern
// differs from a bare parameter.
macro_rules! impl_into_func {
    ($num:tt $arg:ident) => {
        #[expect(non_snake_case, reason = "macro-generated code")]
        impl<T, F, $arg, R> IntoFunc<T, $arg, R> for F
        where
            F: Fn($arg) -> R + Send + Sync + 'static,
            $arg: WasmTy,
            R: WasmRet,
            T: 'static,
        {
            fn into_func(self, engine: &Engine) -> HostFunc {
                // Adapt to the `Caller`-taking shape and delegate below.
                let f = move |_: Caller<'_, T>, $arg: $arg| {
                    self($arg)
                };
                f.into_func(engine)
            }
        }
        #[expect(non_snake_case, reason = "macro-generated code")]
        impl<T, F, $arg, R> IntoFunc<T, (Caller<'_, T>, $arg), R> for F
        where
            F: Fn(Caller<'_, T>, $arg) -> R + Send + Sync + 'static,
            $arg: WasmTy,
            R: WasmRet,
            T: 'static,
        {
            fn into_func(self, engine: &Engine) -> HostFunc {
                HostFunc::wrap(engine, move |caller: Caller<'_, T>, ($arg,)| {
                    self(caller, $arg)
                })
            }
        }
    };
    ($num:tt $($args:ident)*) => {
        #[allow(non_snake_case, reason = "macro-generated code")]
        impl<T, F, $($args,)* R> IntoFunc<T, ($($args,)*), R> for F
        where
            F: Fn($($args),*) -> R + Send + Sync + 'static,
            $($args: WasmTy,)*
            R: WasmRet,
            T: 'static,
        {
            fn into_func(self, engine: &Engine) -> HostFunc {
                // Adapt to the `Caller`-taking shape and delegate below.
                let f = move |_: Caller<'_, T>, $($args:$args),*| {
                    self($($args),*)
                };
                f.into_func(engine)
            }
        }
        #[allow(non_snake_case, reason = "macro-generated code")]
        impl<T, F, $($args,)* R> IntoFunc<T, (Caller<'_, T>, $($args,)*), R> for F
        where
            F: Fn(Caller<'_, T>, $($args),*) -> R + Send + Sync + 'static,
            $($args: WasmTy,)*
            R: WasmRet,
            T: 'static,
        {
            fn into_func(self, engine: &Engine) -> HostFunc {
                HostFunc::wrap(engine, move |caller: Caller<'_, T>, ( $( $args ),* )| {
                    self(caller, $( $args ),* )
                })
            }
        }
    }
}
for_each_function_signature!(impl_into_func);
/// A tuple of `WasmTy` parameter types, loadable from raw argument slots.
///
/// # Safety
/// `load` reads `values` without per-slot type checks; the slots must match
/// `valtypes()` in count and type.
pub unsafe trait WasmTyList {
    fn valtypes() -> impl Iterator<Item = ValType>;
    #[doc(hidden)]
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, values: &mut [MaybeUninit<ValRaw>]) -> Self;
    #[doc(hidden)]
    fn may_gc() -> bool;
}
// Implements `WasmTyList` for tuples of each supported arity: elements are
// loaded from consecutive raw slots in order.
macro_rules! impl_wasm_ty_list {
    ($num:tt $($args:ident)*) => (
        #[allow(non_snake_case, reason = "macro-generated code")]
        unsafe impl<$($args),*> WasmTyList for ($($args,)*)
        where
            $($args: WasmTy,)*
        {
            fn valtypes() -> impl Iterator<Item = ValType> {
                IntoIterator::into_iter([$($args::valtype(),)*])
            }
            unsafe fn load(_store: &mut AutoAssertNoGc<'_>, _values: &mut [MaybeUninit<ValRaw>]) -> Self {
                let mut _cur = 0;
                ($({
                    // SAFETY: caller guarantees one initialized slot per
                    // tuple element, in declaration order.
                    debug_assert!(_cur < _values.len());
                    unsafe {
                        let ptr = _values.get_unchecked(_cur).assume_init_ref();
                        _cur += 1;
                        $args::load(_store, ptr)
                    }
                },)*)
            }
            fn may_gc() -> bool {
                $( $args::may_gc() || )* false
            }
        }
    );
}
for_each_function_signature!(impl_wasm_ty_list);
/// Context handed to host functions: a mutable store context plus the
/// instance that performed the call (used for export lookups).
pub struct Caller<'a, T: 'static> {
    pub(crate) store: StoreContextMut<'a, T>,
    // The calling instance; `Instance` is a small copyable handle.
    caller: Instance,
}
impl<T> Caller<'_, T> {
    /// Reborrows this caller with a shorter lifetime so it can be handed to
    /// user code while this one stays alive.
    fn sub_caller(&mut self) -> Caller<'_, T> {
        Caller {
            store: self.store.as_context_mut(),
            caller: self.caller,
        }
    }
    /// Looks up an export of the calling instance by name.
    pub fn get_export(&mut self, name: &str) -> Option<Extern> {
        self.caller.get_export(&mut self.store, name)
    }
    /// Looks up an export of the calling instance via a pre-resolved
    /// [`ModuleExport`] handle.
    pub fn get_module_export(&mut self, export: &ModuleExport) -> Option<Extern> {
        self.caller.get_module_export(&mut self.store, export)
    }
    /// Shared access to the store's user data.
    pub fn data(&self) -> &T {
        self.store.data()
    }
    /// Mutable access to the store's user data.
    pub fn data_mut(&mut self) -> &mut T {
        self.store.data_mut()
    }
    /// The engine this store belongs to.
    pub fn engine(&self) -> &Engine {
        self.store.engine()
    }
    /// Triggers a garbage collection; delegates to the store.
    #[cfg(feature = "gc")]
    pub fn gc(&mut self, why: Option<&crate::GcHeapOutOfMemory<()>>) -> Result<()> {
        self.store.gc(why)
    }
    /// Async garbage collection; delegates to the store.
    #[cfg(all(feature = "async", feature = "gc"))]
    pub async fn gc_async(&mut self, why: Option<&crate::GcHeapOutOfMemory<()>>)
    where
        T: Send + 'static,
    {
        self.store.gc_async(why).await;
    }
    /// Remaining fuel in the store.
    pub fn get_fuel(&self) -> Result<u64> {
        self.store.get_fuel()
    }
    /// Sets the store's remaining fuel.
    pub fn set_fuel(&mut self, fuel: u64) -> Result<()> {
        self.store.set_fuel(fuel)
    }
    /// Configures the fuel interval at which async execution yields.
    #[cfg(feature = "async")]
    pub fn fuel_async_yield_interval(&mut self, interval: Option<u64>) -> Result<()> {
        self.store.fuel_async_yield_interval(interval)
    }
    /// Debug-frame cursor over the current wasm activation, if available.
    #[cfg(feature = "debug")]
    pub fn debug_frames(&mut self) -> Option<crate::DebugFrameCursor<'_, T>> {
        self.store.as_context_mut().debug_frames()
    }
}
// Allow a `Caller` to be used anywhere a shared store context is expected.
impl<T: 'static> AsContext for Caller<'_, T> {
    type Data = T;
    fn as_context(&self) -> StoreContext<'_, T> {
        self.store.as_context()
    }
}
// Allow a `Caller` to be used anywhere a mutable store context is expected.
impl<T: 'static> AsContextMut for Caller<'_, T> {
    fn as_context_mut(&mut self) -> StoreContextMut<'_, T> {
        self.store.as_context_mut()
    }
}
impl<'a, T: 'static> From<Caller<'a, T>> for StoreContextMut<'a, T> {
fn from(caller: Caller<'a, T>) -> Self {
caller.store
}
}
/// An engine-owned host function, independent of any particular store.
#[doc(hidden)]
pub struct HostFunc {
    // Heap-allocated host-func context holding the trampoline, type index,
    // and the boxed closure state.
    ctx: StoreBox<VMArrayCallHostFuncContext>,
    // Whether this host function requires async execution support.
    asyncness: Asyncness,
    // Keeps the engine (and thus the registered type) alive.
    engine: Engine,
}
/// Closure state stored inside a host-func context.
struct HostFuncState<F> {
    func: F,
    // Holds the type registration alive for as long as the function exists.
    _ty: RegisteredType,
}
impl core::fmt::Debug for HostFunc {
    // Host functions expose no introspectable state, so render an opaque
    // `HostFunc { .. }` placeholder.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut repr = f.debug_struct("HostFunc");
        repr.finish_non_exhaustive()
    }
}
impl HostFunc {
    /// Assembles a `HostFunc` from a prepared context, recording the engine
    /// and whether async execution is required.
    fn new_raw(
        engine: &Engine,
        ctx: StoreBox<VMArrayCallHostFuncContext>,
        asyncness: Asyncness,
    ) -> Self {
        HostFunc {
            ctx,
            engine: engine.clone(),
            asyncness,
        }
    }
    /// Builds the host-func context for a synchronous raw-slot closure,
    /// wiring in the array-call trampoline and retaining the type.
    fn vmctx_sync<F, T>(
        engine: &Engine,
        ty: FuncType,
        func: F,
    ) -> StoreBox<VMArrayCallHostFuncContext>
    where
        F: Fn(Caller<'_, T>, &mut [MaybeUninit<ValRaw>]) -> Result<()> + Send + Sync + 'static,
        T: 'static,
    {
        assert!(ty.comes_from_same_engine(engine));
        unsafe {
            VMArrayCallHostFuncContext::new(
                Self::array_call_trampoline::<T, F>,
                ty.type_index(),
                Box::new(HostFuncState {
                    func,
                    _ty: ty.into_registered_type(),
                }),
            )
        }
    }
    /// Async counterpart of `vmctx_sync`: the future returned by `func` is
    /// driven to completion by blocking on the store's async context.
    #[cfg(feature = "async")]
    fn vmctx_async<F, T, U>(
        engine: &Engine,
        ty: FuncType,
        ctx: U,
        func: F,
    ) -> StoreBox<VMArrayCallHostFuncContext>
    where
        F: for<'a> Fn(
                Caller<'a, T>,
                &'a mut [MaybeUninit<ValRaw>],
                &'a U,
            ) -> Box<dyn Future<Output = Result<()>> + Send + 'a>
            + Send
            + Sync
            + 'static,
        T: 'static,
        U: Send + Sync + 'static,
    {
        Self::vmctx_sync(engine, ty, move |Caller { store, caller }, args| {
            store.with_blocking(|store, cx| {
                cx.block_on(core::pin::Pin::from(func(
                    Caller { store, caller },
                    args,
                    &ctx,
                )))
            })?
        })
    }
    /// The trampoline installed in every host-func context: recovers the
    /// typed closure state from `callee_vmctx`, runs the closure bracketed
    /// by call hooks and a GC LIFO scope, and reports success via `bool`.
    unsafe extern "C" fn array_call_trampoline<T, F>(
        callee_vmctx: NonNull<VMOpaqueContext>,
        caller_vmctx: NonNull<VMContext>,
        args: NonNull<ValRaw>,
        args_len: usize,
    ) -> bool
    where
        F: Fn(Caller<'_, T>, &mut [MaybeUninit<ValRaw>]) -> Result<()> + 'static,
        T: 'static,
    {
        let run = |store: &mut dyn crate::vm::VMStore, instance: InstanceId| {
            // SAFETY: the trampoline is only installed for stores whose data
            // type is `T`, established at `HostFunc` construction time.
            let mut store = unsafe { store.unchecked_context_mut() };
            store.0.call_hook(CallHook::CallingHost)?;
            let state = unsafe {
                let vmctx = VMArrayCallHostFuncContext::from_opaque(callee_vmctx);
                vmctx.as_ref().host_state()
            };
            // SAFETY: the context was created with a `HostFuncState<F>`
            // payload for exactly this `F` (debug-asserted).
            let state = unsafe {
                debug_assert!(state.is::<HostFuncState<F>>());
                &*(state as *const _ as *const HostFuncState<F>)
            };
            let (gc_lifo_scope, ret) = {
                // GC roots created by the host call are scoped to it.
                let gc_lifo_scope = store.0.gc_roots().enter_lifo_scope();
                let mut args = NonNull::slice_from_raw_parts(args.cast(), args_len);
                let args = unsafe { args.as_mut() };
                let ret = (state.func)(
                    Caller {
                        caller: Instance::from_wasmtime(instance, store.0),
                        store: store.as_context_mut(),
                    },
                    args,
                );
                (gc_lifo_scope, ret)
            };
            store.0.exit_gc_lifo_scope(gc_lifo_scope);
            store.0.call_hook(CallHook::ReturningFromHost)?;
            ret
        };
        unsafe { vm::Instance::enter_host_from_wasm(caller_vmctx, run) }
    }
    /// Raw-slot host function; no dynamic type checking of values.
    ///
    /// # Safety
    /// `func` must handle the raw slots consistently with `ty`.
    pub unsafe fn new_unchecked<T>(
        engine: &Engine,
        ty: FuncType,
        func: impl Fn(Caller<'_, T>, &mut [MaybeUninit<ValRaw>]) -> Result<()> + Send + Sync + 'static,
    ) -> Self
    where
        T: 'static,
    {
        HostFunc::new_raw(engine, Self::vmctx_sync(engine, ty, func), Asyncness::No)
    }
    /// Dynamically-typed host function: raw slots are lifted into `Val`s
    /// before invoking `func` and results are checked/lowered afterwards.
    pub fn new<T>(
        engine: &Engine,
        ty: FuncType,
        func: impl Fn(Caller<'_, T>, &[Val], &mut [Val]) -> Result<()> + Send + Sync + 'static,
    ) -> Self
    where
        T: 'static,
    {
        HostFunc::new_raw(
            engine,
            Self::vmctx_sync(engine, ty.clone(), move |mut caller, values| {
                let mut vec = unsafe { Self::load_untyped_params(caller.store.0, &ty, values) };
                let (params, results) = vec.split_at_mut(ty.params().len());
                func(caller.sub_caller(), params, results)?;
                Self::store_untyped_results(caller.store, &ty, vec, values)
            }),
            Asyncness::No,
        )
    }
    /// Async variant of [`HostFunc::new`].
    #[cfg(feature = "async")]
    pub fn new_async<T, F>(engine: &Engine, ty: FuncType, func: F) -> Self
    where
        F: for<'a> Fn(
                Caller<'a, T>,
                &'a [Val],
                &'a mut [Val],
            ) -> Box<dyn Future<Output = Result<()>> + Send + 'a>
            + Send
            + Sync
            + 'static,
        T: Send + 'static,
    {
        HostFunc::new_raw(
            engine,
            Self::vmctx_async(
                engine,
                ty.clone(),
                // Captured context tuple handed back to the closure by
                // `vmctx_async` so the future can borrow it.
                (ty, func),
                move |mut caller, values, (ty, func)| {
                    Box::new(async move {
                        let mut vec =
                            unsafe { Self::load_untyped_params(caller.store.0, &ty, values) };
                        let (params, results) = vec.split_at_mut(ty.params().len());
                        core::pin::Pin::from(func(caller.sub_caller(), params, results)).await?;
                        Self::store_untyped_results(caller.store, &ty, vec, values)
                    })
                },
            ),
            Asyncness::Yes,
        )
    }
    /// Lifts raw parameter slots into a `Vec<Val>` sized for params plus
    /// results (result slots pre-filled with null funcrefs); the vector is
    /// borrowed from the store's reusable host-call storage.
    unsafe fn load_untyped_params(
        store: &mut StoreOpaque,
        ty: &FuncType,
        params: &mut [MaybeUninit<ValRaw>],
    ) -> Vec<Val> {
        let mut val_vec = store.take_hostcall_val_storage();
        debug_assert!(val_vec.is_empty());
        let nparams = ty.params().len();
        val_vec.reserve(nparams + ty.results().len());
        let mut store = AutoAssertNoGc::new(store);
        for (i, ty) in ty.params().enumerate() {
            val_vec.push(unsafe { Val::_from_raw(&mut store, params[i].assume_init(), &ty) })
        }
        val_vec.extend((0..ty.results().len()).map(|_| Val::null_func_ref()));
        val_vec
    }
    /// Typechecks and lowers the result portion of `args_then_results` into
    /// raw storage, then hands the vector back to the store for reuse.
    fn store_untyped_results<T>(
        mut store: StoreContextMut<'_, T>,
        ty: &FuncType,
        mut args_then_results: Vec<Val>,
        storage: &mut [MaybeUninit<ValRaw>],
    ) -> Result<()> {
        let results = &args_then_results[ty.params().len()..];
        for (i, (ret, ty)) in results.iter().zip(ty.results()).enumerate() {
            ret.ensure_matches_ty(store.0, &ty)
                .context("function attempted to return an incompatible value")?;
            storage[i].write(ret.to_raw(store.as_context_mut())?);
        }
        args_then_results.truncate(0);
        store.0.save_hostcall_val_storage(args_then_results);
        Ok(())
    }
    /// Statically-typed host function: the wasm type is derived from the
    /// closure's Rust parameter/result types.
    pub fn wrap<T, F, P, R>(engine: &Engine, func: F) -> Self
    where
        F: Fn(Caller<'_, T>, P) -> R + Send + Sync + 'static,
        P: WasmTyList,
        R: WasmRet,
        T: 'static,
    {
        let ty = R::func_type(engine, None::<ValType>.into_iter().chain(P::valtypes()));
        let ctx = Self::vmctx_sync(engine, ty, move |mut caller, args| {
            let params = unsafe { Self::load_typed_params(caller.store.0, args) };
            let ret = func(caller.sub_caller(), params).into_fallible();
            unsafe { Self::store_typed_results(caller.store.0, ret, args) }
        });
        HostFunc::new_raw(engine, ctx, Asyncness::No)
    }
    /// Async variant of [`HostFunc::wrap`].
    #[cfg(feature = "async")]
    pub fn wrap_async<T, F, P, R>(engine: &Engine, func: F) -> Self
    where
        F: for<'a> Fn(Caller<'a, T>, P) -> Box<dyn Future<Output = R> + Send + 'a>
            + Send
            + Sync
            + 'static,
        P: WasmTyList,
        R: WasmRet,
        T: Send + 'static,
    {
        let ty = R::func_type(engine, None::<ValType>.into_iter().chain(P::valtypes()));
        let ctx = Self::vmctx_async(engine, ty, func, move |mut caller, args, func| {
            Box::new(async move {
                let params = unsafe { Self::load_typed_params(caller.store.0, args) };
                let ret = core::pin::Pin::from(func(caller.sub_caller(), params)).await;
                unsafe { Self::store_typed_results(caller.store.0, ret.into_fallible(), args) }
            })
        });
        HostFunc::new_raw(engine, ctx, Asyncness::Yes)
    }
    /// Loads a statically-typed parameter tuple from raw slots; GC is only
    /// fenced when a parameter type can actually trigger GC.
    unsafe fn load_typed_params<P>(store: &mut StoreOpaque, params: &mut [MaybeUninit<ValRaw>]) -> P
    where
        P: WasmTyList,
    {
        let mut store = if P::may_gc() {
            AutoAssertNoGc::new(store)
        } else {
            unsafe { AutoAssertNoGc::disabled(store) }
        };
        unsafe { P::load(&mut store, params) }
    }
    /// Stores a statically-typed return value into raw slots after checking
    /// it does not smuggle values from another store.
    unsafe fn store_typed_results<R>(
        store: &mut StoreOpaque,
        ret: R,
        storage: &mut [MaybeUninit<ValRaw>],
    ) -> Result<()>
    where
        R: WasmRet,
    {
        ensure!(
            ret.compatible_with_store(store),
            "host function attempted to return cross-`Store` value to Wasm",
        );
        let mut store = if R::may_gc() {
            AutoAssertNoGc::new(store)
        } else {
            unsafe { AutoAssertNoGc::disabled(store) }
        };
        unsafe {
            ret.store(&mut store, storage)?;
        }
        Ok(())
    }
    /// Registers this shared host function with `store` and returns a
    /// store-local `Func` handle.
    ///
    /// # Safety
    /// Caller must uphold the registration invariants of the store's funcref
    /// table (same contract as `into_func`).
    pub unsafe fn to_func(self: &Arc<Self>, store: &mut StoreOpaque) -> Func {
        self.validate_store(store);
        let (funcrefs, modules) = store.func_refs_and_modules();
        let funcref = funcrefs.push_arc_host(self.clone(), modules);
        unsafe { Func::from_vm_func_ref(store.id(), funcref) }
    }
    /// Like `to_func` but reuses a funcref already rooted in the store when
    /// one is provided; in either case the funcref must have `wasm_call`
    /// filled in (debug-asserted).
    pub unsafe fn to_func_store_rooted(
        self: &Arc<Self>,
        store: &mut StoreOpaque,
        rooted_func_ref: Option<NonNull<VMFuncRef>>,
    ) -> Func {
        self.validate_store(store);
        match rooted_func_ref {
            Some(funcref) => {
                unsafe {
                    debug_assert!(funcref.as_ref().wasm_call.is_some());
                }
                unsafe { Func::from_vm_func_ref(store.id(), funcref) }
            }
            None => {
                debug_assert!(self.func_ref().wasm_call.is_some());
                unsafe { Func::from_vm_func_ref(store.id(), self.func_ref().into()) }
            }
        }
    }
    /// Moves this host function into `store`, marking the store as requiring
    /// async support if the function is async.
    unsafe fn into_func(self, store: &mut StoreOpaque) -> Func {
        self.validate_store(store);
        store.set_async_required(self.asyncness);
        let (funcrefs, modules) = store.func_refs_and_modules();
        let funcref = funcrefs.push_box_host(Box::new(self), modules);
        unsafe { Func::from_vm_func_ref(store.id(), funcref) }
    }
    /// Panics if `store` belongs to a different engine than this function
    /// was created with.
    fn validate_store(&self, store: &mut StoreOpaque) {
        assert!(
            Engine::same(&self.engine, store.engine()),
            "cannot use a store with a different engine than a linker was created with",
        );
    }
    /// Shared type index of this host function.
    pub(crate) fn sig_index(&self) -> VMSharedTypeIndex {
        self.func_ref().type_index
    }
    /// The funcref embedded in this function's context.
    pub(crate) fn func_ref(&self) -> &VMFuncRef {
        unsafe { self.ctx.get().as_ref().func_ref() }
    }
    pub(crate) fn asyncness(&self) -> Asyncness {
        self.asyncness
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{Module, Store};
    // `hash_key` is funcref-address based: repeated lookups of the same
    // export yield equal keys, while a distinct instance yields a different
    // funcref and thus a different key.
    #[test]
    #[cfg_attr(miri, ignore)]
    fn hash_key_is_stable_across_duplicate_store_data_entries() -> Result<()> {
        let mut store = Store::<()>::default();
        let module = Module::new(
            store.engine(),
            r#"
                (module
                    (func (export "f")
                        nop
                    )
                )
            "#,
        )?;
        let instance = Instance::new(&mut store, &module, &[])?;
        let f1 = instance.get_func(&mut store, "f").unwrap();
        let f2 = instance.get_func(&mut store, "f").unwrap();
        assert!(
            f1.hash_key(&mut store.as_context_mut().0)
                == f2.hash_key(&mut store.as_context_mut().0)
        );
        let instance2 = Instance::new(&mut store, &module, &[])?;
        let f3 = instance2.get_func(&mut store, "f").unwrap();
        assert!(
            f1.hash_key(&mut store.as_context_mut().0)
                != f3.hash_key(&mut store.as_context_mut().0)
        );
        Ok(())
    }
}