mod vm_host_func_context;
pub use self::vm_host_func_context::VMArrayCallHostFuncContext;
use crate::prelude::*;
use crate::runtime::vm::{InterpreterRef, VMGcRef, VmPtr, VmSafe, f32x4, f64x2, i8x16};
use crate::store::StoreOpaque;
use crate::vm::stack_switching::VMStackChain;
use core::cell::UnsafeCell;
use core::ffi::c_void;
use core::fmt;
use core::marker;
use core::mem::{self, MaybeUninit};
use core::ops::Range;
use core::ptr::{self, NonNull};
use core::sync::atomic::{AtomicUsize, Ordering};
use wasmtime_environ::{
BuiltinFunctionIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex,
DefinedTagIndex, VMCONTEXT_MAGIC, VMSharedTypeIndex, WasmHeapTopType, WasmValType,
};
/// Signature of the "array call" ABI: arguments and results travel through a
/// single contiguous buffer of `ValRaw` values.
///
/// Parameters, in order:
///
/// * the callee's `vmctx` (opaque: may be a wasm or host-func context)
/// * the caller's `VMContext`
/// * pointer to the shared args/results `ValRaw` buffer
/// * capacity of that buffer, in `ValRaw` elements
///
/// The returned `bool` is propagated unchanged by `VMFuncRef::array_call`
/// below (`true` = success).
pub type VMArrayCallNative = unsafe extern "C" fn(
    NonNull<VMOpaqueContext>,
    NonNull<VMContext>,
    NonNull<ValRaw>,
    usize,
) -> bool;
/// Opaque marker for the start of an array-call entry point; only ever used
/// behind a pointer (see `VMArrayCallNative` for the actual signature).
#[repr(transparent)]
pub struct VMArrayCallFunction(VMFunctionBody);
/// Opaque marker for the start of a wasm-calling-convention entry point;
/// only ever used behind a pointer.
#[repr(transparent)]
pub struct VMWasmCallFunction(VMFunctionBody);
/// An imported function stored inside a `VMContext`.
///
/// `#[repr(C)]`: generated code reads these fields at fixed offsets, which
/// must agree with `wasmtime_environ::VMOffsets` (asserted in the test
/// module below) — do not reorder fields.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// Entry point used when this import is called from compiled wasm code.
    pub wasm_call: VmPtr<VMWasmCallFunction>,
    /// Entry point used when this import is called via the array-call ABI.
    pub array_call: VmPtr<VMArrayCallFunction>,
    /// The callee `vmctx` to pass along with either entry point.
    pub vmctx: VmPtr<VMOpaqueContext>,
}
// SAFETY: contains only `VmPtr`s, which are themselves `VmSafe`.
unsafe impl VmSafe for VMFunctionImport {}
#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// The Rust layout of `VMFunctionImport` must match the offsets that the
    /// compiler bakes into generated code.
    #[test]
    fn check_vmfunction_import_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMFunctionImport>(), usize::from(o.size_of_vmfunction_import()));
        assert_eq!(offset_of!(VMFunctionImport, wasm_call), usize::from(o.vmfunction_import_wasm_call()));
        assert_eq!(offset_of!(VMFunctionImport, array_call), usize::from(o.vmfunction_import_array_call()));
        assert_eq!(offset_of!(VMFunctionImport, vmctx), usize::from(o.vmfunction_import_vmctx()));
    }
}
/// A byte-sized placeholder for the body of a compiled function; only ever
/// used behind raw pointers so that pointer arithmetic is in units of bytes.
#[repr(C)]
pub struct VMFunctionBody(u8);
// SAFETY: a single plain byte.
unsafe impl VmSafe for VMFunctionBody {}
#[cfg(test)]
mod test_vmfunction_body {
    use super::VMFunctionBody;
    use std::mem::size_of;

    /// `VMFunctionBody` is a byte-sized marker: pointer arithmetic over code
    /// bodies relies on it staying exactly one byte.
    #[test]
    fn check_vmfunction_body_offsets() {
        let body_size = size_of::<VMFunctionBody>();
        assert_eq!(body_size, 1);
    }
}
/// An imported table stored inside a `VMContext`.
///
/// `#[repr(C)]`: field offsets must match `VMOffsets` (see tests below).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// Pointer to the actual table definition in the owning instance.
    pub from: VmPtr<VMTableDefinition>,
    /// The `VMContext` of the instance that owns this table.
    pub vmctx: VmPtr<VMContext>,
    /// The table's defined index within the owning instance.
    pub index: DefinedTableIndex,
}
// SAFETY: `VmPtr`s plus a plain index type.
unsafe impl VmSafe for VMTableImport {}
#[cfg(test)]
mod test_vmtable {
    use super::VMTableImport;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::component::{Component, VMComponentOffsets};
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMTableImport` must agree with the compiler's offsets.
    #[test]
    fn check_vmtable_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMTableImport>(), usize::from(o.size_of_vmtable_import()));
        assert_eq!(offset_of!(VMTableImport, from), usize::from(o.vmtable_import_from()));
        assert_eq!(offset_of!(VMTableImport, vmctx), usize::from(o.vmtable_import_vmctx()));
        assert_eq!(offset_of!(VMTableImport, index), usize::from(o.vmtable_import_index()));
    }

    /// Core-wasm and component-model code must agree on the size of a table
    /// import record.
    #[test]
    fn ensure_sizes_match() {
        let core = host_offsets();
        let component_offsets = VMComponentOffsets::new(HostPtr, &Component::default());
        assert_eq!(
            core.size_of_vmtable_import(),
            component_offsets.size_of_vmtable_import()
        );
    }
}
/// An imported linear memory stored inside a `VMContext`.
///
/// `#[repr(C)]`: field offsets must match `VMOffsets` (see tests below).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// Pointer to the actual memory definition in the owning instance.
    pub from: VmPtr<VMMemoryDefinition>,
    /// The `VMContext` of the instance that owns this memory.
    pub vmctx: VmPtr<VMContext>,
    /// The memory's defined index within the owning instance.
    pub index: DefinedMemoryIndex,
}
// SAFETY: `VmPtr`s plus a plain index type.
unsafe impl VmSafe for VMMemoryImport {}
#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMMemoryImport` must agree with the compiler's offsets.
    #[test]
    fn check_vmmemory_import_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMMemoryImport>(), usize::from(o.size_of_vmmemory_import()));
        assert_eq!(offset_of!(VMMemoryImport, from), usize::from(o.vmmemory_import_from()));
        assert_eq!(offset_of!(VMMemoryImport, vmctx), usize::from(o.vmmemory_import_vmctx()));
        assert_eq!(offset_of!(VMMemoryImport, index), usize::from(o.vmmemory_import_index()));
    }
}
/// An imported global stored inside a `VMContext`.
///
/// `#[repr(C)]`: field offsets must match `VMOffsets` (see tests below).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// Pointer to the actual global definition.
    pub from: VmPtr<VMGlobalDefinition>,
    /// The owning context, if any (host-defined globals may have none).
    pub vmctx: Option<VmPtr<VMOpaqueContext>>,
    /// What kind of entity defines this global (see `VMGlobalKind`).
    pub kind: VMGlobalKind,
}
// SAFETY: pointers, an optional pointer, and a plain-data enum.
unsafe impl VmSafe for VMGlobalImport {}
/// Identifies where an imported global is defined.
///
/// `#[repr(C, u32)]` pins the discriminant size/layout since this value is
/// embedded in `VMGlobalImport`, which generated code reads.
#[derive(Debug, Copy, Clone)]
#[repr(C, u32)]
pub enum VMGlobalKind {
    /// A host-defined global, identified by its index on the host.
    Host(DefinedGlobalIndex),
    /// A global defined by a wasm instance.
    Instance(DefinedGlobalIndex),
    /// Component-model instance flags exposed as a global.
    #[cfg(feature = "component-model")]
    ComponentFlags(wasmtime_environ::component::RuntimeComponentInstanceIndex),
    /// Component-model "task may block" flag exposed as a global.
    #[cfg(feature = "component-model")]
    TaskMayBlock,
}
// SAFETY: plain-data enum with a fixed `u32` discriminant.
unsafe impl VmSafe for VMGlobalKind {}
#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMGlobalImport` must agree with the compiler's offsets.
    #[test]
    fn check_vmglobal_import_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMGlobalImport>(), usize::from(o.size_of_vmglobal_import()));
        assert_eq!(offset_of!(VMGlobalImport, from), usize::from(o.vmglobal_import_from()));
    }
}
/// An imported exception/control tag stored inside a `VMContext`.
///
/// `#[repr(C)]`: field offsets must match `VMOffsets` (see tests below).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTagImport {
    /// Pointer to the actual tag definition in the owning instance.
    pub from: VmPtr<VMTagDefinition>,
    /// The `VMContext` of the instance that owns this tag.
    pub vmctx: VmPtr<VMContext>,
    /// The tag's defined index within the owning instance.
    pub index: DefinedTagIndex,
}
// SAFETY: `VmPtr`s plus a plain index type.
unsafe impl VmSafe for VMTagImport {}
#[cfg(test)]
mod test_vmtag_import {
    use super::VMTagImport;
    use core::mem::{offset_of, size_of};
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMTagImport` must agree with the compiler's offsets.
    #[test]
    fn check_vmtag_import_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMTagImport>(), usize::from(o.size_of_vmtag_import()));
        assert_eq!(offset_of!(VMTagImport, from), usize::from(o.vmtag_import_from()));
        assert_eq!(offset_of!(VMTagImport, vmctx), usize::from(o.vmtag_import_vmctx()));
        assert_eq!(offset_of!(VMTagImport, index), usize::from(o.vmtag_import_index()));
    }
}
/// The runtime definition of a linear memory: its base address and current
/// byte length.
///
/// `#[repr(C)]`: generated code loads both fields at fixed offsets.
#[derive(Debug)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// Base address of the memory's backing storage.
    pub base: VmPtr<u8>,
    /// Current byte length. Atomic because shared memories may be grown from
    /// other threads; see `current_length()` for load ordering.
    pub current_length: AtomicUsize,
}
// SAFETY: a pointer plus an atomic integer, both plain data at rest.
unsafe impl VmSafe for VMMemoryDefinition {}
impl VMMemoryDefinition {
    /// Returns the current byte length of this memory.
    ///
    /// Uses a `Relaxed` load: only the value itself is needed, no ordering
    /// with respect to the memory's contents is established here.
    #[inline]
    pub fn current_length(&self) -> usize {
        self.current_length.load(Ordering::Relaxed)
    }
    /// Copies the definition out of `ptr`.
    ///
    /// Note the copy as a whole is NOT atomic — `current_length` is read
    /// with its own relaxed load, separately from `base`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid, live `VMMemoryDefinition`.
    #[inline]
    pub unsafe fn load(ptr: *mut Self) -> Self {
        let other = unsafe { &*ptr };
        VMMemoryDefinition {
            base: other.base,
            current_length: other.current_length().into(),
        }
    }
}
#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, PtrSize, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMMemoryDefinition` must agree with the compiler's
    /// pointer-size-derived offsets.
    #[test]
    fn check_vmmemory_definition_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMMemoryDefinition>(), usize::from(o.ptr.size_of_vmmemory_definition()));
        assert_eq!(offset_of!(VMMemoryDefinition, base), usize::from(o.ptr.vmmemory_definition_base()));
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(o.ptr.vmmemory_definition_current_length())
        );
    }
}
/// The runtime definition of a table: its element storage and current length.
///
/// `#[repr(C)]`: generated code loads both fields at fixed offsets.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Base address of the table's element storage.
    pub base: VmPtr<u8>,
    /// Current number of elements in the table.
    pub current_elements: usize,
}
// SAFETY: a pointer and an integer.
unsafe impl VmSafe for VMTableDefinition {}
#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMTableDefinition` must agree with the compiler's offsets.
    #[test]
    fn check_vmtable_definition_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMTableDefinition>(), usize::from(o.size_of_vmtable_definition()));
        assert_eq!(offset_of!(VMTableDefinition, base), usize::from(o.vmtable_definition_base()));
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(o.vmtable_definition_current_elements())
        );
    }
}
/// The storage for a global variable: 16 raw bytes, 16-byte aligned so any
/// global value up to and including a v128 fits and is suitably aligned.
///
/// The active interpretation of the bytes is determined externally by the
/// global's wasm type; see the typed accessors in the `impl` below.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    // Raw little-endian-agnostic byte storage; accessors bit-cast as needed.
    storage: [u8; 16],
}
// SAFETY: plain bytes.
unsafe impl VmSafe for VMGlobalDefinition {}
#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use std::mem::{align_of, size_of};
    use wasmtime_environ::{HostPtr, Module, PtrSize, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// The storage must be aligned enough to hold every global value type.
    #[test]
    fn check_vmglobal_definition_alignment() {
        let global_align = align_of::<VMGlobalDefinition>();
        assert!(global_align >= align_of::<i32>());
        assert!(global_align >= align_of::<i64>());
        assert!(global_align >= align_of::<f32>());
        assert!(global_align >= align_of::<f64>());
        assert!(global_align >= align_of::<[u8; 16]>());
        assert!(global_align >= align_of::<[f32; 4]>());
        assert!(global_align >= align_of::<[f64; 2]>());
    }

    /// Rust size of a global definition must agree with the compiler's view.
    #[test]
    fn check_vmglobal_definition_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMGlobalDefinition>(), usize::from(o.ptr.size_of_vmglobal_definition()));
    }

    /// Globals in the vmctx start on a 16-byte boundary so v128 values are
    /// naturally aligned.
    #[test]
    fn check_vmglobal_begins_aligned() {
        let o = host_offsets();
        assert_eq!(o.vmctx_globals_begin() % 16, 0);
    }

    /// A GC reference must fit inside a global's storage.
    #[test]
    #[cfg(feature = "gc")]
    fn check_vmglobal_can_contain_gc_ref() {
        assert!(size_of::<crate::runtime::vm::VMGcRef>() <= size_of::<VMGlobalDefinition>());
    }
}
impl VMGlobalDefinition {
    /// Construct a zero-valued global definition.
    pub fn new() -> Self {
        Self { storage: [0; 16] }
    }
    /// Initialize a global's storage from `raw`, interpreted as `wasm_ty`.
    ///
    /// GC references are cloned into the store via `init_gc_ref`; funcrefs
    /// are stored as raw pointers.
    ///
    /// # Safety
    ///
    /// `raw` must hold a valid value of type `wasm_ty`.
    pub unsafe fn from_val_raw(
        store: &mut StoreOpaque,
        wasm_ty: WasmValType,
        raw: ValRaw,
    ) -> Result<Self> {
        let mut global = Self::new();
        unsafe {
            match wasm_ty {
                WasmValType::I32 => *global.as_i32_mut() = raw.get_i32(),
                WasmValType::I64 => *global.as_i64_mut() = raw.get_i64(),
                WasmValType::F32 => *global.as_f32_bits_mut() = raw.get_f32(),
                WasmValType::F64 => *global.as_f64_bits_mut() = raw.get_f64(),
                WasmValType::V128 => global.set_u128(raw.get_v128()),
                WasmValType::Ref(r) => match r.heap_type.top() {
                    WasmHeapTopType::Extern => {
                        let r = VMGcRef::from_raw_u32(raw.get_externref());
                        global.init_gc_ref(store, r.as_ref())
                    }
                    WasmHeapTopType::Any => {
                        let r = VMGcRef::from_raw_u32(raw.get_anyref());
                        global.init_gc_ref(store, r.as_ref())
                    }
                    WasmHeapTopType::Func => *global.as_func_ref_mut() = raw.get_funcref().cast(),
                    // Continuations are stored the same way as funcrefs here;
                    // note the asymmetry with `to_val_raw`, which still
                    // `todo!()`s for continuations.
                    WasmHeapTopType::Cont => *global.as_func_ref_mut() = raw.get_funcref().cast(), WasmHeapTopType::Exn => {
                        let r = VMGcRef::from_raw_u32(raw.get_exnref());
                        global.init_gc_ref(store, r.as_ref())
                    }
                },
            }
        }
        Ok(global)
    }
    /// Read this global's storage back out as a `ValRaw` of type `wasm_ty`.
    ///
    /// GC references are cloned (refcount/root bookkeeping via the store)
    /// before their raw representation is returned.
    ///
    /// # Safety
    ///
    /// The storage must currently hold a valid value of type `wasm_ty`.
    pub unsafe fn to_val_raw(
        &self,
        store: &mut StoreOpaque,
        wasm_ty: WasmValType,
    ) -> Result<ValRaw> {
        unsafe {
            Ok(match wasm_ty {
                WasmValType::I32 => ValRaw::i32(*self.as_i32()),
                WasmValType::I64 => ValRaw::i64(*self.as_i64()),
                WasmValType::F32 => ValRaw::f32(*self.as_f32_bits()),
                WasmValType::F64 => ValRaw::f64(*self.as_f64_bits()),
                WasmValType::V128 => ValRaw::v128(self.get_u128()),
                WasmValType::Ref(r) => match r.heap_type.top() {
                    // Null GC refs are encoded as a raw 0.
                    WasmHeapTopType::Extern => ValRaw::externref(match self.as_gc_ref() {
                        Some(r) => store.clone_gc_ref(r).as_raw_u32(),
                        None => 0,
                    }),
                    WasmHeapTopType::Any => ValRaw::anyref({
                        match self.as_gc_ref() {
                            Some(r) => store.clone_gc_ref(r).as_raw_u32(),
                            None => 0,
                        }
                    }),
                    WasmHeapTopType::Exn => ValRaw::exnref({
                        match self.as_gc_ref() {
                            Some(r) => store.clone_gc_ref(r).as_raw_u32(),
                            None => 0,
                        }
                    }),
                    WasmHeapTopType::Func => ValRaw::funcref(self.as_func_ref().cast()),
                    // Reading a continuation global back out is not yet
                    // implemented (see `from_val_raw`, which can store one).
                    WasmHeapTopType::Cont => todo!(), },
            })
        }
    }
    // The accessors below all bit-cast the 16-byte storage to a particular
    // type. They are `unsafe` because the caller must know the global's
    // actual wasm type; the 16-byte alignment of `storage` guarantees the
    // casts are always sufficiently aligned.

    /// View the storage as an `i32`.
    pub unsafe fn as_i32(&self) -> &i32 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<i32>()) }
    }
    /// Mutably view the storage as an `i32`.
    pub unsafe fn as_i32_mut(&mut self) -> &mut i32 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<i32>()) }
    }
    /// View the storage as a `u32`.
    pub unsafe fn as_u32(&self) -> &u32 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u32>()) }
    }
    /// Mutably view the storage as a `u32`.
    pub unsafe fn as_u32_mut(&mut self) -> &mut u32 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>()) }
    }
    /// View the storage as an `i64`.
    pub unsafe fn as_i64(&self) -> &i64 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<i64>()) }
    }
    /// Mutably view the storage as an `i64`.
    pub unsafe fn as_i64_mut(&mut self) -> &mut i64 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<i64>()) }
    }
    /// View the storage as a `u64`.
    pub unsafe fn as_u64(&self) -> &u64 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u64>()) }
    }
    /// Mutably view the storage as a `u64`.
    pub unsafe fn as_u64_mut(&mut self) -> &mut u64 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>()) }
    }
    /// View the storage as an `f32`.
    pub unsafe fn as_f32(&self) -> &f32 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<f32>()) }
    }
    /// Mutably view the storage as an `f32`.
    pub unsafe fn as_f32_mut(&mut self) -> &mut f32 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<f32>()) }
    }
    /// View the storage as the raw bit pattern of an `f32`.
    pub unsafe fn as_f32_bits(&self) -> &u32 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u32>()) }
    }
    /// Mutably view the storage as the raw bit pattern of an `f32`.
    pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>()) }
    }
    /// View the storage as an `f64`.
    pub unsafe fn as_f64(&self) -> &f64 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<f64>()) }
    }
    /// Mutably view the storage as an `f64`.
    pub unsafe fn as_f64_mut(&mut self) -> &mut f64 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<f64>()) }
    }
    /// View the storage as the raw bit pattern of an `f64`.
    pub unsafe fn as_f64_bits(&self) -> &u64 {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u64>()) }
    }
    /// Mutably view the storage as the raw bit pattern of an `f64`.
    pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>()) }
    }
    /// Read the storage as a `u128` (stored little-endian).
    pub unsafe fn get_u128(&self) -> u128 {
        unsafe { u128::from_le(*(self.storage.as_ref().as_ptr().cast::<u128>())) }
    }
    /// Write the storage as a `u128` (stored little-endian).
    pub unsafe fn set_u128(&mut self, val: u128) {
        unsafe {
            *self.storage.as_mut().as_mut_ptr().cast::<u128>() = val.to_le();
        }
    }
    /// View the storage as raw v128 bytes.
    pub unsafe fn as_u128_bits(&self) -> &[u8; 16] {
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<[u8; 16]>()) }
    }
    /// Mutably view the storage as raw v128 bytes.
    pub unsafe fn as_u128_bits_mut(&mut self) -> &mut [u8; 16] {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<[u8; 16]>()) }
    }
    /// View the storage as an optional GC reference.
    ///
    /// Must only hold a non-null value when the `gc` feature is enabled
    /// (asserted below).
    pub unsafe fn as_gc_ref(&self) -> Option<&VMGcRef> {
        let raw_ptr = self.storage.as_ref().as_ptr().cast::<Option<VMGcRef>>();
        let ret = unsafe { (*raw_ptr).as_ref() };
        assert!(cfg!(feature = "gc") || ret.is_none());
        ret
    }
    /// Initialize previously-uninitialized storage with a GC reference,
    /// delegating the clone/root bookkeeping to the store.
    pub unsafe fn init_gc_ref(&mut self, store: &mut StoreOpaque, gc_ref: Option<&VMGcRef>) {
        // `MaybeUninit` signals to the store that there is no prior value to
        // drop/unroot, unlike `write_gc_ref` below.
        let dest = unsafe {
            &mut *(self
                .storage
                .as_mut()
                .as_mut_ptr()
                .cast::<MaybeUninit<Option<VMGcRef>>>())
        };
        store.init_gc_ref(dest, gc_ref)
    }
    /// Overwrite an existing GC reference in the storage; the store handles
    /// releasing the old value and cloning the new one.
    pub unsafe fn write_gc_ref(&mut self, store: &mut StoreOpaque, gc_ref: Option<&VMGcRef>) {
        let dest = unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<Option<VMGcRef>>()) };
        store.write_gc_ref(dest, gc_ref)
    }
    /// Read the storage as a raw `VMFuncRef` pointer.
    pub unsafe fn as_func_ref(&self) -> *mut VMFuncRef {
        unsafe { *(self.storage.as_ref().as_ptr().cast::<*mut VMFuncRef>()) }
    }
    /// Mutably view the storage as a raw `VMFuncRef` pointer.
    pub unsafe fn as_func_ref_mut(&mut self) -> &mut *mut VMFuncRef {
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<*mut VMFuncRef>()) }
    }
}
#[cfg(test)]
mod test_vmshared_type_index {
    use super::VMSharedTypeIndex;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Host size of `VMSharedTypeIndex` must agree with the compiler's view.
    #[test]
    fn check_vmshared_type_index() {
        let o = host_offsets();
        assert_eq!(size_of::<VMSharedTypeIndex>(), usize::from(o.size_of_vmshared_type_index()));
    }
}
/// The runtime definition of an exception/control tag: its registered type.
///
/// `#[repr(C)]`: read at fixed offsets by generated code.
#[derive(Debug)]
#[repr(C)]
pub struct VMTagDefinition {
    /// The interned signature of this tag.
    pub type_index: VMSharedTypeIndex,
}
impl VMTagDefinition {
    /// Construct a tag definition with the given interned type.
    pub fn new(type_index: VMSharedTypeIndex) -> Self {
        Self { type_index }
    }
}
// SAFETY: a single plain index.
unsafe impl VmSafe for VMTagDefinition {}
#[cfg(test)]
mod test_vmtag_definition {
    use super::VMTagDefinition;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, PtrSize, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Host size of `VMTagDefinition` must agree with the compiler's view.
    #[test]
    fn check_vmtag_definition_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMTagDefinition>(), usize::from(o.ptr.size_of_vmtag_definition()));
    }

    /// Tags in the vmctx begin on a 16-byte boundary.
    #[test]
    fn check_vmtag_begins_aligned() {
        let o = host_offsets();
        assert_eq!(o.vmctx_tags_begin() % 16, 0);
    }
}
/// The runtime representation of a `funcref`.
///
/// `#[repr(C)]`: field offsets are read by generated code and must match
/// `VMOffsets` (asserted in the test module below).
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMFuncRef {
    /// Host-ABI entry point; always present (see `VMArrayCallNative`).
    pub array_call: VmPtr<VMArrayCallFunction>,
    /// Wasm-ABI entry point; `None` until/unless a wasm-callable trampoline
    /// is available for this function.
    pub wasm_call: Option<VmPtr<VMWasmCallFunction>>,
    /// The interned type of the function, for call_indirect checks.
    pub type_index: VMSharedTypeIndex,
    /// The callee context passed to either entry point.
    pub vmctx: VmPtr<VMOpaqueContext>,
}
// SAFETY: pointers, an optional pointer, and a plain index.
unsafe impl VmSafe for VMFuncRef {}
impl VMFuncRef {
    /// Invoke this function via the array-call ABI, dispatching to either the
    /// Pulley interpreter or native code.
    ///
    /// Returns the callee's `bool` result unchanged (`true` = success).
    ///
    /// # Safety
    ///
    /// `me` must point to a valid `VMFuncRef` and `args_and_results` must be
    /// a valid buffer matching the callee's expectations.
    #[inline]
    pub unsafe fn array_call(
        me: NonNull<VMFuncRef>,
        pulley: Option<InterpreterRef<'_>>,
        caller: NonNull<VMContext>,
        args_and_results: NonNull<[ValRaw]>,
    ) -> bool {
        match pulley {
            Some(vm) => unsafe { Self::array_call_interpreted(me, vm, caller, args_and_results) },
            None => unsafe { Self::array_call_native(me, caller, args_and_results) },
        }
    }
    /// Interpreter-path dispatch for `array_call`.
    unsafe fn array_call_interpreted(
        me: NonNull<VMFuncRef>,
        vm: InterpreterRef<'_>,
        caller: NonNull<VMContext>,
        args_and_results: NonNull<[ValRaw]>,
    ) -> bool {
        unsafe {
            // Host functions are native code even when wasm runs under the
            // interpreter, so detect them by their context magic and call
            // them directly rather than through Pulley.
            if me.as_ref().vmctx.as_non_null().as_ref().magic
                == wasmtime_environ::VM_ARRAY_CALL_HOST_FUNC_MAGIC
            {
                return Self::array_call_native(me, caller, args_and_results);
            }
            vm.call(
                me.as_ref().array_call.as_non_null(),
                me.as_ref().vmctx.as_non_null(),
                caller,
                args_and_results,
            )
        }
    }
    /// Native-path dispatch for `array_call`: reinterpret the stored code
    /// pointer as a `VMArrayCallNative` and call it.
    #[inline]
    unsafe fn array_call_native(
        me: NonNull<VMFuncRef>,
        caller: NonNull<VMContext>,
        args_and_results: NonNull<[ValRaw]>,
    ) -> bool {
        unsafe {
            // A union is used instead of `mem::transmute` to convert the
            // data pointer into a function pointer.
            union GetNativePointer {
                native: VMArrayCallNative,
                ptr: NonNull<VMArrayCallFunction>,
            }
            let native = GetNativePointer {
                ptr: me.as_ref().array_call.as_non_null(),
            }
            .native;
            native(
                me.as_ref().vmctx.as_non_null(),
                caller,
                args_and_results.cast(),
                args_and_results.len(),
            )
        }
    }
}
#[cfg(test)]
mod test_vm_func_ref {
    use super::VMFuncRef;
    use core::mem::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{HostPtr, Module, PtrSize, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Rust layout of `VMFuncRef` must agree with the compiler's offsets.
    #[test]
    fn check_vm_func_ref_offsets() {
        let o = host_offsets();
        assert_eq!(size_of::<VMFuncRef>(), usize::from(o.ptr.size_of_vm_func_ref()));
        assert_eq!(offset_of!(VMFuncRef, array_call), usize::from(o.ptr.vm_func_ref_array_call()));
        assert_eq!(offset_of!(VMFuncRef, wasm_call), usize::from(o.ptr.vm_func_ref_wasm_call()));
        assert_eq!(offset_of!(VMFuncRef, type_index), usize::from(o.ptr.vm_func_ref_type_index()));
        assert_eq!(offset_of!(VMFuncRef, vmctx), usize::from(o.ptr.vm_func_ref_vmctx()));
    }
}
/// Generates `VMBuiltinFunctionsArray`: a `#[repr(C)]` struct with one
/// `extern "C"` function pointer per builtin libcall. Instantiated by
/// `wasmtime_environ::foreach_builtin_function!` below.
macro_rules! define_builtin_array {
    (
        $(
            $( #[$attr:meta] )*
            $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
        )*
    ) => {
        #[repr(C)]
        #[allow(improper_ctypes_definitions, reason = "__m128i known not FFI-safe")]
        pub struct VMBuiltinFunctionsArray {
            $(
                $name: unsafe extern "C" fn(
                    $(define_builtin_array!(@ty $param)),*
                ) $( -> define_builtin_array!(@ty $result))?,
            )*
        }
        impl VMBuiltinFunctionsArray {
            /// The canonical array value, wired to the raw libcall impls.
            pub const INIT: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray {
                $(
                    $name: crate::runtime::vm::libcalls::raw::$name,
                )*
            };
            /// Marks every contained pointer's provenance as exposed and
            /// returns a pointer to the array — presumably so generated code
            /// may reconstruct these function pointers from raw addresses
            /// (strict-provenance requirement); confirm against callers.
            pub fn expose_provenance(&self) -> NonNull<Self>{
                $(
                    (self.$name as *mut u8).expose_provenance();
                )*
                NonNull::from(self)
            }
        }
    };

    // Map libcall signature type names to their host representations.
    (@ty u32) => (u32);
    (@ty u64) => (u64);
    (@ty f32) => (f32);
    (@ty f64) => (f64);
    (@ty u8) => (u8);
    (@ty i8x16) => (i8x16);
    (@ty f32x4) => (f32x4);
    (@ty f64x2) => (f64x2);
    (@ty bool) => (bool);
    (@ty pointer) => (*mut u8);
    (@ty size) => (usize);
    (@ty vmctx) => (NonNull<VMContext>);
}
// SAFETY: the array consists solely of function pointers.
unsafe impl VmSafe for VMBuiltinFunctionsArray {}
// Expand the macro above with the full list of builtin libcalls.
wasmtime_environ::foreach_builtin_function!(define_builtin_array);
// Each entry must occupy exactly one pointer-sized slot, since generated
// code indexes this array by `BuiltinFunctionIndex * size_of::<usize>()`.
const _: () = {
    assert!(
        mem::size_of::<VMBuiltinFunctionsArray>()
            == mem::size_of::<usize>() * (BuiltinFunctionIndex::len() as usize)
    )
};
/// Per-store state shared with generated code.
///
/// `#[repr(C)]`: every field here is read/written at a fixed offset by
/// compiled wasm (see the offset assertions in the test module below).
/// `UnsafeCell` fields are mutated from wasm while Rust holds `&self`.
#[derive(Debug)]
#[repr(C)]
pub struct VMStoreContext {
    /// Running fuel total consumed by wasm in this store.
    pub fuel_consumed: UnsafeCell<i64>,
    /// Epoch value at which wasm should yield/trap.
    pub epoch_deadline: UnsafeCell<u64>,
    /// Version counter for this store's execution state.
    pub execution_version: u64,
    /// Stack limit checked in function prologues.
    pub stack_limit: UnsafeCell<usize>,
    /// Base/length of the GC heap, exposed like a linear memory.
    pub gc_heap: VMMemoryDefinition,
    /// Frame pointer of the trampoline for the most recent wasm exit; the
    /// wasm FP itself is recovered via `last_wasm_exit_fp()`.
    pub last_wasm_exit_trampoline_fp: UnsafeCell<usize>,
    /// Program counter of the most recent exit from wasm.
    pub last_wasm_exit_pc: UnsafeCell<usize>,
    /// Stack pointer at the most recent entry into wasm.
    pub last_wasm_entry_sp: UnsafeCell<usize>,
    /// Frame pointer at the most recent entry into wasm.
    pub last_wasm_entry_fp: UnsafeCell<usize>,
    /// Trap-handler address recorded at the most recent entry into wasm.
    pub last_wasm_entry_trap_handler: UnsafeCell<usize>,
    /// Chain of stacks for the stack-switching proposal.
    pub stack_chain: UnsafeCell<VMStackChain>,
    /// Pointer to this store's host data.
    pub store_data: VmPtr<()>,
    /// Guard-page range used by async stacks; empty when unused.
    pub async_guard_range: Range<*mut u8>,
}
impl VMStoreContext {
    /// Frame pointer of the most recent wasm exit, recovered from the saved
    /// trampoline frame pointer.
    ///
    /// # Safety
    ///
    /// `last_wasm_exit_trampoline_fp` must be 0 or point at a valid
    /// trampoline frame (see `wasm_exit_fp_from_trampoline_fp`).
    pub(crate) unsafe fn last_wasm_exit_fp(&self) -> usize {
        unsafe {
            let trampoline_fp = *self.last_wasm_exit_trampoline_fp.get();
            Self::wasm_exit_fp_from_trampoline_fp(trampoline_fp)
        }
    }
    /// Reads the wasm frame pointer out of a trampoline frame: the first
    /// word at `trampoline_fp` is the caller's (wasm) FP. A zero FP means
    /// "no wasm on the stack" and maps to 0.
    ///
    /// # Safety
    ///
    /// `trampoline_fp` must be 0 or a pointer to a live trampoline frame.
    pub(crate) unsafe fn wasm_exit_fp_from_trampoline_fp(trampoline_fp: usize) -> usize {
        if trampoline_fp != 0 {
            unsafe { *(trampoline_fp as *const usize) }
        } else {
            0
        }
    }
}
// SAFETY: the raw pointers inside (`async_guard_range`, `store_data`) are
// managed by the owning store; access is coordinated at a higher level.
unsafe impl Send for VMStoreContext {}
unsafe impl Sync for VMStoreContext {}
unsafe impl VmSafe for VMStoreContext {}
impl Default for VMStoreContext {
    /// A `VMStoreContext` with all state zeroed/empty, suitable as the
    /// initial value before any wasm has executed.
    fn default() -> VMStoreContext {
        VMStoreContext {
            fuel_consumed: UnsafeCell::new(0),
            epoch_deadline: UnsafeCell::new(0),
            execution_version: 0,
            // `usize::MAX` effectively disables the stack-limit check until a
            // real limit is configured. (`usize::max_value()` is deprecated.)
            stack_limit: UnsafeCell::new(usize::MAX),
            // Dangling-but-aligned base with zero length: an empty GC heap.
            gc_heap: VMMemoryDefinition {
                base: NonNull::dangling().into(),
                current_length: AtomicUsize::new(0),
            },
            last_wasm_exit_trampoline_fp: UnsafeCell::new(0),
            last_wasm_exit_pc: UnsafeCell::new(0),
            last_wasm_entry_sp: UnsafeCell::new(0),
            last_wasm_entry_fp: UnsafeCell::new(0),
            last_wasm_entry_trap_handler: UnsafeCell::new(0),
            stack_chain: UnsafeCell::new(VMStackChain::Absent),
            store_data: VmPtr::dangling(),
            async_guard_range: ptr::null_mut()..ptr::null_mut(),
        }
    }
}
#[cfg(test)]
mod test_vmstore_context {
    use super::{VMMemoryDefinition, VMStoreContext};
    use core::mem::offset_of;
    use wasmtime_environ::{HostPtr, Module, PtrSize, StaticModuleIndex, VMOffsets};

    /// Offsets computed for an empty module — sufficient for pure layout checks.
    fn host_offsets() -> VMOffsets<HostPtr> {
        let empty = Module::new(StaticModuleIndex::from_u32(0));
        VMOffsets::new(HostPtr, &empty)
    }

    /// Every field generated code touches must sit at the offset the
    /// compiler expects.
    #[test]
    fn field_offsets() {
        let o = host_offsets();
        let p = &o.ptr;
        assert_eq!(offset_of!(VMStoreContext, stack_limit), usize::from(p.vmstore_context_stack_limit()));
        assert_eq!(offset_of!(VMStoreContext, fuel_consumed), usize::from(p.vmstore_context_fuel_consumed()));
        assert_eq!(offset_of!(VMStoreContext, epoch_deadline), usize::from(p.vmstore_context_epoch_deadline()));
        assert_eq!(
            offset_of!(VMStoreContext, execution_version),
            usize::from(p.vmstore_context_execution_version())
        );
        assert_eq!(offset_of!(VMStoreContext, gc_heap), usize::from(p.vmstore_context_gc_heap()));
        // The GC heap's inner fields are also addressed directly.
        let gc_heap_base = offset_of!(VMStoreContext, gc_heap);
        assert_eq!(
            gc_heap_base + offset_of!(VMMemoryDefinition, base),
            usize::from(p.vmstore_context_gc_heap_base())
        );
        assert_eq!(
            gc_heap_base + offset_of!(VMMemoryDefinition, current_length),
            usize::from(p.vmstore_context_gc_heap_current_length())
        );
        assert_eq!(
            offset_of!(VMStoreContext, last_wasm_exit_trampoline_fp),
            usize::from(p.vmstore_context_last_wasm_exit_trampoline_fp())
        );
        assert_eq!(
            offset_of!(VMStoreContext, last_wasm_exit_pc),
            usize::from(p.vmstore_context_last_wasm_exit_pc())
        );
        assert_eq!(
            offset_of!(VMStoreContext, last_wasm_entry_fp),
            usize::from(p.vmstore_context_last_wasm_entry_fp())
        );
        assert_eq!(
            offset_of!(VMStoreContext, last_wasm_entry_sp),
            usize::from(p.vmstore_context_last_wasm_entry_sp())
        );
        assert_eq!(
            offset_of!(VMStoreContext, last_wasm_entry_trap_handler),
            usize::from(p.vmstore_context_last_wasm_entry_trap_handler())
        );
        assert_eq!(offset_of!(VMStoreContext, stack_chain), usize::from(p.vmstore_context_stack_chain()));
        assert_eq!(offset_of!(VMStoreContext, store_data), usize::from(p.vmstore_context_store_data()));
    }
}
/// The context header for a wasm instance.
///
/// This struct only declares the leading magic word; the rest of the
/// instance's state lives past the end of this struct in the same
/// allocation and is addressed via computed offsets. 16-byte aligned so
/// trailing v128 globals are naturally aligned.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {
    /// Always `VMCONTEXT_MAGIC`; checked by `VMContext::from_opaque`.
    _magic: u32,
}
impl VMContext {
    /// Downcast an opaque context to a wasm `VMContext`.
    ///
    /// # Safety
    ///
    /// `opaque` must actually point at a `VMContext` (debug-checked via the
    /// `VMCONTEXT_MAGIC` word).
    #[inline]
    pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMContext> {
        unsafe {
            debug_assert_eq!(opaque.as_ref().magic, VMCONTEXT_MAGIC);
        }
        opaque.cast()
    }
}
/// The raw, type-erased representation of a single wasm value used by the
/// array-call ABI.
///
/// All fields are stored in little-endian byte order regardless of host
/// endianness (see the `to_le`/`from_le` calls in the `impl` below), and the
/// full 16 bytes are always initialized by the constructors.
#[repr(C)]
#[derive(Copy, Clone)]
pub union ValRaw {
    /// A wasm `i32`.
    i32: i32,
    /// A wasm `i64`.
    i64: i64,
    /// A wasm `f32`, stored as its raw bits.
    f32: u32,
    /// A wasm `f64`, stored as its raw bits.
    f64: u64,
    /// A wasm `v128`, stored as raw little-endian bytes.
    v128: [u8; 16],
    /// A `funcref` pointer.
    funcref: *mut c_void,
    /// An `externref`, as a raw GC-ref u32 (0 = null).
    externref: u32,
    /// An `anyref`, as a raw GC-ref u32 (0 = null).
    anyref: u32,
    /// An `exnref`, as a raw GC-ref u32 (0 = null).
    exnref: u32,
}
// The ABI fixes `ValRaw` at 16 bytes with 8-byte alignment.
const _: () = {
    assert!(mem::size_of::<ValRaw>() == 16);
    assert!(mem::align_of::<ValRaw>() == mem::align_of::<u64>());
};
// SAFETY: `ValRaw` is plain data; the `funcref` pointer is only ever treated
// as an address, with provenance handled via expose/with_exposed_provenance.
unsafe impl Send for ValRaw {}
unsafe impl Sync for ValRaw {}
impl fmt::Debug for ValRaw {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        /// Prints a value as a zero-padded hex literal sized to its type.
        struct AsHex<T>(T);
        impl<T: fmt::LowerHex> fmt::Debug for AsHex<T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Two hex digits per byte of `T`.
                let width = mem::size_of::<T>() * 2;
                write!(f, "0x{:0width$x}", self.0)
            }
        }
        // SAFETY: every union member is plain data and all 16 bytes are
        // always initialized, so reading any field as raw bits is sound.
        unsafe {
            f.debug_struct("ValRaw")
                .field("i32", &AsHex(self.i32))
                .field("i64", &AsHex(self.i64))
                .field("f32", &AsHex(self.f32))
                .field("f64", &AsHex(self.f64))
                .field("v128", &AsHex(u128::from_le_bytes(self.v128)))
                .field("funcref", &self.funcref)
                .field("externref", &AsHex(self.externref))
                .field("anyref", &AsHex(self.anyref))
                .field("exnref", &AsHex(self.exnref))
                .finish()
        }
    }
}
impl ValRaw {
    /// A zeroed `ValRaw`: the null/zero encoding for every value type.
    pub fn null() -> ValRaw {
        unsafe {
            // SAFETY: all-zero bytes are a valid bit pattern for every field
            // of this plain-data union.
            let raw = mem::MaybeUninit::<Self>::zeroed().assume_init();
            debug_assert_eq!(raw.get_anyref(), 0);
            debug_assert_eq!(raw.get_exnref(), 0);
            debug_assert_eq!(raw.get_externref(), 0);
            debug_assert_eq!(raw.get_funcref(), ptr::null_mut());
            raw
        }
    }
    /// Creates a wasm `i32`.
    ///
    /// Stored zero-extended through the `i64`/`u64` field so that all 64
    /// low bits are defined, not just the low 32.
    #[inline]
    pub fn i32(i: i32) -> ValRaw {
        ValRaw::u64(i.cast_unsigned().into())
    }
    /// Creates a wasm `i64`, stored little-endian.
    #[inline]
    pub fn i64(i: i64) -> ValRaw {
        ValRaw { i64: i.to_le() }
    }
    /// Creates a wasm `i32` from an unsigned value (zero-extended).
    #[inline]
    pub fn u32(i: u32) -> ValRaw {
        ValRaw::u64(i.into())
    }
    /// Creates a wasm `i64` from an unsigned value.
    #[inline]
    pub fn u64(i: u64) -> ValRaw {
        ValRaw::i64(i as i64)
    }
    /// Creates a wasm `f32` from its raw bits (zero-extended like `i32`).
    #[inline]
    pub fn f32(i: u32) -> ValRaw {
        ValRaw::u64(i.into())
    }
    /// Creates a wasm `f64` from its raw bits, stored little-endian.
    #[inline]
    pub fn f64(i: u64) -> ValRaw {
        ValRaw { f64: i.to_le() }
    }
    /// Creates a wasm `v128`, stored as little-endian bytes.
    #[inline]
    pub fn v128(i: u128) -> ValRaw {
        ValRaw {
            v128: i.to_le_bytes(),
        }
    }
    /// Creates a `funcref` from a raw pointer; the address is stored
    /// little-endian while preserving pointer provenance via `map_addr`.
    #[inline]
    pub fn funcref(i: *mut c_void) -> ValRaw {
        ValRaw {
            funcref: i.map_addr(|i| i.to_le()),
        }
    }
    /// Creates an `externref` from a raw GC-ref value (0 = null).
    ///
    /// Non-null values are only meaningful with the `gc` feature enabled.
    #[inline]
    pub fn externref(e: u32) -> ValRaw {
        assert!(cfg!(feature = "gc") || e == 0);
        ValRaw {
            externref: e.to_le(),
        }
    }
    /// Creates an `anyref` from a raw GC-ref value (0 = null).
    #[inline]
    pub fn anyref(r: u32) -> ValRaw {
        assert!(cfg!(feature = "gc") || r == 0);
        ValRaw { anyref: r.to_le() }
    }
    /// Creates an `exnref` from a raw GC-ref value (0 = null).
    #[inline]
    pub fn exnref(r: u32) -> ValRaw {
        assert!(cfg!(feature = "gc") || r == 0);
        ValRaw { exnref: r.to_le() }
    }
    /// Reads the wasm `i32` value.
    #[inline]
    pub fn get_i32(&self) -> i32 {
        unsafe { i32::from_le(self.i32) }
    }
    /// Reads the wasm `i64` value.
    #[inline]
    pub fn get_i64(&self) -> i64 {
        unsafe { i64::from_le(self.i64) }
    }
    /// Reads the wasm `i32` value as unsigned.
    #[inline]
    pub fn get_u32(&self) -> u32 {
        self.get_i32().cast_unsigned()
    }
    /// Reads the wasm `i64` value as unsigned.
    #[inline]
    pub fn get_u64(&self) -> u64 {
        self.get_i64().cast_unsigned()
    }
    /// Reads the raw bits of the wasm `f32` value.
    #[inline]
    pub fn get_f32(&self) -> u32 {
        unsafe { u32::from_le(self.f32) }
    }
    /// Reads the raw bits of the wasm `f64` value.
    #[inline]
    pub fn get_f64(&self) -> u64 {
        unsafe { u64::from_le(self.f64) }
    }
    /// Reads the wasm `v128` value.
    #[inline]
    pub fn get_v128(&self) -> u128 {
        unsafe { u128::from_le_bytes(self.v128) }
    }
    /// Reads the `funcref` pointer, reconstructing provenance from the
    /// addresses exposed when the pointer was stored.
    #[inline]
    pub fn get_funcref(&self) -> *mut c_void {
        let addr = unsafe { usize::from_le(self.funcref.addr()) };
        core::ptr::with_exposed_provenance_mut(addr)
    }
    /// Reads the raw `externref` value (0 = null).
    #[inline]
    pub fn get_externref(&self) -> u32 {
        let externref = u32::from_le(unsafe { self.externref });
        assert!(cfg!(feature = "gc") || externref == 0);
        externref
    }
    /// Reads the raw `anyref` value (0 = null).
    #[inline]
    pub fn get_anyref(&self) -> u32 {
        let anyref = u32::from_le(unsafe { self.anyref });
        assert!(cfg!(feature = "gc") || anyref == 0);
        anyref
    }
    /// Reads the raw `exnref` value (0 = null).
    #[inline]
    pub fn get_exnref(&self) -> u32 {
        let exnref = u32::from_le(unsafe { self.exnref });
        assert!(cfg!(feature = "gc") || exnref == 0);
        exnref
    }
}
/// A type-erased context pointer: the common prefix of `VMContext` and
/// `VMArrayCallHostFuncContext`, distinguished at runtime by `magic`.
pub struct VMOpaqueContext {
    /// Discriminates the concrete context type (e.g. `VMCONTEXT_MAGIC`).
    pub(crate) magic: u32,
    // Contexts are address-sensitive; forbid moving them.
    _marker: marker::PhantomPinned,
}
impl VMOpaqueContext {
    /// Upcast a wasm `VMContext` to the opaque context type.
    #[inline]
    pub fn from_vmcontext(ptr: NonNull<VMContext>) -> NonNull<VMOpaqueContext> {
        ptr.cast()
    }
    /// Upcast a host-function context to the opaque context type.
    #[inline]
    pub fn from_vm_array_call_host_func_context(
        ptr: NonNull<VMArrayCallHostFuncContext>,
    ) -> NonNull<VMOpaqueContext> {
        ptr.cast()
    }
}