mod vm_host_func_context;
use crate::{GcStore, VMGcRef};
use sptr::Strict;
use std::cell::UnsafeCell;
use std::ffi::c_void;
use std::marker;
use std::mem;
use std::ptr::{self, NonNull};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::u32;
pub use vm_host_func_context::{VMArrayCallHostFuncContext, VMNativeCallHostFuncContext};
use wasmtime_environ::{BuiltinFunctionIndex, DefinedMemoryIndex, Unsigned, VMCONTEXT_MAGIC};
/// Signature of the "array" calling convention: two `VMOpaqueContext`
/// pointers plus a pointer/length pair of `ValRaw` slots that carry the
/// arguments (and, by convention, the results) of the call.
pub type VMArrayCallFunction =
    unsafe extern "C" fn(*mut VMOpaqueContext, *mut VMOpaqueContext, *mut ValRaw, usize);
/// Marker type for a function callable via the "native" calling convention.
/// `repr(transparent)` over `VMFunctionBody`, so a pointer to this is just a
/// code pointer; it is only ever used behind `NonNull`/raw pointers here.
#[repr(transparent)]
pub struct VMNativeCallFunction(VMFunctionBody);
/// Marker type for a function callable via the Wasm calling convention.
/// `repr(transparent)` over `VMFunctionBody`, so a pointer to this is just a
/// code pointer; it is only ever used behind `NonNull`/raw pointers here.
#[repr(transparent)]
pub struct VMWasmCallFunction(VMFunctionBody);
/// Resolved entry points and owning context for an imported function.
///
/// `#[repr(C)]` because this layout is consumed by generated code; the
/// `test_vmfunction_import` module below checks every field offset against
/// `wasmtime_environ::VMOffsets`.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// Entry point for calls using the Wasm calling convention.
    pub wasm_call: NonNull<VMWasmCallFunction>,
    /// Entry point for calls using the native calling convention.
    pub native_call: NonNull<VMNativeCallFunction>,
    /// Entry point for calls using the array calling convention.
    pub array_call: VMArrayCallFunction,
    /// Context pointer passed as the callee context when invoking this
    /// import.
    pub vmctx: *mut VMOpaqueContext,
}

// SAFETY: this type only stores raw pointers and never dereferences them
// itself. NOTE(review): cross-thread use presumably relies on the runtime's
// broader synchronization invariants — not provable from this file alone.
unsafe impl Send for VMFunctionImport {}
unsafe impl Sync for VMFunctionImport {}
#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, VMOffsets};

    /// The Rust layout of `VMFunctionImport` must agree with the size and
    /// field offsets that `wasmtime_environ` computes for generated code.
    #[test]
    fn check_vmfunction_import_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMFunctionImport>(), usize::from(offsets.size_of_vmfunction_import()));
        assert_eq!(offset_of!(VMFunctionImport, wasm_call), usize::from(offsets.vmfunction_import_wasm_call()));
        assert_eq!(offset_of!(VMFunctionImport, native_call), usize::from(offsets.vmfunction_import_native_call()));
        assert_eq!(offset_of!(VMFunctionImport, array_call), usize::from(offsets.vmfunction_import_array_call()));
        assert_eq!(offset_of!(VMFunctionImport, vmctx), usize::from(offsets.vmfunction_import_vmctx()));
    }
}
/// One-byte placeholder type for compiled function code.
///
/// Pointers to this type address raw machine code; the byte-sized payload
/// keeps pointer arithmetic byte-granular (asserted in the test below).
/// It is only ever used behind pointers, never by value.
#[repr(C)]
pub struct VMFunctionBody(u8);
#[cfg(test)]
mod test_vmfunction_body {
    use super::VMFunctionBody;
    use std::mem::size_of;

    /// `VMFunctionBody` must stay exactly one byte so that pointer
    /// arithmetic over code remains byte-granular.
    #[test]
    fn check_vmfunction_body_offsets() {
        let body_size = size_of::<VMFunctionBody>();
        assert_eq!(body_size, 1);
    }
}
/// Pointers describing an imported table: the table's definition and the
/// `VMContext` that owns it.
///
/// `#[repr(C)]` because generated code reads these fields; offsets are
/// checked in `test_vmtable_import` below.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// Pointer to the imported table's definition (base/length).
    pub from: *mut VMTableDefinition,
    /// The context that owns the table pointed to by `from`.
    pub vmctx: *mut VMContext,
}

// SAFETY: only raw pointers are stored; they are never dereferenced by this
// type. NOTE(review): thread-safety presumably relies on runtime-level
// invariants not visible in this file.
unsafe impl Send for VMTableImport {}
unsafe impl Sync for VMTableImport {}
#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, VMOffsets};

    /// The Rust layout of `VMTableImport` must agree with the size and
    /// field offsets that `wasmtime_environ` computes.
    #[test]
    fn check_vmtable_import_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMTableImport>(), usize::from(offsets.size_of_vmtable_import()));
        assert_eq!(offset_of!(VMTableImport, from), usize::from(offsets.vmtable_import_from()));
        assert_eq!(offset_of!(VMTableImport, vmctx), usize::from(offsets.vmtable_import_vmctx()));
    }
}
/// Pointers describing an imported linear memory: its definition, the
/// owning context, and the memory's index within that context.
///
/// `#[repr(C)]` because generated code reads these fields; offsets are
/// checked in `test_vmmemory_import` below.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// Pointer to the imported memory's definition (base/length).
    pub from: *mut VMMemoryDefinition,
    /// The context that owns the memory pointed to by `from`.
    pub vmctx: *mut VMContext,
    /// The index of this memory within its defining (owning) instance.
    pub index: DefinedMemoryIndex,
}

// SAFETY: only raw pointers are stored; they are never dereferenced by this
// type. NOTE(review): thread-safety presumably relies on runtime-level
// invariants not visible in this file.
unsafe impl Send for VMMemoryImport {}
unsafe impl Sync for VMMemoryImport {}
#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, VMOffsets};

    /// The Rust layout of `VMMemoryImport` must agree with the size and
    /// field offsets that `wasmtime_environ` computes.
    #[test]
    fn check_vmmemory_import_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMMemoryImport>(), usize::from(offsets.size_of_vmmemory_import()));
        assert_eq!(offset_of!(VMMemoryImport, from), usize::from(offsets.vmmemory_import_from()));
        assert_eq!(offset_of!(VMMemoryImport, vmctx), usize::from(offsets.vmmemory_import_vmctx()));
    }
}
/// Pointer to an imported global's definition.
///
/// `#[repr(C)]` because generated code reads this field; its offset is
/// checked in `test_vmglobal_import` below.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// Pointer to the imported global's 16-byte storage cell.
    pub from: *mut VMGlobalDefinition,
}

// SAFETY: only a raw pointer is stored; it is never dereferenced by this
// type. NOTE(review): thread-safety presumably relies on runtime-level
// invariants not visible in this file.
unsafe impl Send for VMGlobalImport {}
unsafe impl Sync for VMGlobalImport {}
#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, VMOffsets};

    /// The Rust layout of `VMGlobalImport` must agree with the size and
    /// field offset that `wasmtime_environ` computes.
    #[test]
    fn check_vmglobal_import_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMGlobalImport>(), usize::from(offsets.size_of_vmglobal_import()));
        assert_eq!(offset_of!(VMGlobalImport, from), usize::from(offsets.vmglobal_import_from()));
    }
}
/// The base pointer and current byte length of a linear memory.
///
/// `#[repr(C)]` because generated code reads these fields directly; the
/// layout is checked in `test_vmmemory_definition` below. `current_length`
/// is atomic so it can be observed while another thread grows the memory
/// (loads use `Ordering::Relaxed`, see `current_length()` below).
#[derive(Debug)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// Base address of this memory's allocation.
    pub base: *mut u8,
    /// Current size of the memory in bytes.
    pub current_length: AtomicUsize,
}
impl VMMemoryDefinition {
    /// Returns the current byte length of this memory.
    ///
    /// Uses a relaxed atomic load. NOTE(review): relaxed presumably
    /// suffices because any stronger ordering guarantees come from
    /// elsewhere in the runtime — not provable from this file.
    pub fn current_length(&self) -> usize {
        self.current_length.load(Ordering::Relaxed)
    }

    /// Copies out a snapshot of `*ptr`.
    ///
    /// Note the two fields are read separately (`base`, then a relaxed
    /// load of the length), so the pair is not read as one atomic unit.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, properly aligned, and valid for reads of a
    /// `VMMemoryDefinition`.
    pub unsafe fn load(ptr: *mut Self) -> Self {
        let other = &*ptr;
        VMMemoryDefinition {
            base: other.base,
            current_length: other.current_length().into(),
        }
    }
}
#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, PtrSize, VMOffsets};

    /// The Rust layout of `VMMemoryDefinition` must agree with the size
    /// and field offsets that `wasmtime_environ` computes.
    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMMemoryDefinition>(), usize::from(offsets.ptr.size_of_vmmemory_definition()));
        assert_eq!(offset_of!(VMMemoryDefinition, base), usize::from(offsets.ptr.vmmemory_definition_base()));
        assert_eq!(offset_of!(VMMemoryDefinition, current_length), usize::from(offsets.ptr.vmmemory_definition_current_length()));
    }
}
/// The base pointer and current element count of a table.
///
/// `#[repr(C)]` because generated code reads these fields directly; the
/// layout is checked in `test_vmtable_definition` below.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Base address of this table's element storage.
    pub base: *mut u8,
    /// Current number of elements in the table.
    pub current_elements: u32,
}
#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, VMOffsets};

    /// The Rust layout of `VMTableDefinition` must agree with the size
    /// and field offsets that `wasmtime_environ` computes.
    #[test]
    fn check_vmtable_definition_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMTableDefinition>(), usize::from(offsets.size_of_vmtable_definition()));
        assert_eq!(offset_of!(VMTableDefinition, base), usize::from(offsets.vmtable_definition_base()));
        assert_eq!(offset_of!(VMTableDefinition, current_elements), usize::from(offsets.vmtable_definition_current_elements()));
    }
}
/// Untyped 16-byte, 16-byte-aligned storage cell for a global's value.
///
/// The actual value type is known only to the code using the global; the
/// typed `as_*` accessors below reinterpret these bytes. 16 bytes is large
/// enough for every representable value, including `v128` and (when the
/// `gc` feature is on) `VMGcRef` — both asserted in the tests below.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    // Raw value bytes; always accessed through the typed accessors.
    storage: [u8; 16],
}
#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use std::mem::{align_of, size_of};
    use wasmtime_environ::{Module, PtrSize, VMOffsets};

    /// The cell must be aligned enough for every value type stored in it.
    #[test]
    fn check_vmglobal_definition_alignment() {
        let align = align_of::<VMGlobalDefinition>();
        assert!(align >= align_of::<i32>());
        assert!(align >= align_of::<i64>());
        assert!(align >= align_of::<f32>());
        assert!(align >= align_of::<f64>());
        assert!(align >= align_of::<[u8; 16]>());
    }

    /// The Rust size must agree with what `wasmtime_environ` computes.
    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMGlobalDefinition>(), usize::from(offsets.ptr.size_of_vmglobal_definition()));
    }

    /// Globals are laid out contiguously at 16-byte alignment in vmctx.
    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }

    /// A GC reference must fit in the cell when the `gc` feature is on.
    #[test]
    #[cfg(feature = "gc")]
    fn check_vmglobal_can_contain_gc_ref() {
        assert!(size_of::<crate::VMGcRef>() <= size_of::<VMGlobalDefinition>());
    }
}
impl VMGlobalDefinition {
    /// Construct a zero-initialized global cell.
    pub fn new() -> Self {
        Self { storage: [0; 16] }
    }

    // All `as_*` accessors below reinterpret the raw storage bytes as a
    // particular value type. They are `unsafe` because the caller must know
    // the global actually holds a value of that type; nothing here checks.

    /// Read the storage as an `i32`.
    pub unsafe fn as_i32(&self) -> &i32 {
        &*(self.storage.as_ref().as_ptr().cast::<i32>())
    }

    /// Mutably read the storage as an `i32`.
    pub unsafe fn as_i32_mut(&mut self) -> &mut i32 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<i32>())
    }

    /// Read the storage as a `u32`.
    pub unsafe fn as_u32(&self) -> &u32 {
        &*(self.storage.as_ref().as_ptr().cast::<u32>())
    }

    /// Mutably read the storage as a `u32`.
    pub unsafe fn as_u32_mut(&mut self) -> &mut u32 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>())
    }

    /// Read the storage as an `i64`.
    pub unsafe fn as_i64(&self) -> &i64 {
        &*(self.storage.as_ref().as_ptr().cast::<i64>())
    }

    /// Mutably read the storage as an `i64`.
    pub unsafe fn as_i64_mut(&mut self) -> &mut i64 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<i64>())
    }

    /// Read the storage as a `u64`.
    pub unsafe fn as_u64(&self) -> &u64 {
        &*(self.storage.as_ref().as_ptr().cast::<u64>())
    }

    /// Mutably read the storage as a `u64`.
    pub unsafe fn as_u64_mut(&mut self) -> &mut u64 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>())
    }

    /// Read the storage as an `f32`.
    pub unsafe fn as_f32(&self) -> &f32 {
        &*(self.storage.as_ref().as_ptr().cast::<f32>())
    }

    /// Mutably read the storage as an `f32`.
    pub unsafe fn as_f32_mut(&mut self) -> &mut f32 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<f32>())
    }

    /// Read the storage as the raw bit pattern of an `f32`.
    pub unsafe fn as_f32_bits(&self) -> &u32 {
        &*(self.storage.as_ref().as_ptr().cast::<u32>())
    }

    /// Mutably read the storage as the raw bit pattern of an `f32`.
    pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>())
    }

    /// Read the storage as an `f64`.
    pub unsafe fn as_f64(&self) -> &f64 {
        &*(self.storage.as_ref().as_ptr().cast::<f64>())
    }

    /// Mutably read the storage as an `f64`.
    pub unsafe fn as_f64_mut(&mut self) -> &mut f64 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<f64>())
    }

    /// Read the storage as the raw bit pattern of an `f64`.
    pub unsafe fn as_f64_bits(&self) -> &u64 {
        &*(self.storage.as_ref().as_ptr().cast::<u64>())
    }

    /// Mutably read the storage as the raw bit pattern of an `f64`.
    pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>())
    }

    /// Read the storage as a `u128` (v128 value).
    pub unsafe fn as_u128(&self) -> &u128 {
        &*(self.storage.as_ref().as_ptr().cast::<u128>())
    }

    /// Mutably read the storage as a `u128` (v128 value).
    pub unsafe fn as_u128_mut(&mut self) -> &mut u128 {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<u128>())
    }

    /// Read the storage as the raw bytes of a 128-bit value.
    pub unsafe fn as_u128_bits(&self) -> &[u8; 16] {
        &*(self.storage.as_ref().as_ptr().cast::<[u8; 16]>())
    }

    /// Mutably read the storage as the raw bytes of a 128-bit value.
    pub unsafe fn as_u128_bits_mut(&mut self) -> &mut [u8; 16] {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<[u8; 16]>())
    }

    /// Read the storage as an optional GC reference.
    ///
    /// Asserts that a non-null reference is only observed when the `gc`
    /// feature is enabled.
    pub unsafe fn as_gc_ref(&self) -> Option<&VMGcRef> {
        let raw_ptr = self.storage.as_ref().as_ptr().cast::<Option<VMGcRef>>();
        let ret = (*raw_ptr).as_ref();
        assert!(cfg!(feature = "gc") || ret.is_none());
        ret
    }

    /// Initialize the storage with a GC reference, overwriting whatever
    /// bytes were there without dropping or barriers (`ptr::write`) —
    /// intended for first-time initialization.
    pub unsafe fn init_gc_ref(&mut self, gc_ref: Option<VMGcRef>) {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        let raw_ptr = self.storage.as_mut().as_mut_ptr().cast::<Option<VMGcRef>>();
        ptr::write(raw_ptr, gc_ref);
    }

    /// Overwrite the stored GC reference via `GcStore::write_gc_ref`, which
    /// (unlike `init_gc_ref`) is responsible for handling the old value.
    pub unsafe fn write_gc_ref(&mut self, gc_store: &mut GcStore, gc_ref: Option<&VMGcRef>) {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        let dest = &mut *(self.storage.as_mut().as_mut_ptr().cast::<Option<VMGcRef>>());
        assert!(cfg!(feature = "gc") || dest.is_none());
        gc_store.write_gc_ref(dest, gc_ref)
    }

    /// Read the storage as a raw `funcref` pointer (may be null).
    pub unsafe fn as_func_ref(&self) -> *mut VMFuncRef {
        *(self.storage.as_ref().as_ptr().cast::<*mut VMFuncRef>())
    }

    /// Mutably read the storage as a raw `funcref` pointer.
    pub unsafe fn as_func_ref_mut(&mut self) -> &mut *mut VMFuncRef {
        &mut *(self.storage.as_mut().as_mut_ptr().cast::<*mut VMFuncRef>())
    }
}
/// Index of a type shared across modules at runtime, stored as a `u32`.
///
/// `u32::MAX` is reserved as the `Default` sentinel and rejected by
/// `VMSharedTypeIndex::new`.
#[repr(C)]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedTypeIndex(u32);
#[cfg(test)]
mod test_vmshared_type_index {
    use super::VMSharedTypeIndex;
    use std::mem::size_of;
    use wasmtime_environ::{Module, VMOffsets};

    /// The Rust size of `VMSharedTypeIndex` must agree with what
    /// `wasmtime_environ` computes.
    #[test]
    fn check_vmshared_type_index() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMSharedTypeIndex>(), usize::from(offsets.size_of_vmshared_type_index()));
    }
}
impl VMSharedTypeIndex {
#[inline]
pub fn new(value: u32) -> Self {
assert_ne!(
value,
u32::MAX,
"u32::MAX is reserved for the default value"
);
Self(value)
}
#[inline]
pub fn bits(&self) -> u32 {
self.0
}
}
impl Default for VMSharedTypeIndex {
#[inline]
fn default() -> Self {
Self(u32::MAX)
}
}
/// Runtime representation of a function reference: its entry points for
/// each calling convention, its type, and its owning context.
///
/// `#[repr(C)]` because generated code reads these fields; offsets are
/// checked in `test_vm_func_ref` below.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMFuncRef {
    /// Entry point for calls using the native calling convention.
    pub native_call: NonNull<VMNativeCallFunction>,
    /// Entry point for calls using the array calling convention.
    pub array_call: VMArrayCallFunction,
    /// Entry point for calls using the Wasm calling convention. `Option`
    /// because it may be absent (unlike the other two entry points).
    pub wasm_call: Option<NonNull<VMWasmCallFunction>>,
    /// The shared type index of this function's signature.
    pub type_index: VMSharedTypeIndex,
    /// Context passed as the callee context when invoking this function.
    pub vmctx: *mut VMOpaqueContext,
}

// SAFETY: only raw pointers are stored; they are never dereferenced by this
// type. NOTE(review): thread-safety presumably relies on runtime-level
// invariants not visible in this file.
unsafe impl Send for VMFuncRef {}
unsafe impl Sync for VMFuncRef {}
#[cfg(test)]
mod test_vm_func_ref {
    use super::VMFuncRef;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, PtrSize, VMOffsets};

    /// The Rust layout of `VMFuncRef` must agree with the size and field
    /// offsets that `wasmtime_environ` computes.
    #[test]
    fn check_vm_func_ref_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(size_of::<VMFuncRef>(), usize::from(offsets.ptr.size_of_vm_func_ref()));
        assert_eq!(offset_of!(VMFuncRef, native_call), usize::from(offsets.ptr.vm_func_ref_native_call()));
        assert_eq!(offset_of!(VMFuncRef, array_call), usize::from(offsets.ptr.vm_func_ref_array_call()));
        assert_eq!(offset_of!(VMFuncRef, wasm_call), usize::from(offsets.ptr.vm_func_ref_wasm_call()));
        assert_eq!(offset_of!(VMFuncRef, type_index), usize::from(offsets.ptr.vm_func_ref_type_index()));
        assert_eq!(offset_of!(VMFuncRef, vmctx), usize::from(offsets.ptr.vm_func_ref_vmctx()));
    }
}
// Generates `VMBuiltinFunctionsArray`: a `#[repr(C)]` table containing one
// function pointer per builtin libcall, initialized from
// `crate::libcalls::raw`. It is invoked below with
// `wasmtime_environ::foreach_builtin_function!`, which supplies one
// `name(params) -> result;` row per builtin. The `@ty` rules map the
// builtin-signature type names onto Rust FFI types.
macro_rules! define_builtin_array {
    (
        $(
            $( #[$attr:meta] )*
            $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
        )*
    ) => {
        // One `extern "C"` function pointer field per builtin, in
        // declaration order so indices line up with `BuiltinFunctionIndex`.
        #[repr(C)]
        pub struct VMBuiltinFunctionsArray {
            $(
                $name: unsafe extern "C" fn(
                    $(define_builtin_array!(@ty $param)),*
                ) $( -> define_builtin_array!(@ty $result))?,
            )*
        }
        impl VMBuiltinFunctionsArray {
            #[allow(unused_doc_comments)]
            pub const INIT: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray {
                $(
                    $name: crate::libcalls::raw::$name,
                )*
            };
        }
    };
    // Type mapping from builtin-signature names to FFI types.
    (@ty i32) => (u32);
    (@ty i64) => (u64);
    (@ty reference) => (*mut u8);
    (@ty pointer) => (*mut u8);
    (@ty vmctx) => (*mut VMContext);
}
// Instantiate `VMBuiltinFunctionsArray` with every builtin libcall.
wasmtime_environ::foreach_builtin_function!(define_builtin_array);

// Compile-time check: the array is exactly one pointer-sized slot per
// builtin, so it can be indexed by `BuiltinFunctionIndex`.
const _: () = {
    assert!(
        mem::size_of::<VMBuiltinFunctionsArray>()
            == mem::size_of::<usize>()
                * (BuiltinFunctionIndex::builtin_functions_total_number() as usize)
    )
};
/// A 16-byte, 16-byte-aligned cell used to pass an argument value during
/// invocation; sized like `VMGlobalDefinition` (checked in the tests
/// below) so it can hold any representable value.
#[derive(Debug, Copy, Clone)]
#[repr(C, align(16))]
pub struct VMInvokeArgument([u8; 16]);
#[cfg(test)]
mod test_vm_invoke_argument {
    use super::VMInvokeArgument;
    use std::mem::{align_of, size_of};
    use wasmtime_environ::{Module, PtrSize, VMOffsets};

    /// The argument cell must be 16-byte aligned, matching its
    /// `#[repr(C, align(16))]` declaration.
    #[test]
    fn check_vm_invoke_argument_alignment() {
        assert_eq!(align_of::<VMInvokeArgument>(), 16);
    }

    /// An argument cell is sized exactly like a global definition cell
    /// (both are 16-byte value slots).
    //
    // Renamed from `check_vmglobal_definition_offsets`, a copy/paste of
    // the test name in `test_vmglobal_definition` that did not describe
    // what this test actually checks.
    #[test]
    fn check_vm_invoke_argument_size() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMInvokeArgument>(),
            usize::from(offsets.ptr.size_of_vmglobal_definition())
        );
    }
}
impl VMInvokeArgument {
pub fn new() -> Self {
Self([0; 16])
}
}
/// Execution limits and trap-recovery bookkeeping shared with generated
/// code.
///
/// `#[repr(C)]` because generated code reads/writes these fields directly;
/// offsets are checked in `test_vmruntime_limits` below. Fields are
/// `UnsafeCell` because they are mutated through raw pointers.
/// NOTE(review): presumably written by JIT code and signal handlers —
/// confirm against the code that takes these addresses.
#[derive(Debug)]
#[repr(C)]
pub struct VMRuntimeLimits {
    /// Stack limit checked by generated code (defaults to `usize::MAX`,
    /// i.e. effectively no limit — see `Default` below).
    pub stack_limit: UnsafeCell<usize>,
    /// Running count of fuel consumed (signed; defaults to 0).
    pub fuel_consumed: UnsafeCell<i64>,
    /// Epoch deadline value (defaults to 0).
    pub epoch_deadline: UnsafeCell<u64>,
    /// Frame pointer recorded at the last exit from wasm.
    pub last_wasm_exit_fp: UnsafeCell<usize>,
    /// Program counter recorded at the last exit from wasm.
    pub last_wasm_exit_pc: UnsafeCell<usize>,
    /// Stack pointer recorded at the last entry into wasm.
    pub last_wasm_entry_sp: UnsafeCell<usize>,
}

// SAFETY: interior mutability here is accessed through raw pointers;
// NOTE(review): soundness presumably relies on runtime-level discipline
// about who writes these fields when — not provable from this file.
unsafe impl Send for VMRuntimeLimits {}
unsafe impl Sync for VMRuntimeLimits {}
impl Default for VMRuntimeLimits {
    /// Default limits: stack limit at `usize::MAX` (effectively
    /// unlimited until configured), zero fuel consumed, zero epoch
    /// deadline, and cleared wasm entry/exit registers.
    fn default() -> VMRuntimeLimits {
        VMRuntimeLimits {
            // `usize::MAX` replaces the legacy `usize::max_value()` form,
            // matching the `u32::MAX` style used elsewhere in this file.
            stack_limit: UnsafeCell::new(usize::MAX),
            fuel_consumed: UnsafeCell::new(0),
            epoch_deadline: UnsafeCell::new(0),
            last_wasm_exit_fp: UnsafeCell::new(0),
            last_wasm_exit_pc: UnsafeCell::new(0),
            last_wasm_entry_sp: UnsafeCell::new(0),
        }
    }
}
#[cfg(test)]
mod test_vmruntime_limits {
    use super::VMRuntimeLimits;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmtime_environ::{Module, PtrSize, VMOffsets};

    /// The vmctx-relative offset of the runtime-limits pointer must agree
    /// between the two ways `wasmtime_environ` exposes it.
    #[test]
    fn vmctx_runtime_limits_offset() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_runtime_limits(), offsets.ptr.vmcontext_runtime_limits().into());
    }

    /// The Rust layout of `VMRuntimeLimits` must agree with the field
    /// offsets that `wasmtime_environ` computes.
    #[test]
    fn field_offsets() {
        let module = Module::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offset_of!(VMRuntimeLimits, stack_limit), usize::from(offsets.ptr.vmruntime_limits_stack_limit()));
        assert_eq!(offset_of!(VMRuntimeLimits, fuel_consumed), usize::from(offsets.ptr.vmruntime_limits_fuel_consumed()));
        assert_eq!(offset_of!(VMRuntimeLimits, epoch_deadline), usize::from(offsets.ptr.vmruntime_limits_epoch_deadline()));
        assert_eq!(offset_of!(VMRuntimeLimits, last_wasm_exit_fp), usize::from(offsets.ptr.vmruntime_limits_last_wasm_exit_fp()));
        assert_eq!(offset_of!(VMRuntimeLimits, last_wasm_exit_pc), usize::from(offsets.ptr.vmruntime_limits_last_wasm_exit_pc()));
        assert_eq!(offset_of!(VMRuntimeLimits, last_wasm_entry_sp), usize::from(offsets.ptr.vmruntime_limits_last_wasm_entry_sp()));
    }
}
/// The per-instance context handed to generated code, 16-byte aligned.
///
/// No fields are declared here: the actual contents are laid out
/// dynamically (NOTE(review): presumably per `VMOffsets`, which the tests
/// in this file validate against), so this type is only ever used behind
/// pointers. `PhantomPinned` keeps it `!Unpin` since its address is what
/// matters.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {
    pub _marker: marker::PhantomPinned,
}
impl VMContext {
    /// Cast an opaque context pointer back to a concrete `VMContext`.
    ///
    /// In debug builds this checks the `magic` field against
    /// `VMCONTEXT_MAGIC` to catch casts of the wrong context kind.
    ///
    /// # Safety
    ///
    /// `opaque` must be a valid, dereferenceable pointer that actually
    /// points at a `VMContext` (not some other context flavor).
    #[inline]
    pub unsafe fn from_opaque(opaque: *mut VMOpaqueContext) -> *mut VMContext {
        debug_assert_eq!((*opaque).magic, VMCONTEXT_MAGIC);
        opaque.cast()
    }
}
/// An untyped, 16-byte value slot used by the array calling convention.
///
/// All fields are stored in little-endian byte order regardless of host
/// endianness — the constructors below apply `to_le()` and the getters
/// `from_le()`. Floats are stored as their raw bit patterns (`u32`/`u64`).
#[allow(missing_docs)]
#[repr(C)]
#[derive(Copy, Clone)]
pub union ValRaw {
    i32: i32,
    i64: i64,
    f32: u32,
    f64: u64,
    v128: [u8; 16],
    funcref: *mut c_void,
    // GC references are represented as 32-bit values here; non-zero values
    // are only permitted when the `gc` feature is enabled (asserted in the
    // constructors/getters below).
    externref: u32,
    anyref: u32,
}

// Compile-time layout check: exactly 16 bytes, 8-byte aligned, so arrays of
// `ValRaw` have the layout generated code expects.
const _: () = {
    assert!(std::mem::size_of::<ValRaw>() == 16);
    assert!(std::mem::align_of::<ValRaw>() == 8);
};

// SAFETY: the `funcref` pointer stored here is never dereferenced by this
// type itself; `ValRaw` is plain data in transit.
unsafe impl Send for ValRaw {}
unsafe impl Sync for ValRaw {}
impl std::fmt::Debug for ValRaw {
    /// Print every union interpretation of the bytes, since the actual
    /// type of the stored value is not known here.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Zero-padded hex wrapper sized to the integer's width, so e.g. a
        // u32 always prints as 8 hex digits.
        struct Hex<T>(T);
        impl<T: std::fmt::LowerHex> std::fmt::Debug for Hex<T> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                let bytes = std::mem::size_of::<T>();
                let hex_digits_per_byte = 2;
                let hex_digits = bytes * hex_digits_per_byte;
                write!(f, "0x{:0width$x}", self.0, width = hex_digits)
            }
        }
        // SAFETY: reading any union field is sound because every field
        // covers only the 16 initialized bytes of the slot.
        unsafe {
            f.debug_struct("ValRaw")
                .field("i32", &Hex(self.i32))
                .field("i64", &Hex(self.i64))
                .field("f32", &Hex(self.f32))
                .field("f64", &Hex(self.f64))
                .field("v128", &Hex(u128::from_le_bytes(self.v128)))
                .field("funcref", &self.funcref)
                .field("externref", &Hex(self.externref))
                .field("anyref", &Hex(self.anyref))
                .finish()
        }
    }
}
impl ValRaw {
    // Constructors store values in little-endian byte order; getters
    // convert back. Integer constructors route through `u64` so that all 8
    // low bytes of the slot are defined (zero-extension for 32-bit values).

    /// Create a slot holding an `i32` (zero-extended through `u64`).
    #[inline]
    pub fn i32(i: i32) -> ValRaw {
        ValRaw::u64(i.unsigned().into())
    }

    /// Create a slot holding an `i64`, stored little-endian.
    #[inline]
    pub fn i64(i: i64) -> ValRaw {
        ValRaw { i64: i.to_le() }
    }

    /// Create a slot holding a `u32` (zero-extended through `u64`).
    #[inline]
    pub fn u32(i: u32) -> ValRaw {
        ValRaw::u64(i.into())
    }

    /// Create a slot holding a `u64` (delegates to the `i64` field).
    #[inline]
    pub fn u64(i: u64) -> ValRaw {
        ValRaw::i64(i as i64)
    }

    /// Create a slot holding the raw bits of an `f32` (zero-extended
    /// through `u64` like the integer forms).
    #[inline]
    pub fn f32(i: u32) -> ValRaw {
        ValRaw::u64(i.into())
    }

    /// Create a slot holding the raw bits of an `f64`, stored
    /// little-endian.
    #[inline]
    pub fn f64(i: u64) -> ValRaw {
        ValRaw { f64: i.to_le() }
    }

    /// Create a slot holding a v128, stored as little-endian bytes.
    #[inline]
    pub fn v128(i: u128) -> ValRaw {
        ValRaw {
            v128: i.to_le_bytes(),
        }
    }

    /// Create a slot holding a raw `funcref` pointer; only the address is
    /// byte-swapped (via strict-provenance `map_addr`), preserving
    /// provenance.
    #[inline]
    pub fn funcref(i: *mut c_void) -> ValRaw {
        ValRaw {
            funcref: Strict::map_addr(i, |i| i.to_le()),
        }
    }

    /// Create a slot holding an `externref`; non-zero values require the
    /// `gc` feature.
    #[inline]
    pub fn externref(e: u32) -> ValRaw {
        assert!(cfg!(feature = "gc") || e == 0);
        ValRaw {
            externref: e.to_le(),
        }
    }

    /// Create a slot holding an `anyref`; non-zero values require the
    /// `gc` feature.
    #[inline]
    pub fn anyref(r: u32) -> ValRaw {
        assert!(cfg!(feature = "gc") || r == 0);
        ValRaw { anyref: r.to_le() }
    }

    /// Read the slot as an `i32`.
    #[inline]
    pub fn get_i32(&self) -> i32 {
        unsafe { i32::from_le(self.i32) }
    }

    /// Read the slot as an `i64`.
    #[inline]
    pub fn get_i64(&self) -> i64 {
        unsafe { i64::from_le(self.i64) }
    }

    /// Read the slot as a `u32` (reinterpreted from the `i32` view).
    #[inline]
    pub fn get_u32(&self) -> u32 {
        self.get_i32().unsigned()
    }

    /// Read the slot as a `u64` (reinterpreted from the `i64` view).
    #[inline]
    pub fn get_u64(&self) -> u64 {
        self.get_i64().unsigned()
    }

    /// Read the slot as the raw bits of an `f32`.
    #[inline]
    pub fn get_f32(&self) -> u32 {
        unsafe { u32::from_le(self.f32) }
    }

    /// Read the slot as the raw bits of an `f64`.
    #[inline]
    pub fn get_f64(&self) -> u64 {
        unsafe { u64::from_le(self.f64) }
    }

    /// Read the slot as a v128 value.
    #[inline]
    pub fn get_v128(&self) -> u128 {
        unsafe { u128::from_le_bytes(self.v128) }
    }

    /// Read the slot as a raw `funcref` pointer (address un-swapped from
    /// little-endian, provenance preserved).
    #[inline]
    pub fn get_funcref(&self) -> *mut c_void {
        unsafe { Strict::map_addr(self.funcref, |i| usize::from_le(i)) }
    }

    /// Read the slot as an `externref`; non-zero values require the `gc`
    /// feature.
    #[inline]
    pub fn get_externref(&self) -> u32 {
        let externref = u32::from_le(unsafe { self.externref });
        assert!(cfg!(feature = "gc") || externref == 0);
        externref
    }

    /// Read the slot as an `anyref`; non-zero values require the `gc`
    /// feature.
    #[inline]
    pub fn get_anyref(&self) -> u32 {
        let anyref = u32::from_le(unsafe { self.anyref });
        assert!(cfg!(feature = "gc") || anyref == 0);
        anyref
    }
}
/// Type-erased context pointer target: the common prefix of every concrete
/// context type (`VMContext`, the host-func contexts below).
///
/// The leading `magic` word identifies the concrete kind; `from_opaque`
/// implementations check it in debug builds (e.g. `VMCONTEXT_MAGIC` for
/// `VMContext`). `PhantomPinned` keeps the type `!Unpin` since only its
/// address is meaningful.
pub struct VMOpaqueContext {
    pub(crate) magic: u32,
    _marker: marker::PhantomPinned,
}
impl VMOpaqueContext {
    /// Erase a `VMContext` pointer into an opaque context pointer.
    ///
    /// Always safe: this only widens the pointer's type; it is the reverse
    /// (`VMContext::from_opaque`) that must be justified.
    #[inline]
    pub fn from_vmcontext(ptr: *mut VMContext) -> *mut VMOpaqueContext {
        ptr as *mut VMOpaqueContext
    }

    /// Erase a `VMArrayCallHostFuncContext` pointer into an opaque
    /// context pointer.
    #[inline]
    pub fn from_vm_array_call_host_func_context(
        ptr: *mut VMArrayCallHostFuncContext,
    ) -> *mut VMOpaqueContext {
        ptr as *mut VMOpaqueContext
    }

    /// Erase a `VMNativeCallHostFuncContext` pointer into an opaque
    /// context pointer.
    #[inline]
    pub fn from_vm_native_call_host_func_context(
        ptr: *mut VMNativeCallHostFuncContext,
    ) -> *mut VMOpaqueContext {
        ptr as *mut VMOpaqueContext
    }
}