use crate::{
Store, VMCallerCheckedAnyfunc, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
VMOpaqueContext, VMSharedSignatureIndex, ValRaw,
};
use memoffset::offset_of;
use std::alloc::{self, Layout};
use std::marker;
use std::mem;
use std::ops::Deref;
use std::ptr::{self, NonNull};
use wasmtime_environ::component::{
Component, LoweredIndex, RuntimeAlwaysTrapIndex, RuntimeComponentInstanceIndex,
RuntimeMemoryIndex, RuntimePostReturnIndex, RuntimeReallocIndex, RuntimeTranscoderIndex,
StringEncoding, VMComponentOffsets, FLAG_MAY_ENTER, FLAG_MAY_LEAVE, FLAG_NEEDS_POST_RETURN,
VMCOMPONENT_MAGIC,
};
use wasmtime_environ::HostPtr;
/// Debug-build poison value pre-written into lazily-initialized vmctx slots
/// (see `initialize_vmctx`) so accessor/setter `debug_assert!`s can detect
/// use-before-initialization. The `as usize` cast truncates on 32-bit targets,
/// which still yields a recognizable `0xbeef_beef` pattern.
const INVALID_PTR: usize = 0xdead_dead_beef_beef_u64 as usize;
mod transcode;
/// Runtime state for an instantiated component.
///
/// `#[repr(C)]` is load-bearing: `vmctx` must be the *last* field because the
/// variable-size raw vmctx storage (sized by `offsets.size_of_vmctx()`) is
/// allocated immediately after this header — see `alloc_layout` and `new_at`.
#[repr(C)]
pub struct ComponentInstance {
    // Precomputed offsets of every field within the trailing vmctx area.
    offsets: VMComponentOffsets<HostPtr>,
    // Zero-sized-ish marker denoting the start of the trailing vmctx data;
    // all access goes through `vmctx_plus_offset`.
    vmctx: VMComponentContext,
}
/// Signature of the host trampoline invoked when a lowered import is called.
///
/// NOTE(review): parameter semantics are inferred from their names and from
/// how `VMLowering` pairs `callee` with `data`; confirm against the code that
/// performs the actual call. `opt_memory`/`opt_realloc` are presumably null
/// when the lowering options omitted them — verify at the call site.
pub type VMLoweringCallee = extern "C" fn(
    vmctx: *mut VMOpaqueContext,
    data: *mut u8,
    flags: InstanceFlags,
    opt_memory: *mut VMMemoryDefinition,
    opt_realloc: *mut VMCallerCheckedAnyfunc,
    string_encoding: StringEncoding,
    args_and_results: *mut ValRaw,
    nargs_and_results: usize,
);
/// A (function pointer, host data) pair describing one lowered import.
///
/// `#[repr(C)]` because this is stored directly in the vmctx (see
/// `ComponentInstance::lowering`/`set_lowering`) and its two fields are also
/// addressed individually via `lowering_callee`/`lowering_data` offsets.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct VMLowering {
    /// The host trampoline to invoke.
    pub callee: VMLoweringCallee,
    /// Opaque host data forwarded as the trampoline's `data` argument.
    pub data: *mut u8,
}
/// Opaque marker type for the raw vmctx storage trailing a
/// `ComponentInstance`.
///
/// The alignment attributes keep this type pointer-aligned on both 32- and
/// 64-bit targets since the vmctx area holds pointers. `PhantomPinned` makes
/// the type `!Unpin`: the vmctx must never be moved because raw pointers into
/// it are handed out.
#[repr(C)]
#[cfg_attr(target_pointer_width = "32", repr(align(4)))]
#[cfg_attr(target_pointer_width = "64", repr(align(8)))]
pub struct VMComponentContext {
    _marker: marker::PhantomPinned,
}
impl ComponentInstance {
    /// Computes the allocation layout for an instance: the fixed-size header
    /// (`Self`) followed by the variable-size vmctx area reported by
    /// `offsets.size_of_vmctx()`. Panics (via `unwrap`) on overflow.
    fn alloc_layout(offsets: &VMComponentOffsets<HostPtr>) -> Layout {
        let size = mem::size_of::<Self>()
            .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
            .unwrap();
        let align = mem::align_of::<Self>();
        Layout::from_size_align(size, align).unwrap()
    }

    /// Constructs a `ComponentInstance` in place at `ptr` and then fills in
    /// the trailing vmctx via `initialize_vmctx`.
    ///
    /// # Safety
    /// `ptr` must be valid, suitably aligned, and backed by at least
    /// `alloc_size` bytes; `alloc_size` is asserted to cover
    /// `alloc_layout(&offsets)`. `store` must be a valid store pointer.
    unsafe fn new_at(
        ptr: *mut ComponentInstance,
        alloc_size: usize,
        offsets: VMComponentOffsets<HostPtr>,
        store: *mut dyn Store,
    ) {
        // Guard against a caller allocating with mismatched offsets.
        assert!(alloc_size >= Self::alloc_layout(&offsets).size());
        ptr::write(
            ptr,
            ComponentInstance {
                offsets,
                vmctx: VMComponentContext {
                    _marker: marker::PhantomPinned,
                },
            },
        );
        (*ptr).initialize_vmctx(store);
    }

    /// Raw pointer to the start of the trailing vmctx area.
    fn vmctx(&self) -> *mut VMComponentContext {
        &self.vmctx as *const VMComponentContext as *mut VMComponentContext
    }

    /// Returns `vmctx() + offset` as a typed pointer.
    ///
    /// # Safety
    /// `offset` must be an in-bounds, correctly aligned offset for `T` as
    /// produced by `self.offsets`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        self.vmctx()
            .cast::<u8>()
            .add(usize::try_from(offset).unwrap())
            .cast()
    }

    /// Returns a handle to the flags global of component instance `instance`.
    pub fn instance_flags(&self, instance: RuntimeComponentInstanceIndex) -> InstanceFlags {
        unsafe { InstanceFlags(self.vmctx_plus_offset(self.offsets.instance_flags(instance))) }
    }

    /// Reads back the store pointer written by `initialize_vmctx`; asserted
    /// non-null since it is set unconditionally at construction.
    pub fn store(&self) -> *mut dyn Store {
        unsafe {
            let ret = *self.vmctx_plus_offset::<*mut dyn Store>(self.offsets.store());
            assert!(!ret.is_null());
            ret
        }
    }

    /// Returns the runtime memory at `idx`.
    ///
    /// Debug-asserts the slot was initialized (i.e. not still the
    /// `INVALID_PTR` poison written by `initialize_vmctx`).
    pub fn runtime_memory(&self, idx: RuntimeMemoryIndex) -> *mut VMMemoryDefinition {
        unsafe {
            let ret = *self.vmctx_plus_offset(self.offsets.runtime_memory(idx));
            debug_assert!(ret as usize != INVALID_PTR);
            ret
        }
    }

    /// Returns the `realloc` function at `idx`; debug-asserts the slot was
    /// initialized.
    pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMCallerCheckedAnyfunc> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_realloc(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }

    /// Returns the `post-return` function at `idx`; debug-asserts the slot
    /// was initialized.
    pub fn runtime_post_return(
        &self,
        idx: RuntimePostReturnIndex,
    ) -> NonNull<VMCallerCheckedAnyfunc> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_post_return(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }

    /// Returns the `VMLowering` (callee + data) for lowered import `idx`;
    /// debug-asserts both fields were initialized.
    pub fn lowering(&self, idx: LoweredIndex) -> VMLowering {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VMLowering>(self.offsets.lowering(idx));
            debug_assert!(ret.callee as usize != INVALID_PTR);
            debug_assert!(ret.data as usize != INVALID_PTR);
            ret
        }
    }

    /// Anyfunc for lowered import `idx`.
    pub fn lowering_anyfunc(&self, idx: LoweredIndex) -> NonNull<VMCallerCheckedAnyfunc> {
        unsafe { self.anyfunc(self.offsets.lowering_anyfunc(idx)) }
    }

    /// Anyfunc for always-trapping function `idx`.
    pub fn always_trap_anyfunc(
        &self,
        idx: RuntimeAlwaysTrapIndex,
    ) -> NonNull<VMCallerCheckedAnyfunc> {
        unsafe { self.anyfunc(self.offsets.always_trap_anyfunc(idx)) }
    }

    /// Anyfunc for string-transcoder function `idx`.
    pub fn transcoder_anyfunc(
        &self,
        idx: RuntimeTranscoderIndex,
    ) -> NonNull<VMCallerCheckedAnyfunc> {
        unsafe { self.anyfunc(self.offsets.transcoder_anyfunc(idx)) }
    }

    /// Shared helper for the three anyfunc accessors above: returns a pointer
    /// *into* the vmctx (the anyfunc is stored inline, not behind a pointer),
    /// debug-asserting its fields were initialized by `set_anyfunc`.
    unsafe fn anyfunc(&self, offset: u32) -> NonNull<VMCallerCheckedAnyfunc> {
        let ret = self.vmctx_plus_offset::<VMCallerCheckedAnyfunc>(offset);
        debug_assert!((*ret).func_ptr.as_ptr() as usize != INVALID_PTR);
        debug_assert!((*ret).vmctx as usize != INVALID_PTR);
        NonNull::new(ret).unwrap()
    }

    /// Records the runtime memory for `idx`; each slot may be written once
    /// (debug-asserted to still hold the poison value).
    pub fn set_runtime_memory(&mut self, idx: RuntimeMemoryIndex, ptr: *mut VMMemoryDefinition) {
        unsafe {
            debug_assert!(!ptr.is_null());
            let storage = self.vmctx_plus_offset(self.offsets.runtime_memory(idx));
            debug_assert!(*storage as usize == INVALID_PTR);
            *storage = ptr;
        }
    }

    /// Records the `realloc` function for `idx`; write-once (see above).
    pub fn set_runtime_realloc(
        &mut self,
        idx: RuntimeReallocIndex,
        ptr: NonNull<VMCallerCheckedAnyfunc>,
    ) {
        unsafe {
            let storage = self.vmctx_plus_offset(self.offsets.runtime_realloc(idx));
            debug_assert!(*storage as usize == INVALID_PTR);
            *storage = ptr.as_ptr();
        }
    }

    /// Records the `post-return` function for `idx`; write-once (see above).
    pub fn set_runtime_post_return(
        &mut self,
        idx: RuntimePostReturnIndex,
        ptr: NonNull<VMCallerCheckedAnyfunc>,
    ) {
        unsafe {
            let storage = self.vmctx_plus_offset(self.offsets.runtime_post_return(idx));
            debug_assert!(*storage as usize == INVALID_PTR);
            *storage = ptr.as_ptr();
        }
    }

    /// Records both the `VMLowering` and its associated anyfunc for lowered
    /// import `idx`; write-once (both callee and data slots are checked).
    pub fn set_lowering(
        &mut self,
        idx: LoweredIndex,
        lowering: VMLowering,
        anyfunc_func_ptr: NonNull<VMFunctionBody>,
        anyfunc_type_index: VMSharedSignatureIndex,
    ) {
        unsafe {
            debug_assert!(
                *self.vmctx_plus_offset::<usize>(self.offsets.lowering_callee(idx)) == INVALID_PTR
            );
            debug_assert!(
                *self.vmctx_plus_offset::<usize>(self.offsets.lowering_data(idx)) == INVALID_PTR
            );
            *self.vmctx_plus_offset(self.offsets.lowering(idx)) = lowering;
            self.set_anyfunc(
                self.offsets.lowering_anyfunc(idx),
                anyfunc_func_ptr,
                anyfunc_type_index,
            );
        }
    }

    /// Records the anyfunc for always-trapping function `idx`.
    pub fn set_always_trap(
        &mut self,
        idx: RuntimeAlwaysTrapIndex,
        func_ptr: NonNull<VMFunctionBody>,
        type_index: VMSharedSignatureIndex,
    ) {
        unsafe { self.set_anyfunc(self.offsets.always_trap_anyfunc(idx), func_ptr, type_index) }
    }

    /// Records the anyfunc for transcoder function `idx`.
    pub fn set_transcoder(
        &mut self,
        idx: RuntimeTranscoderIndex,
        func_ptr: NonNull<VMFunctionBody>,
        type_index: VMSharedSignatureIndex,
    ) {
        unsafe { self.set_anyfunc(self.offsets.transcoder_anyfunc(idx), func_ptr, type_index) }
    }

    /// Writes a `VMCallerCheckedAnyfunc` inline at `offset`, with this
    /// component's own vmctx as its context; write-once (poison-checked).
    unsafe fn set_anyfunc(
        &mut self,
        offset: u32,
        func_ptr: NonNull<VMFunctionBody>,
        type_index: VMSharedSignatureIndex,
    ) {
        debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
        let vmctx = self.vmctx();
        *self.vmctx_plus_offset(offset) = VMCallerCheckedAnyfunc {
            func_ptr,
            type_index,
            vmctx: VMOpaqueContext::from_vmcomponent(vmctx),
        };
    }

    /// One-time initialization of the trailing vmctx area:
    /// - writes the magic value, transcode-libcall table, store pointer, and
    ///   runtime limits;
    /// - initializes every component instance's flags global to
    ///   `FLAG_MAY_ENTER | FLAG_MAY_LEAVE`;
    /// - in debug builds, poisons every lazily-filled slot with `INVALID_PTR`
    ///   so the write-once/read-after-write `debug_assert!`s elsewhere can
    ///   catch ordering bugs.
    unsafe fn initialize_vmctx(&mut self, store: *mut dyn Store) {
        *self.vmctx_plus_offset(self.offsets.magic()) = VMCOMPONENT_MAGIC;
        *self.vmctx_plus_offset(self.offsets.transcode_libcalls()) =
            &transcode::VMBuiltinTranscodeArray::INIT;
        *self.vmctx_plus_offset(self.offsets.store()) = store;
        *self.vmctx_plus_offset(self.offsets.limits()) = (*store).vmruntime_limits();
        for i in 0..self.offsets.num_runtime_component_instances {
            let i = RuntimeComponentInstanceIndex::from_u32(i);
            let mut def = VMGlobalDefinition::new();
            *def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
            *self.instance_flags(i).0 = def;
        }
        if cfg!(debug_assertions) {
            for i in 0..self.offsets.num_lowerings {
                let i = LoweredIndex::from_u32(i);
                let offset = self.offsets.lowering_callee(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
                let offset = self.offsets.lowering_data(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
                let offset = self.offsets.lowering_anyfunc(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_always_trap {
                let i = RuntimeAlwaysTrapIndex::from_u32(i);
                let offset = self.offsets.always_trap_anyfunc(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_transcoders {
                let i = RuntimeTranscoderIndex::from_u32(i);
                let offset = self.offsets.transcoder_anyfunc(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_memories {
                let i = RuntimeMemoryIndex::from_u32(i);
                let offset = self.offsets.runtime_memory(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_reallocs {
                let i = RuntimeReallocIndex::from_u32(i);
                let offset = self.offsets.runtime_realloc(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_post_returns {
                let i = RuntimePostReturnIndex::from_u32(i);
                let offset = self.offsets.runtime_post_return(i);
                *self.vmctx_plus_offset(offset) = INVALID_PTR;
            }
        }
    }
}
impl VMComponentContext {
    /// Recovers the owning `ComponentInstance` from a vmctx pointer by
    /// subtracting the `vmctx` field offset — the inverse of
    /// `ComponentInstance::vmctx`. Only valid for vmctx pointers that really
    /// live inside a `ComponentInstance`.
    pub fn instance(&self) -> *mut ComponentInstance {
        unsafe {
            (self as *const Self as *mut u8)
                .offset(-(offset_of!(ComponentInstance, vmctx) as isize))
                as *mut ComponentInstance
        }
    }
}
/// Owning handle for a heap-allocated `ComponentInstance` (header plus its
/// trailing vmctx), created by `OwnedComponentInstance::new` and freed in
/// `Drop`.
pub struct OwnedComponentInstance {
    // Uniquely-owned allocation; never null.
    ptr: ptr::NonNull<ComponentInstance>,
}
// SAFETY: `OwnedComponentInstance` is a unique owner of its allocation; the
// `where` bounds gate these impls on `ComponentInstance` itself being
// Send/Sync, so thread-safety is only claimed when the underlying data
// already provides it.
unsafe impl Send for OwnedComponentInstance where ComponentInstance: Send {}
unsafe impl Sync for OwnedComponentInstance where ComponentInstance: Sync {}
impl OwnedComponentInstance {
    /// Allocates (zeroed) and initializes a new instance for `component`.
    ///
    /// The layout is derived from the component's `VMComponentOffsets`;
    /// `Drop` recomputes the same layout from the stored offsets to free it.
    pub fn new(component: &Component, store: *mut dyn Store) -> OwnedComponentInstance {
        let offsets = VMComponentOffsets::new(HostPtr, component);
        let layout = ComponentInstance::alloc_layout(&offsets);
        unsafe {
            let ptr = alloc::alloc_zeroed(layout) as *mut ComponentInstance;
            // `unwrap` turns allocation failure into a panic.
            let ptr = ptr::NonNull::new(ptr).unwrap();
            ComponentInstance::new_at(ptr.as_ptr(), layout.size(), offsets, store);
            OwnedComponentInstance { ptr }
        }
    }

    /// Mutable access to the owned instance.
    ///
    /// # Safety
    /// Marked unsafe because the returned `&mut` must not alias any other
    /// outstanding borrow of the instance (e.g. via `Deref`).
    unsafe fn instance_mut(&mut self) -> &mut ComponentInstance {
        &mut *self.ptr.as_ptr()
    }

    // The following setters simply forward to the write-once setters on
    // `ComponentInstance`; see those for the per-slot invariants.

    /// See `ComponentInstance::set_runtime_memory`.
    pub fn set_runtime_memory(&mut self, idx: RuntimeMemoryIndex, ptr: *mut VMMemoryDefinition) {
        unsafe { self.instance_mut().set_runtime_memory(idx, ptr) }
    }

    /// See `ComponentInstance::set_runtime_realloc`.
    pub fn set_runtime_realloc(
        &mut self,
        idx: RuntimeReallocIndex,
        ptr: NonNull<VMCallerCheckedAnyfunc>,
    ) {
        unsafe { self.instance_mut().set_runtime_realloc(idx, ptr) }
    }

    /// See `ComponentInstance::set_runtime_post_return`.
    pub fn set_runtime_post_return(
        &mut self,
        idx: RuntimePostReturnIndex,
        ptr: NonNull<VMCallerCheckedAnyfunc>,
    ) {
        unsafe { self.instance_mut().set_runtime_post_return(idx, ptr) }
    }

    /// See `ComponentInstance::set_lowering`.
    pub fn set_lowering(
        &mut self,
        idx: LoweredIndex,
        lowering: VMLowering,
        anyfunc_func_ptr: NonNull<VMFunctionBody>,
        anyfunc_type_index: VMSharedSignatureIndex,
    ) {
        unsafe {
            self.instance_mut()
                .set_lowering(idx, lowering, anyfunc_func_ptr, anyfunc_type_index)
        }
    }

    /// See `ComponentInstance::set_always_trap`.
    pub fn set_always_trap(
        &mut self,
        idx: RuntimeAlwaysTrapIndex,
        func_ptr: NonNull<VMFunctionBody>,
        type_index: VMSharedSignatureIndex,
    ) {
        unsafe {
            self.instance_mut()
                .set_always_trap(idx, func_ptr, type_index)
        }
    }

    /// See `ComponentInstance::set_transcoder`.
    pub fn set_transcoder(
        &mut self,
        idx: RuntimeTranscoderIndex,
        func_ptr: NonNull<VMFunctionBody>,
        type_index: VMSharedSignatureIndex,
    ) {
        unsafe {
            self.instance_mut()
                .set_transcoder(idx, func_ptr, type_index)
        }
    }
}
impl Deref for OwnedComponentInstance {
    type Target = ComponentInstance;

    /// Shared access to the owned instance.
    fn deref(&self) -> &ComponentInstance {
        // SAFETY: `self.ptr` is a live, uniquely-owned allocation for the
        // lifetime of `self`, so handing out a shared borrow tied to `&self`
        // is sound.
        unsafe { self.ptr.as_ref() }
    }
}
impl Drop for OwnedComponentInstance {
    fn drop(&mut self) {
        // Recompute the layout from the stored offsets (reached via `Deref`)
        // *before* dropping the instance, since `drop_in_place` invalidates
        // `self.offsets`.
        let layout = ComponentInstance::alloc_layout(&self.offsets);
        unsafe {
            ptr::drop_in_place(self.ptr.as_ptr());
            // Must match the `alloc_zeroed(layout)` performed in `new`.
            alloc::dealloc(self.ptr.as_ptr().cast(), layout);
        }
    }
}
impl VMComponentContext {
    /// Downcasts an opaque vmctx pointer to a component vmctx.
    ///
    /// # Safety
    /// `opaque` must be valid to read; in debug builds the leading magic
    /// value is checked to confirm it really is a component context.
    #[inline]
    pub unsafe fn from_opaque(opaque: *mut VMOpaqueContext) -> *mut VMComponentContext {
        debug_assert_eq!((*opaque).magic, VMCOMPONENT_MAGIC);
        opaque.cast()
    }
}
impl VMOpaqueContext {
    /// Upcasts a component vmctx pointer to the opaque context type.
    /// Always safe: the component vmctx begins with the magic field that
    /// `from_opaque` checks on the way back down.
    #[inline]
    pub fn from_vmcomponent(ptr: *mut VMComponentContext) -> *mut VMOpaqueContext {
        ptr.cast()
    }
}
/// Raw handle to one component instance's flags global (stored in the vmctx;
/// see `ComponentInstance::instance_flags`). `#[repr(transparent)]` because
/// this is passed by value across the `VMLoweringCallee` ABI boundary.
#[allow(missing_docs)]
#[repr(transparent)]
pub struct InstanceFlags(*mut VMGlobalDefinition);
#[allow(missing_docs)]
impl InstanceFlags {
    /// Reads the raw `i32` flags value behind the pointer.
    ///
    /// SAFETY contract (shared by all methods here): `self.0` must point to a
    /// live, initialized `VMGlobalDefinition`.
    #[inline]
    unsafe fn bits(&self) -> i32 {
        *(*self.0).as_i32()
    }

    /// Sets (`on == true`) or clears (`on == false`) `flag` in place.
    #[inline]
    unsafe fn set_flag(&mut self, flag: i32, on: bool) {
        let bits = (*self.0).as_i32_mut();
        if on {
            *bits |= flag;
        } else {
            *bits &= !flag;
        }
    }

    #[inline]
    pub unsafe fn may_leave(&self) -> bool {
        self.bits() & FLAG_MAY_LEAVE != 0
    }

    #[inline]
    pub unsafe fn set_may_leave(&mut self, val: bool) {
        self.set_flag(FLAG_MAY_LEAVE, val)
    }

    #[inline]
    pub unsafe fn may_enter(&self) -> bool {
        self.bits() & FLAG_MAY_ENTER != 0
    }

    #[inline]
    pub unsafe fn set_may_enter(&mut self, val: bool) {
        self.set_flag(FLAG_MAY_ENTER, val)
    }

    #[inline]
    pub unsafe fn needs_post_return(&self) -> bool {
        self.bits() & FLAG_NEEDS_POST_RETURN != 0
    }

    #[inline]
    pub unsafe fn set_needs_post_return(&mut self, val: bool) {
        self.set_flag(FLAG_NEEDS_POST_RETURN, val)
    }

    /// Escape hatch: the underlying pointer to the flags global.
    #[inline]
    pub fn as_raw(&self) -> *mut VMGlobalDefinition {
        self.0
    }
}