use crate::{
SendSyncPtr, Store, VMArrayCallFunction, VMFuncRef, VMGlobalDefinition, VMMemoryDefinition,
VMNativeCallFunction, VMOpaqueContext, VMSharedTypeIndex, VMWasmCallFunction, ValRaw,
};
use anyhow::Result;
use memoffset::offset_of;
use sptr::Strict;
use std::alloc::{self, Layout};
use std::any::Any;
use std::marker;
use std::mem;
use std::ops::Deref;
use std::ptr::{self, NonNull};
use std::sync::Arc;
use wasmtime_environ::component::*;
use wasmtime_environ::{HostPtr, PrimaryMap};
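// Sentinel value written in debug builds into pointer-sized slots of a
// `VMComponentContext` so that reads of not-yet-initialized fields trip the
// `debug_assert!`s below instead of silently using garbage.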
const INVALID_PTR: usize = 0xdead_dead_beef_beef_u64 as usize;
mod libcalls;
mod resources;
pub use self::resources::{CallContexts, ResourceTable, ResourceTables};
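/// Runtime representation of a component instance along with all of the state
/// that instance needs at runtime.
///
/// This type never exists by-value; it is always allocated behind a pointer
/// with extra trailing space for the variably-sized `VMComponentContext`
/// described by `offsets`.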
#[repr(C)]
pub struct ComponentInstance {
    /// Size and offset information for the trailing `VMComponentContext`.
    offsets: VMComponentOffsets<HostPtr>,
    /// Self-referential pointer to the trailing `vmctx` field, used by
    /// `Self::vmctx` to produce a pointer with mutable provenance.
    vmctx_self_reference: SendSyncPtr<VMComponentContext>,
    /// Runtime type information about this component.
    runtime_info: Arc<dyn ComponentRuntimeInfo>,
    /// Per-`TypeResourceTableIndex` resource state, manipulated through
    /// `Self::resource_tables`.
    component_resource_tables: PrimaryMap<TypeResourceTableIndex, ResourceTable>,
    /// Type-erased per-resource type information.
    resource_types: Arc<dyn Any + Send + Sync>,
    /// Start of the trailing, variably-sized `VMComponentContext`; this must
    /// remain the last field, as `from_vmctx` assumes.
    vmctx: VMComponentContext,
}
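/// Signature of host-defined functions that lowered component imports call
/// into.
///
/// Compiled trampolines adapt the core wasm ABI to this signature: arguments
/// are spilled into the `args_and_results` array (of length
/// `nargs_and_results`) and results are written back into the same array.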
pub type VMLoweringCallee = extern "C" fn(
vmctx: *mut VMOpaqueContext,
data: *mut u8,
ty: TypeFuncIndex,
flags: InstanceFlags,
opt_memory: *mut VMMemoryDefinition,
opt_realloc: *mut VMFuncRef,
string_encoding: StringEncoding,
args_and_results: *mut mem::MaybeUninit<ValRaw>,
nargs_and_results: usize,
);
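/// Pairing of a host function pointer and its associated data, stored
/// per-lowering within a `VMComponentContext`.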
#[derive(Copy, Clone)]
#[repr(C)]
pub struct VMLowering {
    /// The host function invoked when this lowering is called.
    pub callee: VMLoweringCallee,
    /// Opaque host data (a `void*`-style pointer) passed along to `callee`.
    pub data: *mut u8,
}
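/// Marker type representing the variably-sized allocation that trails a
/// `ComponentInstance`.
///
/// Analogous to `VMContext` for core wasm: its static size is zero, but the
/// runtime size depends on the shape of the component as described by
/// `VMComponentOffsets`. The 16-byte alignment accommodates the most-aligned
/// values stored inside, such as `VMGlobalDefinition`.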
#[repr(C)]
#[repr(align(16))]
pub struct VMComponentContext {
    /// Ensures this type is `!Unpin`; raw pointers into the allocation must
    /// never be invalidated by a move.
    _marker: marker::PhantomPinned,
}
impl ComponentInstance {
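    /// Converts the `vmctx` provided into a `ComponentInstance` and runs the
    /// given closure with exclusive access to that instance.
    ///
    /// # Safety
    ///
    /// `vmctx` must be a valid pointer to the trailing context of a live
    /// `ComponentInstance`, and no other references to that instance may be
    /// active while `f` runs.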
pub unsafe fn from_vmctx<R>(
vmctx: *mut VMComponentContext,
f: impl FnOnce(&mut ComponentInstance) -> R,
) -> R {
let ptr = vmctx
.byte_sub(mem::size_of::<ComponentInstance>())
.cast::<ComponentInstance>();
f(&mut *ptr)
}
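    /// Returns the layout for an allocation holding both a
    /// `ComponentInstance` and its trailing `VMComponentContext` for the
    /// `offsets` provided.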
fn alloc_layout(offsets: &VMComponentOffsets<HostPtr>) -> Layout {
let size = mem::size_of::<Self>()
.checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
.unwrap();
let align = mem::align_of::<Self>();
Layout::from_size_align(size, align).unwrap()
}
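    /// Initializes the uninitialized memory at `ptr` as a `ComponentInstance`
    /// plus its trailing `VMComponentContext`.
    ///
    /// # Safety
    ///
    /// `ptr` must be valid for at least `alloc_size` bytes, where `alloc_size`
    /// is at least the size of `Self::alloc_layout(&offsets)` (asserted
    /// below), and must not already contain a live instance.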
unsafe fn new_at(
ptr: NonNull<ComponentInstance>,
alloc_size: usize,
offsets: VMComponentOffsets<HostPtr>,
runtime_info: Arc<dyn ComponentRuntimeInfo>,
resource_types: Arc<dyn Any + Send + Sync>,
store: *mut dyn Store,
) {
assert!(alloc_size >= Self::alloc_layout(&offsets).size());
let num_tables = runtime_info.component().num_resource_tables;
let mut component_resource_tables = PrimaryMap::with_capacity(num_tables);
for _ in 0..num_tables {
component_resource_tables.push(ResourceTable::default());
}
ptr::write(
ptr.as_ptr(),
ComponentInstance {
offsets,
vmctx_self_reference: SendSyncPtr::new(
NonNull::new(
ptr.as_ptr()
.byte_add(mem::size_of::<ComponentInstance>())
.cast(),
)
.unwrap(),
),
component_resource_tables,
runtime_info,
resource_types,
vmctx: VMComponentContext {
_marker: marker::PhantomPinned,
},
},
);
(*ptr.as_ptr()).initialize_vmctx(store);
}
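    /// Returns a pointer to the trailing `VMComponentContext`.
    ///
    /// The provenance of `vmctx_self_reference` is combined with the address
    /// of the `vmctx` field so the resulting pointer is valid for mutation
    /// even though it is derived from `&self`.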
fn vmctx(&self) -> *mut VMComponentContext {
let addr = std::ptr::addr_of!(self.vmctx);
Strict::with_addr(self.vmctx_self_reference.as_ptr(), Strict::addr(addr))
}
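    /// Returns a typed pointer `offset` bytes into the trailing
    /// `VMComponentContext`; the `_mut` variant below is the mutable
    /// equivalent.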
unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *const T {
self.vmctx()
.byte_add(usize::try_from(offset).unwrap())
.cast()
}
unsafe fn vmctx_plus_offset_mut<T>(&mut self, offset: u32) -> *mut T {
self.vmctx()
.byte_add(usize::try_from(offset).unwrap())
.cast()
}
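    /// Returns the canonical ABI flags (`may_enter`, `may_leave`, ...) for
    /// the runtime component instance specified.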
#[inline]
pub fn instance_flags(&self, instance: RuntimeComponentInstanceIndex) -> InstanceFlags {
unsafe {
let ptr = self
.vmctx_plus_offset::<VMGlobalDefinition>(self.offsets.instance_flags(instance))
.cast_mut();
InstanceFlags(SendSyncPtr::new(NonNull::new(ptr).unwrap()))
}
}
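    /// Returns the store that this component instance was created within.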
pub fn store(&self) -> *mut dyn Store {
unsafe {
let ret = *self.vmctx_plus_offset::<*mut dyn Store>(self.offsets.store());
assert!(!ret.is_null());
ret
}
}
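    /// Returns the runtime memory definition for the index provided.
    ///
    /// Only valid once `set_runtime_memory` has been called for `idx` during
    /// instantiation, as enforced by the debug assertion.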
pub fn runtime_memory(&self, idx: RuntimeMemoryIndex) -> *mut VMMemoryDefinition {
unsafe {
let ret = *self.vmctx_plus_offset(self.offsets.runtime_memory(idx));
debug_assert!(ret as usize != INVALID_PTR);
ret
}
}
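    /// Returns the `realloc` function pointer for the index provided; only
    /// valid after `set_runtime_realloc` has been called for `idx`.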
pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMFuncRef> {
unsafe {
let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_realloc(idx));
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
ret
}
}
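    /// Same as `runtime_realloc`, but for post-return functions.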
pub fn runtime_post_return(&self, idx: RuntimePostReturnIndex) -> NonNull<VMFuncRef> {
unsafe {
let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_post_return(idx));
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
ret
}
}
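    /// Returns the host callee/data pair for the lowered function at `idx`.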
pub fn lowering(&self, idx: LoweredIndex) -> VMLowering {
unsafe {
let ret = *self.vmctx_plus_offset::<VMLowering>(self.offsets.lowering(idx));
debug_assert!(ret.callee as usize != INVALID_PTR);
debug_assert!(ret.data as usize != INVALID_PTR);
ret
}
}
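    /// Returns the core wasm `funcref` for the trampoline at `idx`.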
pub fn trampoline_func_ref(&self, idx: TrampolineIndex) -> NonNull<VMFuncRef> {
unsafe {
let offset = self.offsets.trampoline_func_ref(idx);
let ret = self.vmctx_plus_offset::<VMFuncRef>(offset);
debug_assert!(
mem::transmute::<Option<NonNull<VMWasmCallFunction>>, usize>((*ret).wasm_call)
!= INVALID_PTR
);
debug_assert!((*ret).vmctx as usize != INVALID_PTR);
NonNull::new(ret.cast_mut()).unwrap()
}
}
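    /// Stores the runtime memory pointer for `idx`.
    ///
    /// Intended to be called during instantiation once the memory becomes
    /// available; in debug builds each slot may only be written once, moving
    /// it off its `INVALID_PTR` sentinel.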
pub fn set_runtime_memory(&mut self, idx: RuntimeMemoryIndex, ptr: *mut VMMemoryDefinition) {
unsafe {
debug_assert!(!ptr.is_null());
let storage = self.vmctx_plus_offset_mut(self.offsets.runtime_memory(idx));
debug_assert!(*storage as usize == INVALID_PTR);
*storage = ptr;
}
}
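    /// Same as `set_runtime_memory`, but for `realloc` function pointers.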
pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
unsafe {
let storage = self.vmctx_plus_offset_mut(self.offsets.runtime_realloc(idx));
debug_assert!(*storage as usize == INVALID_PTR);
*storage = ptr.as_ptr();
}
}
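    /// Same as `set_runtime_memory`, but for post-return function pointers.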
pub fn set_runtime_post_return(
&mut self,
idx: RuntimePostReturnIndex,
ptr: NonNull<VMFuncRef>,
) {
unsafe {
let storage = self.vmctx_plus_offset_mut(self.offsets.runtime_post_return(idx));
debug_assert!(*storage as usize == INVALID_PTR);
*storage = ptr.as_ptr();
}
}
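    /// Configures the host callee and data for the lowered function at `idx`.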
pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) {
unsafe {
debug_assert!(
*self.vmctx_plus_offset::<usize>(self.offsets.lowering_callee(idx)) == INVALID_PTR
);
debug_assert!(
*self.vmctx_plus_offset::<usize>(self.offsets.lowering_data(idx)) == INVALID_PTR
);
*self.vmctx_plus_offset_mut(self.offsets.lowering(idx)) = lowering;
}
}
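    /// Initializes the `VMFuncRef` for the trampoline at `idx` from the
    /// wasm/native/array entry points and the type index provided.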
pub fn set_trampoline(
&mut self,
idx: TrampolineIndex,
wasm_call: NonNull<VMWasmCallFunction>,
native_call: NonNull<VMNativeCallFunction>,
array_call: VMArrayCallFunction,
type_index: VMSharedTypeIndex,
) {
unsafe {
let offset = self.offsets.trampoline_func_ref(idx);
debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
let vmctx = VMOpaqueContext::from_vmcomponent(self.vmctx());
*self.vmctx_plus_offset_mut(offset) = VMFuncRef {
wasm_call: Some(wasm_call),
native_call,
array_call,
type_index,
vmctx,
};
}
}
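    /// Configures the destructor, if any, for the resource at `idx`; expected
    /// to be called once per resource during instantiation.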
pub fn set_resource_destructor(
&mut self,
idx: ResourceIndex,
dtor: Option<NonNull<VMFuncRef>>,
) {
unsafe {
let offset = self.offsets.resource_destructor(idx);
debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
*self.vmctx_plus_offset_mut(offset) = dtor;
}
}
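    /// Returns the destructor previously configured for `idx`; only valid
    /// after `set_resource_destructor`.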
pub fn resource_destructor(&self, idx: ResourceIndex) -> Option<NonNull<VMFuncRef>> {
unsafe {
let offset = self.offsets.resource_destructor(idx);
debug_assert!(*self.vmctx_plus_offset::<usize>(offset) != INVALID_PTR);
*self.vmctx_plus_offset(offset)
}
}
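    /// Writes the fixed header fields of the trailing `VMComponentContext`:
    /// the magic value, libcall table, store pointer, runtime limits, and
    /// default flags for each component instance.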
unsafe fn initialize_vmctx(&mut self, store: *mut dyn Store) {
*self.vmctx_plus_offset_mut(self.offsets.magic()) = VMCOMPONENT_MAGIC;
*self.vmctx_plus_offset_mut(self.offsets.libcalls()) = &libcalls::VMComponentLibcalls::INIT;
*self.vmctx_plus_offset_mut(self.offsets.store()) = store;
*self.vmctx_plus_offset_mut(self.offsets.limits()) = (*store).vmruntime_limits();
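        // Each runtime component instance starts with both `may_enter` and
        // `may_leave` set.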
for i in 0..self.offsets.num_runtime_component_instances {
let i = RuntimeComponentInstanceIndex::from_u32(i);
let mut def = VMGlobalDefinition::new();
*def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
*self.instance_flags(i).as_raw() = def;
}
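        // In debug builds, poison all pointer-shaped slots with INVALID_PTR
        // so the assertions elsewhere catch reads before initialization as
        // well as double-initialization.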
if cfg!(debug_assertions) {
for i in 0..self.offsets.num_lowerings {
let i = LoweredIndex::from_u32(i);
let offset = self.offsets.lowering_callee(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
let offset = self.offsets.lowering_data(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
}
for i in 0..self.offsets.num_trampolines {
let i = TrampolineIndex::from_u32(i);
let offset = self.offsets.trampoline_func_ref(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
}
for i in 0..self.offsets.num_runtime_memories {
let i = RuntimeMemoryIndex::from_u32(i);
let offset = self.offsets.runtime_memory(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
}
for i in 0..self.offsets.num_runtime_reallocs {
let i = RuntimeReallocIndex::from_u32(i);
let offset = self.offsets.runtime_realloc(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
}
for i in 0..self.offsets.num_runtime_post_returns {
let i = RuntimePostReturnIndex::from_u32(i);
let offset = self.offsets.runtime_post_return(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
}
for i in 0..self.offsets.num_resources {
let i = ResourceIndex::from_u32(i);
let offset = self.offsets.resource_destructor(i);
*self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
}
}
}
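    /// Returns the type information for the component this is an instance of.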
pub fn component(&self) -> &Component {
self.runtime_info.component()
}
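    /// Returns the type tables this component was instantiated with.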
pub fn component_types(&self) -> &Arc<ComponentTypes> {
self.runtime_info.component_types()
}
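    /// Returns the type-erased runtime type of the canonical ABI `realloc`
    /// function signature.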
pub fn realloc_func_ty(&self) -> &Arc<dyn std::any::Any + Send + Sync> {
self.runtime_info.realloc_func_type()
}
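    /// Returns the type-erased per-resource type information for this
    /// instance.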
pub fn resource_types(&self) -> &Arc<dyn Any + Send + Sync> {
&self.resource_types
}
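    /// Returns whether the resource table `ty` belongs to the same component
    /// instance that defines the underlying resource, in which case borrow
    /// lowering can pass the representation through directly.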
pub fn resource_owned_by_own_instance(&self, ty: TypeResourceTableIndex) -> bool {
let resource = &self.component_types()[ty];
let component = self.component();
let idx = match component.defined_resource_index(resource.ty) {
Some(idx) => idx,
None => return false,
};
resource.instance == component.defined_resource_instances[idx]
}
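    /// Implementation of the `resource.new` intrinsic for `i32`
    /// representations.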
pub fn resource_new32(&mut self, resource: TypeResourceTableIndex, rep: u32) -> Result<u32> {
self.resource_tables().resource_new(Some(resource), rep)
}
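    /// Implementation of the `resource.rep` intrinsic for `i32`
    /// representations.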
pub fn resource_rep32(&mut self, resource: TypeResourceTableIndex, idx: u32) -> Result<u32> {
self.resource_tables().resource_rep(Some(resource), idx)
}
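    /// Implementation of the `resource.drop` intrinsic, returning the
    /// underlying representation when the caller needs to run a destructor.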
pub fn resource_drop(
&mut self,
resource: TypeResourceTableIndex,
idx: u32,
) -> Result<Option<u32>> {
self.resource_tables().resource_drop(Some(resource), idx)
}
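    /// Bundles this instance's resource tables with the store-wide call
    /// context into a `ResourceTables` view for manipulation.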
fn resource_tables(&mut self) -> ResourceTables<'_> {
ResourceTables {
host_table: None,
calls: unsafe { (&mut *self.store()).component_calls() },
tables: Some(&mut self.component_resource_tables),
}
}
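    /// Returns mutable access to the raw per-type resource tables.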
#[inline]
pub fn component_resource_tables(
&mut self,
) -> &mut PrimaryMap<TypeResourceTableIndex, ResourceTable> {
&mut self.component_resource_tables
}
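    /// Looks up the defining resource behind the table `ty` and returns its
    /// destructor along with the flags of its defining instance, or `None`
    /// for the flags when the resource is not defined by this component.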
pub fn dtor_and_flags(
&self,
ty: TypeResourceTableIndex,
) -> (Option<NonNull<VMFuncRef>>, Option<InstanceFlags>) {
let resource = self.component_types()[ty].ty;
let dtor = self.resource_destructor(resource);
let component = self.component();
let flags = component.defined_resource_index(resource).map(|i| {
let instance = component.defined_resource_instances[i];
self.instance_flags(instance)
});
(dtor, flags)
}
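    /// Transfers an `own` handle at `idx` from the `src` table to the `dst`
    /// table, returning the new index within `dst`.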
pub(crate) fn resource_transfer_own(
&mut self,
idx: u32,
src: TypeResourceTableIndex,
dst: TypeResourceTableIndex,
) -> Result<u32> {
let mut tables = self.resource_tables();
let rep = tables.resource_lift_own(Some(src), idx)?;
tables.resource_lower_own(Some(dst), rep)
}
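    /// Transfers a `borrow` handle from `src` to `dst`; when the destination
    /// instance owns the resource, its raw representation is returned
    /// directly without creating a table entry.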
pub(crate) fn resource_transfer_borrow(
&mut self,
idx: u32,
src: TypeResourceTableIndex,
dst: TypeResourceTableIndex,
) -> Result<u32> {
let dst_owns_resource = self.resource_owned_by_own_instance(dst);
let mut tables = self.resource_tables();
let rep = tables.resource_lift_borrow(Some(src), idx)?;
if dst_owns_resource {
return Ok(rep);
}
tables.resource_lower_borrow(Some(dst), rep)
}
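    /// Pushes a new call frame for tracking borrows created during a call.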
pub(crate) fn resource_enter_call(&mut self) {
self.resource_tables().enter_call()
}
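    /// Pops the current call frame, erroring if borrows are still
    /// outstanding.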
pub(crate) fn resource_exit_call(&mut self) -> Result<()> {
self.resource_tables().exit_call()
}
}
impl VMComponentContext {
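    /// Moves backwards from this context's address to the `ComponentInstance`
    /// it trails, relying on the `#[repr(C)]` layout of that type.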
pub fn instance(&self) -> *mut ComponentInstance {
unsafe {
(self as *const Self as *mut u8)
.offset(-(offset_of!(ComponentInstance, vmctx) as isize))
as *mut ComponentInstance
}
}
}
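/// An owned `ComponentInstance`, akin to `Box<ComponentInstance>`.
///
/// Dereferences to the shared methods of `ComponentInstance` and releases the
/// allocation on drop.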
pub struct OwnedComponentInstance {
ptr: SendSyncPtr<ComponentInstance>,
}
impl OwnedComponentInstance {
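    /// Allocates a zeroed `ComponentInstance` plus trailing
    /// `VMComponentContext` on the heap and initializes it for the component
    /// described by `runtime_info`.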
pub fn new(
runtime_info: Arc<dyn ComponentRuntimeInfo>,
resource_types: Arc<dyn Any + Send + Sync>,
store: *mut dyn Store,
) -> OwnedComponentInstance {
let component = runtime_info.component();
let offsets = VMComponentOffsets::new(HostPtr, component);
let layout = ComponentInstance::alloc_layout(&offsets);
unsafe {
let ptr = alloc::alloc_zeroed(layout) as *mut ComponentInstance;
let ptr = NonNull::new(ptr).unwrap();
ComponentInstance::new_at(
ptr,
layout.size(),
offsets,
runtime_info,
resource_types,
store,
);
let ptr = SendSyncPtr::new(ptr);
OwnedComponentInstance { ptr }
}
}
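    // Note that this is `unsafe` because it hands out `&mut` access to the
    // instance, which must not alias any other live reference to it.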
unsafe fn instance_mut(&mut self) -> &mut ComponentInstance {
&mut *self.ptr.as_ptr()
}
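    /// Returns the raw pointer to the underlying `ComponentInstance`.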
pub fn instance_ptr(&self) -> *mut ComponentInstance {
self.ptr.as_ptr()
}
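    // The setters below forward directly to their `ComponentInstance`
    // counterparts; see the documentation on those methods.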
pub fn set_runtime_memory(&mut self, idx: RuntimeMemoryIndex, ptr: *mut VMMemoryDefinition) {
unsafe { self.instance_mut().set_runtime_memory(idx, ptr) }
}
pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
unsafe { self.instance_mut().set_runtime_realloc(idx, ptr) }
}
pub fn set_runtime_post_return(
&mut self,
idx: RuntimePostReturnIndex,
ptr: NonNull<VMFuncRef>,
) {
unsafe { self.instance_mut().set_runtime_post_return(idx, ptr) }
}
pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) {
unsafe { self.instance_mut().set_lowering(idx, lowering) }
}
pub fn set_trampoline(
&mut self,
idx: TrampolineIndex,
wasm_call: NonNull<VMWasmCallFunction>,
native_call: NonNull<VMNativeCallFunction>,
array_call: VMArrayCallFunction,
type_index: VMSharedTypeIndex,
) {
unsafe {
self.instance_mut()
.set_trampoline(idx, wasm_call, native_call, array_call, type_index)
}
}
pub fn set_resource_destructor(
&mut self,
idx: ResourceIndex,
dtor: Option<NonNull<VMFuncRef>>,
) {
unsafe { self.instance_mut().set_resource_destructor(idx, dtor) }
}
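    /// Returns mutable access to the type-erased resource type information.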
pub fn resource_types_mut(&mut self) -> &mut Arc<dyn Any + Send + Sync> {
unsafe { &mut (*self.ptr.as_ptr()).resource_types }
}
}
impl Deref for OwnedComponentInstance {
type Target = ComponentInstance;
fn deref(&self) -> &ComponentInstance {
unsafe { &*self.ptr.as_ptr() }
}
}
impl Drop for OwnedComponentInstance {
fn drop(&mut self) {
let layout = ComponentInstance::alloc_layout(&self.offsets);
unsafe {
ptr::drop_in_place(self.ptr.as_ptr());
alloc::dealloc(self.ptr.as_ptr().cast(), layout);
}
}
}
impl VMComponentContext {
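    /// Casts an opaque context pointer to a `VMComponentContext`.
    ///
    /// # Safety
    ///
    /// `opaque` must actually point at a `VMComponentContext`, which the
    /// debug assertion on the magic value helps verify.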
#[inline]
pub unsafe fn from_opaque(opaque: *mut VMOpaqueContext) -> *mut VMComponentContext {
debug_assert_eq!((*opaque).magic, VMCOMPONENT_MAGIC);
opaque.cast()
}
}
impl VMOpaqueContext {
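    /// Helper to make the intended direction of the cast explicit at call
    /// sites.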
#[inline]
pub fn from_vmcomponent(ptr: *mut VMComponentContext) -> *mut VMOpaqueContext {
ptr.cast()
}
}
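/// Pointer to the canonical ABI flags global of a component instance, with
/// typed accessors for the individual flag bits.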
#[allow(missing_docs)]
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct InstanceFlags(SendSyncPtr<VMGlobalDefinition>);
#[allow(missing_docs)]
impl InstanceFlags {
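    // The accessors below dereference the raw global pointer and are
    // therefore `unsafe`; callers must ensure the owning instance is still
    // live.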
#[inline]
pub unsafe fn may_leave(&self) -> bool {
*(*self.as_raw()).as_i32() & FLAG_MAY_LEAVE != 0
}
#[inline]
pub unsafe fn set_may_leave(&mut self, val: bool) {
if val {
*(*self.as_raw()).as_i32_mut() |= FLAG_MAY_LEAVE;
} else {
*(*self.as_raw()).as_i32_mut() &= !FLAG_MAY_LEAVE;
}
}
#[inline]
pub unsafe fn may_enter(&self) -> bool {
*(*self.as_raw()).as_i32() & FLAG_MAY_ENTER != 0
}
#[inline]
pub unsafe fn set_may_enter(&mut self, val: bool) {
if val {
*(*self.as_raw()).as_i32_mut() |= FLAG_MAY_ENTER;
} else {
*(*self.as_raw()).as_i32_mut() &= !FLAG_MAY_ENTER;
}
}
#[inline]
pub unsafe fn needs_post_return(&self) -> bool {
*(*self.as_raw()).as_i32() & FLAG_NEEDS_POST_RETURN != 0
}
#[inline]
pub unsafe fn set_needs_post_return(&mut self, val: bool) {
if val {
*(*self.as_raw()).as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
} else {
*(*self.as_raw()).as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
}
}
#[inline]
pub fn as_raw(&self) -> *mut VMGlobalDefinition {
self.0.as_ptr()
}
}
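/// Runtime information about a compiled component, threaded through
/// instantiation so an instance can reflect on its own types.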
pub trait ComponentRuntimeInfo: Send + Sync + 'static {
    /// Returns the compile-time metadata for this component.
    fn component(&self) -> &Component;
    /// Returns the type tables for this component.
    fn component_types(&self) -> &Arc<ComponentTypes>;
    /// Returns the type-erased runtime type of the canonical ABI `realloc`
    /// signature.
    fn realloc_func_type(&self) -> &Arc<dyn std::any::Any + Send + Sync>;
}