use crate::{
AnyfuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex,
GlobalIndex, MemoryIndex, Module, TableIndex,
};
use cranelift_entity::packed_option::ReservedValue;
use std::convert::TryFrom;
use wasmtime_types::OwnedMemoryIndex;
/// Convert a `usize` entity count to `u32`.
///
/// On 32-bit targets `usize` and `u32` have the same width, so this
/// conversion can never fail; the `unwrap` is unreachable.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    sz.try_into().unwrap()
}
/// Convert a `usize` entity count to `u32`.
///
/// On 64-bit targets a `usize` may exceed `u32::MAX`, so the conversion
/// is checked and panics with a descriptive message on overflow.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    sz.try_into().expect("overflow in cast from usize to u32")
}
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero, and `offset + (width - 1)` must not overflow
/// `u32` (it would panic in debug builds).
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    let bumped = offset + (width - 1);
    (bumped / width) * width
}
/// Byte offsets of every region of a `VMContext`, computed from a module's
/// entity counts and a target pointer size `P`.
///
/// The public `num_*` fields are the counts the layout was computed from;
/// the private fields are the resulting start offsets (filled in by the
/// `From<VMOffsetsFields<P>>` impl) plus the total `size`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer-size abstraction used to compute these offsets.
    pub ptr: P,
    /// Number of imported functions in the module.
    pub num_imported_functions: u32,
    /// Number of imported tables in the module.
    pub num_imported_tables: u32,
    /// Number of imported memories in the module.
    pub num_imported_memories: u32,
    /// Number of imported globals in the module.
    pub num_imported_globals: u32,
    /// Number of tables defined (not imported) by the module.
    pub num_defined_tables: u32,
    /// Number of memories defined (not imported) by the module.
    pub num_defined_memories: u32,
    /// Number of defined memories whose definitions are owned by the vmctx
    /// (defined and non-shared — see `VMOffsets::new`).
    pub num_owned_memories: u32,
    /// Number of globals defined (not imported) by the module.
    pub num_defined_globals: u32,
    /// Number of functions that "escape" and need an anyfunc slot.
    pub num_escaped_funcs: u32,

    // Precomputed start offsets (in bytes) of each vmctx region, in layout
    // order; see the `fields!` invocation in `From<VMOffsetsFields<P>>`.
    magic: u32,
    runtime_limits: u32,
    callee: u32,
    epoch_ptr: u32,
    externref_activations_table: u32,
    store: u32,
    builtin_functions: u32,
    signature_ids: u32,
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_anyfuncs: u32,
    // Total size of the vmctx, in bytes.
    size: u32,
}
/// Abstraction over the target's pointer width so offsets can be computed
/// either for the host (`HostPtr`) or for an explicitly chosen width (`u8`).
///
/// The default methods encode the field layouts of several runtime structs;
/// the arithmetic in each method IS the ABI, so the bodies must stay in
/// sync with the corresponding struct definitions.
pub trait PtrSize {
    /// The size of a pointer on the target, in bytes.
    fn size(&self) -> u8;

    /// Offset of the `func_ptr` field in a `VMCallerCheckedAnyfunc`.
    #[allow(clippy::erasing_op)]
    #[inline]
    fn vmcaller_checked_anyfunc_func_ptr(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `type_index` field in a `VMCallerCheckedAnyfunc`.
    #[allow(clippy::identity_op)]
    #[inline]
    fn vmcaller_checked_anyfunc_type_index(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `vmctx` field in a `VMCallerCheckedAnyfunc`.
    #[inline]
    fn vmcaller_checked_anyfunc_vmctx(&self) -> u8 {
        2 * self.size()
    }

    /// Total size of a `VMCallerCheckedAnyfunc`: three pointer-sized fields.
    #[inline]
    fn size_of_vmcaller_checked_anyfunc(&self) -> u8 {
        3 * self.size()
    }

    /// Size of a `VMGlobalDefinition`, independent of pointer width.
    // NOTE(review): 16 bytes presumably so a definition can hold a 128-bit
    // (v128) value — confirm against the runtime's global representation.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    // Offsets within `VMRuntimeLimits`. The chained arithmetic below
    // encodes the layout: a pointer-sized `stack_limit`, two 8-byte fields
    // (`fuel_consumed`, `epoch_deadline`), then three pointer-sized fields.

    /// Offset of the `stack_limit` field (first field, offset 0).
    #[inline]
    fn vmruntime_limits_stack_limit(&self) -> u8 {
        0
    }

    /// Offset of the `fuel_consumed` field: follows the pointer-sized
    /// `stack_limit`.
    #[inline]
    fn vmruntime_limits_fuel_consumed(&self) -> u8 {
        self.size()
    }

    /// Offset of the `epoch_deadline` field: `fuel_consumed` is 8 bytes.
    #[inline]
    fn vmruntime_limits_epoch_deadline(&self) -> u8 {
        self.vmruntime_limits_fuel_consumed() + 8
    }

    /// Offset of the `last_wasm_exit_fp` field: `epoch_deadline` is 8 bytes.
    fn vmruntime_limits_last_wasm_exit_fp(&self) -> u8 {
        self.vmruntime_limits_epoch_deadline() + 8
    }

    /// Offset of the `last_wasm_exit_pc` field: follows the pointer-sized
    /// `last_wasm_exit_fp`.
    fn vmruntime_limits_last_wasm_exit_pc(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the `last_wasm_entry_sp` field: follows the pointer-sized
    /// `last_wasm_exit_pc`.
    fn vmruntime_limits_last_wasm_entry_sp(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the `base` field in a `VMMemoryDefinition`.
    #[allow(clippy::erasing_op)]
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field in a `VMMemoryDefinition`.
    #[allow(clippy::identity_op)]
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Total size of a `VMMemoryDefinition`: two pointer-sized fields.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }
}
/// A `PtrSize` implementation that uses the host's own pointer width.
pub struct HostPtr;
impl PtrSize for HostPtr {
    /// The host's native pointer width, in bytes (e.g. 8 on 64-bit hosts).
    #[inline]
    fn size(&self) -> u8 {
        let width = std::mem::size_of::<usize>();
        width as u8
    }
}
impl PtrSize for u8 {
    /// A bare `u8` acts as its own pointer size.
    // NOTE(review): presumably used when computing offsets for a target
    // whose pointer width differs from the host's — confirm at call sites.
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}
/// The entity counts needed to compute a `VMOffsets`; convert with
/// `VMOffsets::from(fields)`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer-size abstraction to compute offsets with.
    pub ptr: P,
    /// Number of imported functions in the module.
    pub num_imported_functions: u32,
    /// Number of imported tables in the module.
    pub num_imported_tables: u32,
    /// Number of imported memories in the module.
    pub num_imported_memories: u32,
    /// Number of imported globals in the module.
    pub num_imported_globals: u32,
    /// Number of tables defined by the module.
    pub num_defined_tables: u32,
    /// Number of memories defined by the module.
    pub num_defined_memories: u32,
    /// Number of defined, non-shared memories (their definitions are stored
    /// inline in the vmctx).
    pub num_owned_memories: u32,
    /// Number of globals defined by the module.
    pub num_defined_globals: u32,
    /// Number of escaped functions needing an anyfunc slot.
    pub num_escaped_funcs: u32,
}
impl<P: PtrSize> VMOffsets<P> {
    /// Build a `VMOffsets` for `module` using pointer representation `ptr`.
    pub fn new(ptr: P, module: &Module) -> Self {
        // "Owned" memories are defined (non-imported) memories that are not
        // shared. NOTE(review): shared memories are excluded here —
        // presumably their definitions live outside the vmctx; confirm
        // against the instance-allocation code.
        let num_owned_memories = module
            .memory_plans
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.memory.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            // Defined counts are totals minus imports.
            num_defined_tables: cast_to_u32(module.table_plans.len() - module.num_imported_tables),
            num_defined_memories: cast_to_u32(
                module.memory_plans.len() - module.num_imported_memories,
            ),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Size in bytes of a target pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Iterate over `(description, size-in-bytes)` pairs for every region
    /// of the vmctx, e.g. for logging a size breakdown.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        // The invocation below lists the region start-offset fields in
        // *descending* offset order. Walking down from the total `size`,
        // each region's byte size is `previous_start - this_start`; the
        // final `assert_eq!(last, 0)` checks the regions tile the whole
        // vmctx down to offset 0.
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Destructure exhaustively so adding a field to `VMOffsets`
                // forces this list to be updated.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,
                    size,
                    $($name,)*
                } = *self;

                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    // Shadow the offset with the region's computed size.
                    let $name = last - $name;
                    last = tmp;
                )*
                assert_eq!(last, 0);
                IntoIterator::into_iter([$(($desc, $name),)*])
            }};
        }

        calculate_sizes! {
            defined_anyfuncs: "module functions",
            defined_globals: "defined globals",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            defined_tables: "defined tables",
            imported_globals: "imported globals",
            imported_memories: "imported memories",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            signature_ids: "module types",
            builtin_functions: "jit builtin functions state",
            store: "jit store state",
            externref_activations_table: "jit host externref state",
            epoch_ptr: "jit current epoch state",
            callee: "callee function pointer",
            runtime_limits: "jit runtime limits state",
            magic: "magic value",
        }
    }
}
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Compute the concrete offset of every vmctx region from the entity
    /// counts in `fields`, laying the regions out one after another.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with every offset at 0; the `fields!` invocation below
        // assigns the real offsets in layout order.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_escaped_funcs: fields.num_escaped_funcs,
            magic: 0,
            runtime_limits: 0,
            callee: 0,
            epoch_ptr: 0,
            externref_activations_table: 0,
            store: 0,
            builtin_functions: 0,
            signature_ids: 0,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_anyfuncs: 0,
            size: 0,
        };

        // Overflow-checked add/mul: a module large enough to overflow the
        // 32-bit vmctx offset space panics rather than silently wrapping.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // `size(field) = expr` records the running offset into `ret.field`
        // and then advances it by `expr` bytes; `align(n)` rounds the
        // running offset up to a multiple of `n`.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        // The vmctx layout, in order. NOTE(review): reordering any line
        // here changes the layout that generated code relies on.
        fields! {
            size(magic) = 4u32,
            // Pad so the pointer-sized fields below are pointer-aligned.
            align(u32::from(ret.ptr.size())),
            size(runtime_limits) = ret.ptr.size(),
            size(callee) = ret.ptr.size(),
            size(epoch_ptr) = ret.ptr.size(),
            size(externref_activations_table) = ret.ptr.size(),
            // `store` holds two pointer-sized values.
            size(store) = ret.ptr.size() * 2,
            size(builtin_functions) = ret.pointer_size(),
            size(signature_ids) = ret.ptr.size(),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            // 16-byte alignment to match `size_of_vmglobal_definition`.
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_anyfuncs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vmcaller_checked_anyfunc(),
            ),
        }

        ret.size = next_field_offset;

        // `magic` is the first region, so its computed offset must be 0.
        assert_eq!(ret.magic, 0);

        return ret;
    }
}
/// Offsets within a `VMFunctionImport`: two pointer-sized fields.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `body` field (the first field).
    #[allow(clippy::erasing_op)]
    #[inline]
    pub fn vmfunction_import_body(&self) -> u8 {
        self.pointer_size() * 0
    }

    /// Offset of the `vmctx` field (the second field).
    #[allow(clippy::identity_op)]
    #[inline]
    pub fn vmfunction_import_vmctx(&self) -> u8 {
        self.pointer_size() * 1
    }

    /// Total size of a `VMFunctionImport`, in bytes.
    #[inline]
    pub fn size_of_vmfunction_import(&self) -> u8 {
        self.pointer_size() * 2
    }
}
impl<P: PtrSize> VMOffsets<P> {
    /// Size of a raw function-body pointer (`*const VMFunctionBody`).
    #[allow(clippy::identity_op)]
    pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
        self.pointer_size() * 1
    }
}
/// Offsets within a `VMTableImport`: two pointer-sized fields.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `from` field (the first field).
    #[allow(clippy::erasing_op)]
    #[inline]
    pub fn vmtable_import_from(&self) -> u8 {
        self.pointer_size() * 0
    }

    /// Offset of the `vmctx` field (the second field).
    #[allow(clippy::identity_op)]
    #[inline]
    pub fn vmtable_import_vmctx(&self) -> u8 {
        self.pointer_size() * 1
    }

    /// Total size of a `VMTableImport`, in bytes.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        self.pointer_size() * 2
    }
}
/// Offsets within a `VMTableDefinition`: two pointer-sized fields.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `base` field (the first field).
    #[allow(clippy::erasing_op)]
    #[inline]
    pub fn vmtable_definition_base(&self) -> u8 {
        self.pointer_size() * 0
    }

    /// Offset of the `current_elements` field (the second field).
    #[allow(clippy::identity_op)]
    pub fn vmtable_definition_current_elements(&self) -> u8 {
        self.pointer_size() * 1
    }

    /// Size of the `current_elements` field itself: a 4-byte count.
    #[inline]
    pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
        4
    }

    /// Total size of a `VMTableDefinition`, in bytes.
    #[inline]
    pub fn size_of_vmtable_definition(&self) -> u8 {
        self.pointer_size() * 2
    }
}
/// Offsets within a `VMMemoryImport`.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `from` field (the first field).
    #[allow(clippy::erasing_op)]
    #[inline]
    pub fn vmmemory_import_from(&self) -> u8 {
        self.pointer_size() * 0
    }

    /// Offset of the `vmctx` field (the second field).
    #[allow(clippy::identity_op)]
    #[inline]
    pub fn vmmemory_import_vmctx(&self) -> u8 {
        self.pointer_size() * 1
    }

    /// Total size of a `VMMemoryImport`, in bytes.
    // NOTE(review): three pointer-sized slots, but only two field offsets
    // are exposed above — presumably a third field exists in the struct;
    // confirm against the `VMMemoryImport` definition.
    #[inline]
    pub fn size_of_vmmemory_import(&self) -> u8 {
        self.pointer_size() * 3
    }
}
/// Offsets within a `VMGlobalImport`: a single pointer-sized field.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `from` field (the first and only exposed field).
    #[allow(clippy::erasing_op)]
    #[inline]
    pub fn vmglobal_import_from(&self) -> u8 {
        self.pointer_size() * 0
    }

    /// Total size of a `VMGlobalImport`, in bytes.
    #[allow(clippy::identity_op)]
    #[inline]
    pub fn size_of_vmglobal_import(&self) -> u8 {
        self.pointer_size() * 1
    }
}
impl<P: PtrSize> VMOffsets<P> {
    /// Size of a `VMSharedSignatureIndex`: a 4-byte index, independent of
    /// pointer width.
    #[inline]
    pub fn size_of_vmshared_signature_index(&self) -> u8 {
        4
    }
}
/// Accessors for the offsets of each region of the `VMContext`, plus
/// per-entity offsets within those regions.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `magic` value (always 0; see the `From` impl).
    #[inline]
    pub fn vmctx_magic(&self) -> u32 {
        self.magic
    }

    /// Offset of the runtime-limits pointer.
    #[inline]
    pub fn vmctx_runtime_limits(&self) -> u32 {
        self.runtime_limits
    }

    /// Offset of the callee function pointer.
    pub fn vmctx_callee(&self) -> u32 {
        self.callee
    }

    /// Offset of the current-epoch pointer.
    #[inline]
    pub fn vmctx_epoch_ptr(&self) -> u32 {
        self.epoch_ptr
    }

    /// Offset of the externref activations table pointer.
    #[inline]
    pub fn vmctx_externref_activations_table(&self) -> u32 {
        self.externref_activations_table
    }

    /// Offset of the store state.
    #[inline]
    pub fn vmctx_store(&self) -> u32 {
        self.store
    }

    /// Offset of the signature-ids array pointer.
    #[inline]
    pub fn vmctx_signature_ids_array(&self) -> u32 {
        self.signature_ids
    }

    /// Offset of the start of the imported-functions region.
    #[allow(clippy::erasing_op)]
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// Offset of the start of the imported-tables region.
    #[allow(clippy::identity_op)]
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// Offset of the start of the imported-memories region.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// Offset of the start of the imported-globals region.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// Offset of the start of the defined-tables region.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// Offset of the start of the defined-memories (pointer) region.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// Offset of the start of the owned-memories (definition) region.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// Offset of the start of the defined-globals region.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// Offset of the start of the anyfuncs region.
    #[inline]
    pub fn vmctx_anyfuncs_begin(&self) -> u32 {
        self.defined_anyfuncs
    }

    /// Offset of the builtin-functions state.
    #[inline]
    pub fn vmctx_builtin_functions(&self) -> u32 {
        self.builtin_functions
    }

    /// Total size of the `VMContext`, in bytes.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    // The per-entity accessors below all follow the same pattern:
    // assert the index is in range, then return region start plus
    // `index * element_size`.

    /// Offset of the `VMFunctionImport` for imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// Offset of the `VMTableImport` for imported table `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Offset of the `VMMemoryImport` for imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// Offset of the `VMGlobalImport` for imported global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// Offset of the `VMTableDefinition` for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// Offset of the memory-definition pointer for defined memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// Offset of the `VMMemoryDefinition` for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// Offset of the `VMGlobalDefinition` for defined global `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Offset of the anyfunc slot for escaped function `index`.
    #[inline]
    pub fn vmctx_anyfunc(&self, index: AnyfuncIndex) -> u32 {
        // A reserved (sentinel) index has no anyfunc slot.
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_anyfuncs_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_anyfunc())
    }

    /// Offset of the `body` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_body(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_body())
    }

    /// Offset of the `vmctx` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// Offset of the `from` field of imported table `index`.
    #[inline]
    pub fn vmctx_vmtable_import_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// Offset of the `base` field of defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// Offset of the `current_elements` field of defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// Offset of the `from` field of imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// Offset of the `vmctx` field of imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_vmctx())
    }

    /// Offset of the `base` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// Offset of the `current_length` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// Offset of the `from` field of imported global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }
}
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the reference count in a `VMExternData` (the first field,
    /// offset 0).
    #[inline]
    pub fn vm_extern_data_ref_count(&self) -> u32 {
        0
    }
}
/// Offsets within the externref activations table.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `next` field (the first field, offset 0).
    #[inline]
    pub fn vm_extern_ref_activation_table_next(&self) -> u32 {
        0
    }

    /// Offset of the `end` field: follows the pointer-sized `next`.
    #[inline]
    pub fn vm_extern_ref_activation_table_end(&self) -> u32 {
        u32::from(self.pointer_size())
    }
}
/// Magic value — the little-endian bytes of `"host"` — identifying a
/// host-function vmctx.
// NOTE(review): presumably compared against the vmctx `magic` field at
// runtime; confirm at the sites that read `vmctx_magic`.
pub const VM_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"host");
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// Check `align` both for the alignment invariant and for exact
    /// values. The original test only asserted multiple-of-16-ness, which
    /// a degenerate `align` returning 0 for every input would also pass;
    /// the exact-value assertions below pin the round-up behavior.
    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));

        // Exact values: already-aligned inputs are unchanged, everything
        // else rounds *up* to the next multiple.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(16, 16), 16);
        assert_eq!(align(17, 16), 32);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(33, 16), 48);
        assert_eq!(align(1, 4), 4);
    }
}