//! Offsets and sizes of various runtime structures (`VMContext` and the
//! structures it points to), computed from a module's item counts and a
//! target pointer size so they do not depend on the host's pointer width.

use crate::{
DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
};
use cranelift_entity::packed_option::ReservedValue;
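// Convert a `usize` into a `u32`, panicking on overflow. On 64-bit hosts the
// panic message calls out the overflow explicitly; on 32-bit hosts the cast
// cannot overflow.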
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
u32::try_from(sz).unwrap()
}
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
u32::try_from(sz).expect("overflow in cast from usize to u32")
}
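/// Round `offset` up to the nearest multiple of `width`.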
#[inline]
fn align(offset: u32, width: u32) -> u32 {
(offset + (width - 1)) / width * width
}
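/// Precomputed offsets into a `VMContext` (and related runtime structures)
/// for a particular module and target pointer size.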
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
pub ptr: P,
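// Counts of each kind of item in the module.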
pub num_imported_functions: u32,
pub num_imported_tables: u32,
pub num_imported_memories: u32,
pub num_imported_globals: u32,
pub num_imported_tags: u32,
pub num_defined_tables: u32,
pub num_defined_memories: u32,
pub num_owned_memories: u32,
pub num_defined_globals: u32,
pub num_defined_tags: u32,
pub num_escaped_funcs: u32,
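// Byte offsets, from the start of the `VMContext`, of each dynamically sized
// region, plus the total size of the `VMContext` for this module.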
imported_functions: u32,
imported_tables: u32,
imported_memories: u32,
imported_globals: u32,
imported_tags: u32,
defined_tables: u32,
defined_memories: u32,
owned_memories: u32,
defined_globals: u32,
defined_tags: u32,
defined_func_refs: u32,
size: u32,
}
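/// Abstraction over the target's pointer size, so offsets can be computed for
/// targets whose pointer width differs from the host's.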
pub trait PtrSize {
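/// The size, in bytes, of a pointer on the target.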
fn size(&self) -> u8;
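// Offsets of the store-context and builtin-functions pointers within a
// `VMContext`: the 32-bit magic comes first, rounded up to pointer alignment,
// followed by the two pointers.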
fn vmcontext_store_context(&self) -> u8 {
u8::try_from(align(
u32::try_from(core::mem::size_of::<u32>()).unwrap(),
u32::from(self.size()),
))
.unwrap()
}
fn vmcontext_builtin_functions(&self) -> u8 {
self.vmcontext_store_context() + self.size()
}
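// Offsets of the fields of a `VMFuncRef`: the array-call entry, the Wasm-call
// entry, the type index, and the `vmctx` pointer, each one pointer apart.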
#[inline]
fn vm_func_ref_array_call(&self) -> u8 {
0 * self.size()
}
#[inline]
fn vm_func_ref_wasm_call(&self) -> u8 {
1 * self.size()
}
#[inline]
fn vm_func_ref_type_index(&self) -> u8 {
2 * self.size()
}
#[inline]
fn vm_func_ref_vmctx(&self) -> u8 {
3 * self.size()
}
#[inline]
fn size_of_vm_func_ref(&self) -> u8 {
4 * self.size()
}
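/// A `VMGlobalDefinition` occupies a 16-byte slot, large enough to hold any
/// Wasm value, including a 16-byte `v128`.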
#[inline]
fn size_of_vmglobal_definition(&self) -> u8 {
16
}
#[inline]
fn size_of_vmtag_definition(&self) -> u8 {
4
}
#[inline]
fn maximum_value_size(&self) -> u8 {
self.size_of_vmglobal_definition()
}
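// Offsets of the fields of the `VMStoreContext`: the fuel counter, epoch
// deadline, stack limit, the GC heap's memory definition, the saved
// frame/stack pointers and trap handler for the last Wasm entry/exit, the
// stack chain, and the store data pointer.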
#[inline]
fn vmstore_context_fuel_consumed(&self) -> u8 {
0
}
#[inline]
fn vmstore_context_epoch_deadline(&self) -> u8 {
self.vmstore_context_fuel_consumed() + 8
}
#[inline]
fn vmstore_context_stack_limit(&self) -> u8 {
self.vmstore_context_epoch_deadline() + 8
}
#[inline]
fn vmstore_context_gc_heap(&self) -> u8 {
self.vmstore_context_stack_limit() + self.size()
}
fn vmstore_context_gc_heap_base(&self) -> u8 {
let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
offset
}
fn vmstore_context_gc_heap_current_length(&self) -> u8 {
let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
offset
}
fn vmstore_context_last_wasm_exit_trampoline_fp(&self) -> u8 {
self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
}
fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
self.vmstore_context_last_wasm_exit_trampoline_fp() + self.size()
}
fn vmstore_context_last_wasm_entry_sp(&self) -> u8 {
self.vmstore_context_last_wasm_exit_pc() + self.size()
}
fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
self.vmstore_context_last_wasm_entry_sp() + self.size()
}
fn vmstore_context_last_wasm_entry_trap_handler(&self) -> u8 {
self.vmstore_context_last_wasm_entry_fp() + self.size()
}
fn vmstore_context_stack_chain(&self) -> u8 {
self.vmstore_context_last_wasm_entry_trap_handler() + self.size()
}
fn vmstore_context_store_data(&self) -> u8 {
self.vmstore_context_stack_chain() + self.size_of_vmstack_chain()
}
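// Offsets of the fields of a `VMMemoryDefinition`: the base pointer followed
// by the current length.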
#[inline]
fn vmmemory_definition_base(&self) -> u8 {
0 * self.size()
}
#[inline]
fn vmmemory_definition_current_length(&self) -> u8 {
1 * self.size()
}
#[inline]
fn size_of_vmmemory_definition(&self) -> u8 {
2 * self.size()
}
#[inline]
fn size_of_vmmemory_pointer(&self) -> u8 {
self.size()
}
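/// Offset of the `VMFuncRef` inside an array-call host function context:
/// right after the 32-bit magic, rounded up to pointer alignment.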
fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
u8::try_from(align(
u32::try_from(core::mem::size_of::<u32>()).unwrap(),
u32::from(self.size()),
))
.unwrap()
}
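// Offsets used by stack-switching support: stack chains, stack limits, host
// arrays, common stack information, continuation objects, and continuation
// references.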
fn size_of_vmstack_chain(&self) -> u8 {
2 * self.size()
}
fn vmstack_limits_stack_limit(&self) -> u8 {
0
}
fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
self.size()
}
fn vmhostarray_length(&self) -> u8 {
0
}
fn vmhostarray_capacity(&self) -> u8 {
4
}
fn vmhostarray_data(&self) -> u8 {
8
}
fn size_of_vmhostarray(&self) -> u8 {
8 + self.size()
}
fn vmcommon_stack_information_limits(&self) -> u8 {
0 * self.size()
}
fn vmcommon_stack_information_state(&self) -> u8 {
2 * self.size()
}
fn vmcommon_stack_information_handlers(&self) -> u8 {
u8::try_from(align(
self.vmcommon_stack_information_state() as u32 + 4,
u32::from(self.size()),
))
.unwrap()
}
fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
}
fn size_of_vmcommon_stack_information(&self) -> u8 {
u8::try_from(align(
self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
u32::from(self.size()),
))
.unwrap()
}
fn vmcontobj_contref(&self) -> u8 {
0
}
fn vmcontobj_revision(&self) -> u8 {
self.size()
}
fn size_of_vmcontobj(&self) -> u8 {
u8::try_from(align(
u32::from(self.vmcontobj_revision())
+ u32::try_from(core::mem::size_of::<usize>()).unwrap(),
u32::from(self.size()),
))
.unwrap()
}
fn vmcontref_common_stack_information(&self) -> u8 {
0 * self.size()
}
fn vmcontref_parent_chain(&self) -> u8 {
u8::try_from(align(
(self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
as u32,
u32::from(self.size()),
))
.unwrap()
}
fn vmcontref_last_ancestor(&self) -> u8 {
self.vmcontref_parent_chain() + 2 * self.size()
}
fn vmcontref_revision(&self) -> u8 {
self.vmcontref_last_ancestor() + self.size()
}
fn vmcontref_stack(&self) -> u8 {
self.vmcontref_revision() + self.size()
}
fn vmcontref_args(&self) -> u8 {
self.vmcontref_stack() + 3 * self.size()
}
fn vmcontref_values(&self) -> u8 {
self.vmcontref_args() + self.size_of_vmhostarray()
}
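// Offsets of the statically laid out fields at the start of a `VMContext`:
// magic, store context, builtin functions, epoch pointer, GC heap data, and
// the type-ids array, followed by the start of the dynamically sized data.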
#[inline]
fn vmctx_magic(&self) -> u8 {
0
}
#[inline]
fn vmctx_store_context(&self) -> u8 {
self.vmctx_magic() + self.size()
}
#[inline]
fn vmctx_builtin_functions(&self) -> u8 {
self.vmctx_store_context() + self.size()
}
#[inline]
fn vmctx_epoch_ptr(&self) -> u8 {
self.vmctx_builtin_functions() + self.size()
}
#[inline]
fn vmctx_gc_heap_data(&self) -> u8 {
self.vmctx_epoch_ptr() + self.size()
}
#[inline]
fn vmctx_type_ids_array(&self) -> u8 {
self.vmctx_gc_heap_data() + self.size()
}
#[inline]
fn vmctx_dynamic_data_start(&self) -> u8 {
self.vmctx_type_ids_array() + self.size()
}
}
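/// A `PtrSize` implementation using the host's pointer width.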
#[derive(Clone, Copy)]
pub struct HostPtr;
impl PtrSize for HostPtr {
#[inline]
fn size(&self) -> u8 {
core::mem::size_of::<usize>() as u8
}
}
impl PtrSize for u8 {
#[inline]
fn size(&self) -> u8 {
*self
}
}
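/// The raw item counts from which a `VMOffsets` is computed.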
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
pub ptr: P,
pub num_imported_functions: u32,
pub num_imported_tables: u32,
pub num_imported_memories: u32,
pub num_imported_globals: u32,
pub num_imported_tags: u32,
pub num_defined_tables: u32,
pub num_defined_memories: u32,
pub num_owned_memories: u32,
pub num_defined_globals: u32,
pub num_defined_tags: u32,
pub num_escaped_funcs: u32,
}
impl<P: PtrSize> VMOffsets<P> {
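/// Compute the offsets for `module` with the given pointer size. Only
/// non-shared defined memories are counted as owned by the instance.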
pub fn new(ptr: P, module: &Module) -> Self {
let num_owned_memories = module
.memories
.iter()
.skip(module.num_imported_memories)
.filter(|p| !p.1.shared)
.count()
.try_into()
.unwrap();
VMOffsets::from(VMOffsetsFields {
ptr,
num_imported_functions: cast_to_u32(module.num_imported_funcs),
num_imported_tables: cast_to_u32(module.num_imported_tables),
num_imported_memories: cast_to_u32(module.num_imported_memories),
num_imported_globals: cast_to_u32(module.num_imported_globals),
num_imported_tags: cast_to_u32(module.num_imported_tags),
num_defined_tables: cast_to_u32(module.num_defined_tables()),
num_defined_memories: cast_to_u32(module.num_defined_memories()),
num_owned_memories,
num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
})
}
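/// The target pointer size, in bytes.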
#[inline]
pub fn pointer_size(&self) -> u8 {
self.ptr.size()
}
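/// Iterate over the name and size, in bytes, of each region of the
/// `VMContext`, derived from the differences between adjacent offsets; the
/// final entry covers the statically sized data at the start.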
pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
macro_rules! calculate_sizes {
($($name:ident: $desc:tt,)*) => {{
let VMOffsets {
ptr: _,
num_imported_functions: _,
num_imported_tables: _,
num_imported_memories: _,
num_imported_globals: _,
num_imported_tags: _,
num_defined_tables: _,
num_defined_globals: _,
num_defined_memories: _,
num_defined_tags: _,
num_owned_memories: _,
num_escaped_funcs: _,
size,
$($name,)*
} = *self;
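// Offsets are visited from the end of the `VMContext` backwards, so each
// region's size is the distance to the previously visited offset.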
let mut last = size;
$(
assert!($name <= last);
let tmp = $name;
let $name = last - $name;
last = tmp;
)*
assert_ne!(last, 0);
IntoIterator::into_iter([
$(($desc, $name),)*
("static vmctx data", last),
])
}};
}
calculate_sizes! {
defined_func_refs: "module functions",
defined_tags: "defined tags",
defined_globals: "defined globals",
defined_tables: "defined tables",
imported_tags: "imported tags",
imported_globals: "imported globals",
imported_tables: "imported tables",
imported_functions: "imported functions",
owned_memories: "owned memories",
defined_memories: "defined memories",
imported_memories: "imported memories",
}
}
}
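/// Lay out the dynamically sized regions of the `VMContext` and record their
/// offsets along with the total size.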
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
let mut ret = Self {
ptr: fields.ptr,
num_imported_functions: fields.num_imported_functions,
num_imported_tables: fields.num_imported_tables,
num_imported_memories: fields.num_imported_memories,
num_imported_globals: fields.num_imported_globals,
num_imported_tags: fields.num_imported_tags,
num_defined_tables: fields.num_defined_tables,
num_defined_memories: fields.num_defined_memories,
num_owned_memories: fields.num_owned_memories,
num_defined_globals: fields.num_defined_globals,
num_defined_tags: fields.num_defined_tags,
num_escaped_funcs: fields.num_escaped_funcs,
imported_functions: 0,
imported_tables: 0,
imported_memories: 0,
imported_globals: 0,
imported_tags: 0,
defined_tables: 0,
defined_memories: 0,
owned_memories: 0,
defined_globals: 0,
defined_tags: 0,
defined_func_refs: 0,
size: 0,
};
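// Overflow-checked helpers used while accumulating offsets.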
#[inline]
fn cadd(count: u32, size: u32) -> u32 {
count.checked_add(size).unwrap()
}
#[inline]
fn cmul(count: u32, size: u8) -> u32 {
count.checked_mul(u32::from(size)).unwrap()
}
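// Dynamic data starts immediately after the static portion of the
// `VMContext`; each region is placed at the running offset, which is then
// advanced by the region's size.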
let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());
macro_rules! fields {
(size($field:ident) = $size:expr, $($rest:tt)*) => {
ret.$field = next_field_offset;
next_field_offset = cadd(next_field_offset, u32::from($size));
fields!($($rest)*);
};
(align($align:expr), $($rest:tt)*) => {
next_field_offset = align(next_field_offset, $align);
fields!($($rest)*);
};
() => {};
}
fields! {
size(imported_memories)
= cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
size(defined_memories)
= cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
size(owned_memories)
= cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
size(imported_functions)
= cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
size(imported_tables)
= cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
size(imported_globals)
= cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
size(imported_tags)
= cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
size(defined_tables)
= cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
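// Align so the 16-byte global definition slots are naturally aligned.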
align(16),
size(defined_globals)
= cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
size(defined_tags)
= cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
size(defined_func_refs) = cmul(
ret.num_escaped_funcs,
ret.ptr.size_of_vm_func_ref(),
),
}
ret.size = next_field_offset;
return ret;
}
}
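/// Offsets of the fields of a `VMFunctionImport`: the Wasm-call and
/// array-call entry points and the callee `vmctx`.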
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmfunction_import_wasm_call(&self) -> u8 {
0 * self.pointer_size()
}
#[inline]
pub fn vmfunction_import_array_call(&self) -> u8 {
1 * self.pointer_size()
}
#[inline]
pub fn vmfunction_import_vmctx(&self) -> u8 {
2 * self.pointer_size()
}
#[inline]
pub fn size_of_vmfunction_import(&self) -> u8 {
3 * self.pointer_size()
}
}
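/// Size of a pointer to a function body.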
impl<P: PtrSize> VMOffsets<P> {
pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
1 * self.pointer_size()
}
}
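/// Offsets of the fields of a `VMTableImport`.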
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmtable_import_from(&self) -> u8 {
0 * self.pointer_size()
}
#[inline]
pub fn vmtable_import_vmctx(&self) -> u8 {
1 * self.pointer_size()
}
#[inline]
pub fn vmtable_import_index(&self) -> u8 {
2 * self.pointer_size()
}
#[inline]
pub fn size_of_vmtable_import(&self) -> u8 {
3 * self.pointer_size()
}
}
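/// Offsets of the fields of a `VMTableDefinition`.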
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmtable_definition_base(&self) -> u8 {
0 * self.pointer_size()
}
pub fn vmtable_definition_current_elements(&self) -> u8 {
1 * self.pointer_size()
}
#[inline]
pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
self.pointer_size()
}
#[inline]
pub fn size_of_vmtable_definition(&self) -> u8 {
2 * self.pointer_size()
}
}
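/// Offsets of the fields of a `VMMemoryImport`.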
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmmemory_import_from(&self) -> u8 {
0 * self.pointer_size()
}
#[inline]
pub fn vmmemory_import_vmctx(&self) -> u8 {
1 * self.pointer_size()
}
#[inline]
pub fn vmmemory_import_index(&self) -> u8 {
2 * self.pointer_size()
}
#[inline]
pub fn size_of_vmmemory_import(&self) -> u8 {
3 * self.pointer_size()
}
}
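/// Offsets of the fields of a `VMGlobalImport`.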
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmglobal_import_from(&self) -> u8 {
0 * self.pointer_size()
}
#[inline]
pub fn size_of_vmglobal_import(&self) -> u8 {
2 * self.pointer_size() + 8
}
}
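/// Size of a `VMSharedTypeIndex`.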
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn size_of_vmshared_type_index(&self) -> u8 {
4
}
}
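/// Offsets of the fields of a `VMTagImport`.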
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmtag_import_from(&self) -> u8 {
0 * self.pointer_size()
}
#[inline]
pub fn vmtag_import_vmctx(&self) -> u8 {
1 * self.pointer_size()
}
#[inline]
pub fn vmtag_import_index(&self) -> u8 {
2 * self.pointer_size()
}
#[inline]
pub fn size_of_vmtag_import(&self) -> u8 {
3 * self.pointer_size()
}
}
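/// Offsets into a `VMContext` of each region and of the individual items laid
/// out by this `VMOffsets`.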
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vmctx_imported_functions_begin(&self) -> u32 {
self.imported_functions
}
#[inline]
pub fn vmctx_imported_tables_begin(&self) -> u32 {
self.imported_tables
}
#[inline]
pub fn vmctx_imported_memories_begin(&self) -> u32 {
self.imported_memories
}
#[inline]
pub fn vmctx_imported_globals_begin(&self) -> u32 {
self.imported_globals
}
#[inline]
pub fn vmctx_imported_tags_begin(&self) -> u32 {
self.imported_tags
}
#[inline]
pub fn vmctx_tables_begin(&self) -> u32 {
self.defined_tables
}
#[inline]
pub fn vmctx_memories_begin(&self) -> u32 {
self.defined_memories
}
#[inline]
pub fn vmctx_owned_memories_begin(&self) -> u32 {
self.owned_memories
}
#[inline]
pub fn vmctx_globals_begin(&self) -> u32 {
self.defined_globals
}
#[inline]
pub fn vmctx_tags_begin(&self) -> u32 {
self.defined_tags
}
#[inline]
pub fn vmctx_func_refs_begin(&self) -> u32 {
self.defined_func_refs
}
#[inline]
pub fn size_of_vmctx(&self) -> u32 {
self.size
}
#[inline]
pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
assert!(index.as_u32() < self.num_imported_functions);
self.vmctx_imported_functions_begin()
+ index.as_u32() * u32::from(self.size_of_vmfunction_import())
}
#[inline]
pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
assert!(index.as_u32() < self.num_imported_tables);
self.vmctx_imported_tables_begin()
+ index.as_u32() * u32::from(self.size_of_vmtable_import())
}
#[inline]
pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
assert!(index.as_u32() < self.num_imported_memories);
self.vmctx_imported_memories_begin()
+ index.as_u32() * u32::from(self.size_of_vmmemory_import())
}
#[inline]
pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
assert!(index.as_u32() < self.num_imported_globals);
self.vmctx_imported_globals_begin()
+ index.as_u32() * u32::from(self.size_of_vmglobal_import())
}
#[inline]
pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
assert!(index.as_u32() < self.num_imported_tags);
self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
}
#[inline]
pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
assert!(index.as_u32() < self.num_defined_tables);
self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
}
#[inline]
pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
assert!(index.as_u32() < self.num_defined_memories);
self.vmctx_memories_begin()
+ index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
}
#[inline]
pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
assert!(index.as_u32() < self.num_owned_memories);
self.vmctx_owned_memories_begin()
+ index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
}
#[inline]
pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
assert!(index.as_u32() < self.num_defined_globals);
self.vmctx_globals_begin()
+ index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
}
#[inline]
pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
assert!(index.as_u32() < self.num_defined_tags);
self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
}
#[inline]
pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
assert!(!index.is_reserved_value());
assert!(index.as_u32() < self.num_escaped_funcs);
self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
}
#[inline]
pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
}
#[inline]
pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
}
#[inline]
pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
}
#[inline]
pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
}
#[inline]
pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
}
#[inline]
pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
}
#[inline]
pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
}
#[inline]
pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
}
#[inline]
pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
self.vmctx_vmmemory_definition(index)
+ u32::from(self.ptr.vmmemory_definition_current_length())
}
#[inline]
pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
}
#[inline]
pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
}
#[inline]
pub fn vmctx_vmtag_import_vmctx(&self, index: TagIndex) -> u32 {
self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_vmctx())
}
#[inline]
pub fn vmctx_vmtag_import_index(&self, index: TagIndex) -> u32 {
self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_index())
}
}
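/// Offsets within the common GC object header: the kind (whose `u32` also
/// carries the reserved bits) and the type index.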
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vm_gc_header_kind(&self) -> u32 {
0
}
#[inline]
pub fn vm_gc_header_reserved_bits(&self) -> u32 {
self.vm_gc_header_kind()
}
#[inline]
pub fn vm_gc_header_ty(&self) -> u32 {
self.vm_gc_header_kind() + 4
}
}
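/// Offsets within the object header used by the deferred reference-counting
/// (DRC) collector; these fields follow the common GC header.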
impl<P: PtrSize> VMOffsets<P> {
#[inline]
pub fn vm_drc_header_ref_count(&self) -> u32 {
8
}
#[inline]
pub fn vm_drc_header_next_over_approximated_stack_root(&self) -> u32 {
self.vm_drc_header_ref_count() + 8
}
}
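/// Magic value stored at the start of a `VMContext`, built from the bytes of
/// `b"core"`.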
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");
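/// Magic value identifying an array-call host function context, built from
/// the bytes of `b"ACHF"`.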
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
#[cfg(test)]
mod tests {
use crate::vmoffsets::align;
#[test]
fn alignment() {
fn is_aligned(x: u32) -> bool {
x % 16 == 0
}
assert!(is_aligned(align(0, 16)));
assert!(is_aligned(align(32, 16)));
assert!(is_aligned(align(33, 16)));
assert!(is_aligned(align(31, 16)));
}
}