use crate::PtrSize;
use crate::component::*;
/// Magic value stored in the `magic` field of a `VMComponentContext` — the
/// bytes `"comp"` interpreted as a little-endian `u32`. The layout computed
/// below places this field at offset 0 (asserted in `new`), presumably so
/// runtime code can sanity-check that a raw pointer really refers to a
/// component context — TODO confirm against the runtime check site.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");
/// Bit 0 of a component instance's flags global (see `instance_flags`).
/// NOTE(review): by its name this tracks the canonical ABI "may_leave"
/// state; confirm against the code that reads/writes these flags.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;
/// Precomputed byte offsets of the fields within a `VMComponentContext`,
/// specialized to one component's shape and one host pointer size.
///
/// The public `num_*` counts are copied out of the `Component` passed to
/// [`VMComponentOffsets::new`]; the private `u32` fields are byte offsets
/// into the context, all computed by `new` below.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size (a `PtrSize` implementation) the layout is
    /// computed for.
    pub ptr: P,
    /// Number of lowered functions in the component.
    pub num_lowerings: u32,
    /// Number of runtime linear memories.
    pub num_runtime_memories: u32,
    /// Number of runtime tables.
    pub num_runtime_tables: u32,
    /// Number of runtime `realloc` functions.
    pub num_runtime_reallocs: u32,
    /// Number of runtime callback functions.
    pub num_runtime_callbacks: u32,
    /// Number of runtime `post-return` functions.
    pub num_runtime_post_returns: u32,
    /// Number of component instances at runtime (one flags global each).
    pub num_runtime_component_instances: u32,
    /// Number of trampolines (from `component.trampolines.len()`).
    pub num_trampolines: u32,
    /// Number of unsafe-intrinsic func-ref slots reserved; only counts up
    /// to the highest intrinsic actually present (see `new`).
    pub num_unsafe_intrinsics: u32,
    /// Number of resources (one destructor slot each).
    pub num_resources: u32,

    // Byte offsets of each field in the context, filled in by `new`.
    // `magic` is asserted to land at offset 0.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    task_may_block: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    // Total size in bytes of the whole context.
    size: u32,
}
/// Rounds `offset` up to the next multiple of `align`.
///
/// # Panics
///
/// Panics if `align` is not a power of two, or if the rounded result would
/// overflow `u32`. The explicit `checked_add` keeps this consistent with
/// the checked arithmetic used everywhere else in this file (`cmul`, the
/// `fields!` accumulator) — the original `offset + (align - 1)` would wrap
/// silently in release builds.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    offset.checked_add(align - 1).unwrap() & !(align - 1)
}
impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets describing the `VMComponentContext`
    /// layout of `component` when targeting the pointer size `ptr`.
    ///
    /// # Panics
    ///
    /// Panics if any count doesn't fit in `u32` or if the total size
    /// overflows `u32` (all layout arithmetic is checked).
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            // Only reserve func-ref slots up to (and including) the last
            // intrinsic that's actually present; trailing absent entries
            // need no space.
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            // All offsets below are filled in by the `fields!` expansion.
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            task_may_block: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Multiplies `count` by `size`, panicking on overflow so a
        // malformed component can't yield a bogus (wrapped) layout.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // Lays out each field in declaration order: records the current
        // offset into `ret.$field`, then advances by the field's size with
        // checked addition. `align(..)` entries round the running offset up
        // to the requested alignment before the next field is placed.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            size(task_may_block) = ret.ptr.size_of_vmglobal_definition(),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // `magic` must be the very first field so it can be read without
        // knowing anything else about the layout.
        assert_eq!(ret.magic, 0);

        ret
    }

    /// The pointer size, in bytes, that this layout was computed for.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Byte offset of the magic value (always 0; see `new`).
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// Byte offset of the builtins pointer.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// Byte offset of the flags global for component instance `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Byte offset of the `task_may_block` global.
    // NOTE: `#[inline]` added for consistency with every sibling accessor.
    #[inline]
    pub fn task_may_block(&self) -> u32 {
        self.task_may_block
    }

    /// Byte offset of the store-context pointer.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// Byte offset of the start of the trampoline func-ref array.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// Byte offset of the func-ref for trampoline `index`.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// Byte offset of the start of the unsafe-intrinsic func-ref array.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// Byte offset of the func-ref for `intrinsic`.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// Byte offset of the start of the lowerings array.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// Byte offset of the lowering entry for `index`. Each entry is two
    /// pointers wide: callee then data (see the offsets below).
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// Byte offset of the callee pointer of the lowering at `index`.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// Byte offset of the data pointer of the lowering at `index`.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// Size in bytes of one lowering entry (two pointers).
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// Offset of the callee pointer within a lowering entry.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// Offset of the data pointer within a lowering entry.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-memories array.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// Byte offset of the memory pointer for `index`.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-tables array.
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// Byte offset of the table import for `index`.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Size in bytes of one table-import entry (three pointers).
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// Byte offset of the start of the reallocs array.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// Byte offset of the realloc pointer for `index`.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the callbacks array.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// Byte offset of the callback pointer for `index`.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the post-returns array.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// Byte offset of the post-return pointer for `index`.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the resource-destructors array.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// Byte offset of the destructor pointer for resource `index`.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Total size in bytes of the whole `VMComponentContext`.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}