use crate::Definition;
use crate::module::ModuleRegistry;
use crate::prelude::*;
use crate::runtime::HostFunc;
use crate::runtime::vm::{AlwaysMut, SendSyncPtr, VMArrayCallHostFuncContext, VMFuncRef};
use alloc::sync::Arc;
use core::mem::size_of;
use core::ptr::NonNull;
/// An anchor for `VMFuncRef`s whose `wasm_call` trampoline may not be
/// known at creation time.
///
/// Func refs are bump-allocated so their addresses stay stable for the
/// lifetime of this `FuncRefs`; entries still missing a trampoline are
/// tracked in `with_holes` and patched later by `FuncRefs::fill`.
#[derive(Default)]
pub struct FuncRefs {
    // Arena that owns every `VMFuncRef` handed out by `push`. Bump
    // allocations are never individually freed, so pointers into it
    // remain valid as long as `self` is alive.
    bump: AlwaysMut<bumpalo::Bump>,
    // Pointers into `bump` whose `wasm_call` is still `None`, awaiting
    // a wasm-to-array trampoline registered by a later module.
    with_holes: TryVec<SendSyncPtr<VMFuncRef>>,
    // Keep-alive entries for the owners of data the bump-allocated
    // func refs reference (see `Storage`).
    storage: TryVec<Storage>,
}
/// Keep-alive storage backing the func refs in `FuncRefs::bump`.
///
/// Each variant exists only to hold ownership of a value so it outlives
/// the bump-allocated `VMFuncRef`s that (presumably) point into it —
/// nothing ever reads the payloads, hence the `dead_code` expectations.
enum Storage {
    // Pre-instantiation definitions kept alive via `Arc`.
    InstancePreDefinitions {
        #[expect(dead_code, reason = "only here to keep the original value alive")]
        defs: Arc<TryVec<Definition>>,
    },
    // Pre-instantiation func refs kept alive via `Arc`.
    InstancePreFuncRefs {
        #[expect(dead_code, reason = "only here to keep the original value alive")]
        funcs: Arc<TryVec<VMFuncRef>>,
    },
    // A uniquely-owned host function.
    BoxHost {
        #[expect(dead_code, reason = "only here to keep the original value alive")]
        func: Box<HostFunc>,
    },
    // A shared host function.
    ArcHost {
        #[expect(dead_code, reason = "only here to keep the original value alive")]
        func: Arc<HostFunc>,
    },
}
impl FuncRefs {
    /// Moves `func_ref` into the bump arena and returns a stable pointer
    /// to it.
    ///
    /// If no wasm-to-array trampoline for `func_ref.type_index` is
    /// registered in `modules` yet, the slot is recorded in `with_holes`
    /// so that a later [`Self::fill`] can patch it.
    ///
    /// # Errors
    ///
    /// Returns `OutOfMemory` if the arena or the hole list cannot grow.
    ///
    /// # Safety
    ///
    /// `func_ref.vmctx` must point at a valid
    /// `VMArrayCallHostFuncContext`, and the caller must keep whatever
    /// owns that context alive for as long as the returned pointer is
    /// used (see `Storage`).
    pub unsafe fn push(
        &mut self,
        func_ref: VMFuncRef,
        modules: &ModuleRegistry,
    ) -> Result<NonNull<VMFuncRef>, OutOfMemory> {
        debug_assert!(func_ref.wasm_call.is_none());
        // Bump-allocate the slot; its address is stable from here on.
        let slot = match self.bump.get_mut().try_alloc(func_ref) {
            Ok(slot) => slot,
            Err(_) => return Err(OutOfMemory::new(size_of::<VMFuncRef>())),
        };
        // SAFETY: upheld by this function's own safety contract.
        let filled = unsafe { try_fill(slot, modules) };
        let ptr = SendSyncPtr::from(slot);
        if !filled {
            // No trampoline available yet — remember the slot so `fill`
            // can patch it once more modules are registered.
            self.with_holes.push(ptr)?;
        }
        Ok(ptr.as_non_null())
    }

    /// Patches every recorded hole whose trampoline has since become
    /// available, removing the patched entries from the hole list.
    pub fn fill(&mut self, modules: &ModuleRegistry) {
        self.with_holes.retain_mut(|slot| {
            // SAFETY: `slot` points into our bump arena and was created
            // from a valid host-func `VMFuncRef` in `push`.
            let patched = unsafe { try_fill(slot.as_mut(), modules) };
            // Keep only the entries that are still unpatched.
            !patched
        });
    }

    /// Pre-reserves capacity for `amt` additional keep-alive entries.
    pub fn reserve_storage(&mut self, amt: usize) -> Result<(), OutOfMemory> {
        self.storage.reserve(amt)
    }

    /// Keeps a shared list of pre-instantiation func refs alive for the
    /// lifetime of `self`.
    pub fn push_instance_pre_func_refs(
        &mut self,
        funcs: Arc<TryVec<VMFuncRef>>,
    ) -> Result<(), OutOfMemory> {
        let entry = Storage::InstancePreFuncRefs { funcs };
        self.storage.push(entry)
    }

    /// Keeps a shared list of pre-instantiation definitions alive for
    /// the lifetime of `self`.
    pub fn push_instance_pre_definitions(
        &mut self,
        defs: Arc<TryVec<Definition>>,
    ) -> Result<(), OutOfMemory> {
        let entry = Storage::InstancePreDefinitions { defs };
        self.storage.push(entry)
    }

    /// Registers a shared host function: its func ref is copied into the
    /// arena and `func` itself is kept alive in `storage`.
    ///
    /// # Errors
    ///
    /// Returns `OutOfMemory` if any internal allocation fails.
    // NOTE(review): if `storage.push` fails after `self.push` succeeded,
    // the bump slot (possibly in `with_holes`) outlives `func` — looks
    // unreachable in practice if callers `reserve_storage` first; confirm.
    pub fn push_arc_host(
        &mut self,
        func: Arc<HostFunc>,
        modules: &ModuleRegistry,
    ) -> Result<NonNull<VMFuncRef>, OutOfMemory> {
        debug_assert!(func.func_ref().wasm_call.is_none());
        let func_ref = func.func_ref().clone();
        // SAFETY: `func`'s vmctx is a host-func context and is kept
        // alive below via `Storage::ArcHost`.
        let ptr = unsafe { self.push(func_ref, modules)? };
        self.storage.push(Storage::ArcHost { func })?;
        Ok(ptr)
    }

    /// Registers a uniquely-owned host function: its func ref is copied
    /// into the arena and `func` itself is kept alive in `storage`.
    ///
    /// # Errors
    ///
    /// Returns `OutOfMemory` if any internal allocation fails.
    pub fn push_box_host(
        &mut self,
        func: Box<HostFunc>,
        modules: &ModuleRegistry,
    ) -> Result<NonNull<VMFuncRef>, OutOfMemory> {
        debug_assert!(func.func_ref().wasm_call.is_none());
        let func_ref = func.func_ref().clone();
        // SAFETY: `func`'s vmctx is a host-func context and is kept
        // alive below via `Storage::BoxHost`.
        let ptr = unsafe { self.push(func_ref, modules)? };
        self.storage.push(Storage::BoxHost { func })?;
        Ok(ptr)
    }
}
/// Attempts to patch `func_ref.wasm_call` with a wasm-to-array
/// trampoline from `modules`, returning `true` on success.
///
/// # Safety
///
/// `func_ref.vmctx` must point at a valid `VMArrayCallHostFuncContext`.
unsafe fn try_fill(func_ref: &mut VMFuncRef, modules: &ModuleRegistry) -> bool {
    debug_assert!(func_ref.wasm_call.is_none());
    // SAFETY: guaranteed by this function's safety contract; the cast
    // exists purely for its debug-time validation of `vmctx`.
    unsafe {
        let _ = VMArrayCallHostFuncContext::from_opaque(func_ref.vmctx.as_non_null());
    }
    // Look up a trampoline for this type among the modules registered
    // so far; `None` leaves the hole open for a later attempt.
    let trampoline = modules.wasm_to_array_trampoline(func_ref.type_index);
    func_ref.wasm_call = trampoline.map(Into::into);
    func_ref.wasm_call.is_some()
}