use crate::prelude::*;
use crate::runtime::vm::{
ExternRefHostDataId, ExternRefHostDataTable, GcHeapObject, SendSyncPtr, TypedGcRef, VMArrayRef,
VMExternRef, VMGcHeader, VMGcObjectData, VMGcRef,
};
use crate::store::Asyncness;
use crate::vm::VMMemoryDefinition;
use core::ptr::NonNull;
use core::slice;
use core::{alloc::Layout, any::Any, marker, mem, ops::Range, ptr};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, GcTypeLayouts, VMSharedTypeIndex};
/// A garbage-collector runtime: shared type-layout information plus a
/// factory for per-store GC heaps.
///
/// # Safety
///
/// Unsafe to implement: unsafe code elsewhere presumably relies on
/// implementations upholding the `GcHeap` contract — confirm against
/// implementors.
pub unsafe trait GcRuntime: 'static + Send + Sync {
/// Get the layout information for GC-managed types.
fn layouts(&self) -> &dyn GcTypeLayouts;
/// Construct a new, as-yet-unattached GC heap for the given engine.
#[cfg(feature = "gc")]
fn new_gc_heap(&self, engine: &crate::Engine) -> Result<Box<dyn GcHeap>>;
}
/// A GC heap backed by a linear `Memory`.
///
/// GC objects live at byte offsets ("heap indices") inside the attached
/// memory; the default methods below build safe views over those bytes.
///
/// # Safety
///
/// Unsafe to implement: unsafe code relies on the GC refs, headers, and
/// object ranges reported by these methods being accurate and in-bounds.
pub unsafe trait GcHeap: 'static + Send + Sync {
/// Is a linear memory currently attached to this heap?
fn is_attached(&self) -> bool;
/// Attach the given memory as this heap's backing storage.
fn attach(&mut self, memory: crate::vm::Memory);
/// Detach and return the backing memory.
fn detach(&mut self) -> crate::vm::Memory;
/// Downcast support for concrete heap implementations.
fn as_any(&self) -> &dyn Any;
/// Mutable downcast support.
fn as_any_mut(&mut self) -> &mut dyn Any;
/// Enter a scope during which no collection may occur.
fn enter_no_gc_scope(&mut self);
/// Exit the innermost no-GC scope.
fn exit_no_gc_scope(&mut self);
/// Produce another GC reference to the same object (implementation-
/// specific; presumably e.g. a reference-count bump — see implementors).
fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef;
/// Drop a GC reference.
///
/// Default implementation: overwrite it with `None` via `write_gc_ref`,
/// which releases the old destination value.
fn drop_gc_ref(&mut self, host_data_table: &mut ExternRefHostDataTable, gc_ref: VMGcRef) {
let mut dest = Some(gc_ref);
self.write_gc_ref(host_data_table, &mut dest, None);
}
/// Write `source` over `destination`, releasing the old destination
/// value.
///
/// The host-data table is passed so that releasing an `externref` can
/// also free its associated host data.
fn write_gc_ref(
&mut self,
host_data_table: &mut ExternRefHostDataTable,
destination: &mut Option<VMGcRef>,
source: Option<&VMGcRef>,
);
/// Hand ownership of `gc_ref` over to Wasm.
fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef);
/// Allocate an `externref` wrapping the given host-data id.
///
/// Outer `Err` is a fatal error; inner `Err(u64)` reports an allocation
/// failure (presumably the bytes needed before a retry can succeed —
/// TODO confirm against implementors).
fn alloc_externref(
&mut self,
host_data: ExternRefHostDataId,
) -> Result<Result<VMExternRef, u64>>;
/// Get the host-data id stored in the given `externref`.
fn externref_host_data(&self, externref: &VMExternRef) -> ExternRefHostDataId;
/// Read the object header for `gc_ref`.
fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader;
/// Mutable access to the object header for `gc_ref`.
fn header_mut(&mut self, gc_ref: &VMGcRef) -> &mut VMGcHeader;
/// Size in bytes of the object referenced by `gc_ref`.
fn object_size(&self, gc_ref: &VMGcRef) -> usize;
/// Allocate a raw object with the given header and layout; same nested
/// `Result` convention as `alloc_externref`.
fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>>;
/// Allocate an uninitialized struct-or-exception object of the given
/// type; same nested `Result` convention as `alloc_externref`.
fn alloc_uninit_struct_or_exn(
&mut self,
ty: VMSharedTypeIndex,
layout: &GcStructLayout,
) -> Result<Result<VMGcRef, u64>>;
/// Deallocate an object from `alloc_uninit_struct_or_exn` that was never
/// initialized.
fn dealloc_uninit_struct_or_exn(&mut self, structref: VMGcRef);
/// Allocate an uninitialized array of `len` elements; same nested
/// `Result` convention as `alloc_externref`.
fn alloc_uninit_array(
&mut self,
ty: VMSharedTypeIndex,
len: u32,
layout: &GcArrayLayout,
) -> Result<Result<VMArrayRef, u64>>;
/// Deallocate an array from `alloc_uninit_array` that was never
/// initialized.
fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef);
/// Length (in elements) of the given array.
fn array_len(&self, arrayref: &VMArrayRef) -> u32;
/// Begin a collection over the given roots, returning an incremental
/// collection the caller drives to completion.
fn gc<'a>(
&'a mut self,
roots: GcRootsIter<'a>,
host_data_table: &'a mut ExternRefHostDataTable,
) -> Box<dyn GarbageCollection<'a> + 'a>;
/// Pointer to this heap's vmctx-accessible data.
///
/// # Safety
///
/// Contract not visible in this file — see implementors for what callers
/// must uphold.
unsafe fn vmctx_gc_heap_data(&self) -> NonNull<u8>;
/// Take the backing memory out of this heap.
fn take_memory(&mut self) -> crate::vm::Memory;
/// Put a backing memory back after it grew by `delta_bytes_grown` bytes.
///
/// # Safety
///
/// Presumably `memory` must be the memory previously obtained from
/// `take_memory` — confirm against callers.
unsafe fn replace_memory(&mut self, memory: crate::vm::Memory, delta_bytes_grown: u64);
/// A `VMMemoryDefinition` (base pointer + current length) describing the
/// attached memory.
fn vmmemory(&self) -> VMMemoryDefinition;
/// The entire heap as a byte slice.
#[inline]
fn heap_slice(&self) -> &[u8] {
let vmmemory = self.vmmemory();
let ptr = vmmemory.base.as_ptr().cast_const();
let len = vmmemory.current_length();
// SAFETY: relies on `vmmemory()` describing a live allocation of `len`
// bytes that stays valid for the duration of this borrow of `self`.
unsafe { slice::from_raw_parts(ptr, len) }
}
/// The entire heap as a mutable byte slice.
#[inline]
fn heap_slice_mut(&mut self) -> &mut [u8] {
let vmmemory = self.vmmemory();
let ptr = vmmemory.base.as_ptr();
let len = vmmemory.current_length();
// SAFETY: as in `heap_slice`; `&mut self` prevents aliasing views
// handed out through this API.
unsafe { slice::from_raw_parts_mut(ptr, len) }
}
/// View the heap bytes at `gc_ref`'s index as a `&T`.
///
/// Panics if the ref has no heap index or the range is out of bounds.
/// NOTE(review): assumes `T`'s alignment is satisfied at this offset —
/// presumably guaranteed via `GcHeapObject`; confirm.
#[inline]
fn index<T>(&self, gc_ref: &TypedGcRef<T>) -> &T
where
Self: Sized,
T: GcHeapObject,
{
// A `T` overlaying raw heap bytes must never run a destructor.
assert!(!mem::needs_drop::<T>());
let gc_ref = gc_ref.as_untyped();
let start = gc_ref.as_heap_index().unwrap().get();
let start = usize::try_from(start).unwrap();
let len = mem::size_of::<T>();
// Bounds-check the object's bytes before the raw cast below.
let slice = &self.heap_slice()[start..][..len];
// SAFETY: `slice` spans exactly `size_of::<T>()` in-bounds heap bytes.
unsafe { &*(slice.as_ptr().cast::<T>()) }
}
/// Mutable counterpart of `index`; same panics and caveats.
#[inline]
fn index_mut<T>(&mut self, gc_ref: &TypedGcRef<T>) -> &mut T
where
Self: Sized,
T: GcHeapObject,
{
// A `T` overlaying raw heap bytes must never run a destructor.
assert!(!mem::needs_drop::<T>());
let gc_ref = gc_ref.as_untyped();
let start = gc_ref.as_heap_index().unwrap().get();
let start = usize::try_from(start).unwrap();
let len = mem::size_of::<T>();
// Bounds-check the object's bytes before the raw cast below.
let slice = &mut self.heap_slice_mut()[start..][..len];
// SAFETY: `slice` spans exactly `size_of::<T>()` in-bounds heap bytes.
unsafe { &mut *(slice.as_mut_ptr().cast::<T>()) }
}
/// The byte range that `gc_ref`'s object occupies within the heap.
///
/// Panics if the ref has no heap index or the end offset overflows.
fn object_range(&self, gc_ref: &VMGcRef) -> Range<usize> {
let start = gc_ref.as_heap_index().unwrap().get();
let start = usize::try_from(start).unwrap();
let size = self.object_size(gc_ref);
let end = start.checked_add(size).unwrap();
start..end
}
/// Borrow the object's bytes as `VMGcObjectData`.
fn gc_object_data(&self, gc_ref: &VMGcRef) -> &VMGcObjectData {
let range = self.object_range(gc_ref);
let data = &self.heap_slice()[range];
data.into()
}
/// Mutably borrow the object's bytes as `VMGcObjectData`.
fn gc_object_data_mut(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
let range = self.object_range(gc_ref);
let data = &mut self.heap_slice_mut()[range];
data.into()
}
/// Get mutable object data for two distinct objects at once.
///
/// Panics if `a == b` or if their byte ranges overlap.
fn gc_object_data_pair(
&mut self,
a: &VMGcRef,
b: &VMGcRef,
) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
assert_ne!(a, b);
let a_range = self.object_range(a);
let b_range = self.object_range(b);
// The ranges must be well-formed and disjoint so the split below
// yields non-aliasing mutable borrows.
assert!(a_range.start <= a_range.end);
assert!(b_range.start <= b_range.end);
assert!(a_range.end <= b_range.start || b_range.end <= a_range.start);
// Split the heap at the later object's start; each half then contains
// exactly one of the two ranges.
let (a_data, b_data) = if a_range.start < b_range.start {
let (a_half, b_half) = self.heap_slice_mut().split_at_mut(b_range.start);
let b_len = b_range.end - b_range.start;
(&mut a_half[a_range], &mut b_half[..b_len])
} else {
let (b_half, a_half) = self.heap_slice_mut().split_at_mut(a_range.start);
let a_len = a_range.end - a_range.start;
(&mut a_half[..a_len], &mut b_half[b_range])
};
(a_data.into(), b_data.into())
}
}
/// A list of raw GC roots, accumulated so a collection can trace from them.
#[derive(Default)]
pub struct GcRootsList(Vec<RawGcRoot>);
/// A raw pointer to a GC root.
///
/// `Stack` roots point at a raw `u32` GC reference on the Wasm stack;
/// `NonStack` roots point at a full `VMGcRef` stored elsewhere.
#[derive(Clone, Copy, Debug)]
#[cfg_attr(
not(feature = "gc"),
expect(
dead_code,
reason = "not worth it at this time to #[cfg] away these variants",
)
)]
enum RawGcRoot {
Stack(SendSyncPtr<u32>),
NonStack(SendSyncPtr<VMGcRef>),
}
#[cfg(feature = "gc")]
impl GcRootsList {
    /// Add a GC root that lives on the Wasm stack.
    ///
    /// # Safety
    ///
    /// `ptr_to_root` must point to a valid, non-null raw GC reference that
    /// remains valid for as long as this root stays in the list.
    #[inline]
    pub unsafe fn add_wasm_stack_root(&mut self, ptr_to_root: SendSyncPtr<u32>) {
        // SAFETY: the caller promises `ptr_to_root` is valid to read.
        unsafe {
            let raw = *ptr_to_root.as_ref();
            log::trace!(
                "Adding Wasm stack root: {:#p} -> {:#p}",
                ptr_to_root,
                VMGcRef::from_raw_u32(raw).unwrap()
            );
            debug_assert!(VMGcRef::from_raw_u32(raw).is_some());
        }
        self.0.push(RawGcRoot::Stack(ptr_to_root));
    }

    /// Add a GC root that does not live on the Wasm stack; `why` is a
    /// human-readable tag used only in trace logging.
    ///
    /// # Safety
    ///
    /// `ptr_to_root` must point to a valid `VMGcRef` that remains valid for
    /// as long as this root stays in the list.
    #[inline]
    pub unsafe fn add_root(&mut self, ptr_to_root: SendSyncPtr<VMGcRef>, why: &str) {
        // SAFETY: the caller promises `ptr_to_root` is valid to read.
        unsafe {
            log::trace!(
                "Adding non-stack root: {why}: {:#p}",
                ptr_to_root.as_ref().unchecked_copy()
            );
        }
        self.0.push(RawGcRoot::NonStack(ptr_to_root));
    }

    /// Iterate over the roots currently in this list.
    ///
    /// # Safety
    ///
    /// Presumably every recorded root pointer must still be valid when the
    /// iterator's items are used — confirm against callers.
    #[inline]
    pub unsafe fn iter<'a>(&'a mut self) -> GcRootsIter<'a> {
        GcRootsIter { list: self, index: 0 }
    }

    /// Is this list empty?
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Remove all roots from the list.
    #[inline]
    pub fn clear(&mut self) {
        self.0.clear();
    }
}
/// An iterator over the roots in a `GcRootsList`.
pub struct GcRootsIter<'a> {
// The list being iterated; held mutably so yielded roots may be updated.
list: &'a mut GcRootsList,
// Index of the next root to yield.
index: usize,
}
impl<'a> Iterator for GcRootsIter<'a> {
    type Item = GcRoot<'a>;

    /// Yield the next root, or `None` once the list is exhausted.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let raw = self.list.0.get(self.index).copied()?;
        self.index += 1;
        Some(GcRoot {
            raw,
            _phantom: marker::PhantomData,
        })
    }
}
/// A single GC root yielded by `GcRootsIter`.
///
/// The phantom `&'a mut VMGcRef` ties each root to the mutable borrow of
/// the underlying `GcRootsList`.
#[derive(Debug)]
pub struct GcRoot<'a> {
raw: RawGcRoot,
_phantom: marker::PhantomData<&'a mut VMGcRef>,
}
impl GcRoot<'_> {
    /// Does this root live on the Wasm stack (as opposed to inside some
    /// other data structure)?
    #[inline]
    pub fn is_on_wasm_stack(&self) -> bool {
        match self.raw {
            RawGcRoot::Stack(_) => true,
            RawGcRoot::NonStack(_) => false,
        }
    }

    /// Read this root's current GC reference.
    ///
    /// Panics (`expect`) if a stack slot holds a null raw reference.
    #[inline]
    pub fn get(&self) -> VMGcRef {
        match self.raw {
            RawGcRoot::Stack(ptr) => {
                // SAFETY: the roots-list builder recorded a pointer that is
                // valid for as long as this `GcRoot` exists.
                let raw: u32 = unsafe { ptr::read(ptr.as_ptr()) };
                VMGcRef::from_raw_u32(raw).expect("non-null")
            }
            RawGcRoot::NonStack(ptr) => {
                // SAFETY: as above; reads the `VMGcRef` out without
                // disturbing the original slot.
                unsafe { ptr::read(ptr.as_ptr()) }
            }
        }
    }

    /// Overwrite this root's slot with `new_ref`.
    pub fn set(&mut self, new_ref: VMGcRef) {
        match self.raw {
            RawGcRoot::Stack(ptr) => {
                // SAFETY: as in `get`; stack slots store the raw u32 form.
                unsafe { ptr::write(ptr.as_ptr(), new_ref.as_raw_u32()) }
            }
            RawGcRoot::NonStack(ptr) => {
                // SAFETY: as in `get`.
                unsafe { ptr::write(ptr.as_ptr(), new_ref) }
            }
        }
    }
}
/// An in-progress, incremental garbage collection.
pub trait GarbageCollection<'a>: Send + Sync {
    /// Perform a single increment of collection work.
    fn collect_increment(&mut self) -> GcProgress;

    /// Drive the collection all the way to completion.
    fn collect(&mut self) {
        while let GcProgress::Continue = self.collect_increment() {}
    }
}
/// Progress reported by one increment of collection work.
pub enum GcProgress {
/// More work remains; call `collect_increment` again.
Continue,
/// The collection is finished.
Complete,
}
/// Drive `collection` to completion, yielding to the async executor between
/// increments when `asyncness` requests cooperative yielding.
///
/// When the `async` feature is disabled the yield point compiles away and
/// the increments run back to back.
pub async fn collect_async<'a>(
    mut collection: Box<dyn GarbageCollection<'a> + 'a>,
    asyncness: Asyncness,
) {
    while let GcProgress::Continue = collection.collect_increment() {
        if asyncness != Asyncness::No {
            #[cfg(feature = "async")]
            crate::runtime::vm::Yield::new().await
        }
    }
}
#[cfg(all(test, feature = "async"))]
mod collect_async_tests {
use super::*;
// Compile-time check: the future returned by `collect_async` must be
// `Send + Sync`. The inner functions are never called; they only need
// to type-check.
#[test]
fn is_send_and_sync() {
fn _assert_send_sync<T: Send + Sync>(_: T) {}
fn _foo<'a>(collection: Box<dyn GarbageCollection<'a>>) {
_assert_send_sync(collect_async(collection, Asyncness::Yes));
}
}
}