#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;
#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;
mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;
pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;
use crate::prelude::*;
use crate::runtime::vm::GcHeapAllocationIndex;
use core::alloc::Layout;
use core::ptr;
use core::{any::Any, num::NonZeroUsize};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
/// The GC-related state for a single store: the GC heap itself plus the
/// side tables for host data and function references that GC objects refer to.
pub struct GcStore {
    /// This heap's allocation index.
    /// NOTE(review): presumably identifies this heap within a (pooling)
    /// allocator so it can be returned on drop — TODO confirm against callers.
    pub allocation_index: GcHeapAllocationIndex,

    /// The actual GC heap implementation. When GC support is unavailable this
    /// is a `DisabledGcHeap` (see `disabled_gc_heap` below), whose allocation
    /// methods all fail.
    pub gc_heap: Box<dyn GcHeap>,

    /// Table mapping `externref` host-data ids to their boxed host values;
    /// entries are allocated/deallocated in `alloc_externref`.
    pub host_data_table: ExternRefHostDataTable,

    /// Table of function references.
    pub func_ref_table: FuncRefTable,
}
impl GcStore {
pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
let host_data_table = ExternRefHostDataTable::default();
let func_ref_table = FuncRefTable::default();
Self {
allocation_index,
gc_heap,
host_data_table,
func_ref_table,
}
}
pub fn gc(&mut self, roots: GcRootsIter<'_>) {
let mut collection = self.gc_heap.gc(roots, &mut self.host_data_table);
collection.collect();
}
#[cfg(feature = "async")]
pub async fn gc_async(&mut self, roots: GcRootsIter<'_>) {
let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
collect_async(collection).await;
}
pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
debug_assert!(!gc_ref.is_i31());
self.header(gc_ref).kind()
}
pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
debug_assert!(!gc_ref.is_i31());
self.gc_heap.header(gc_ref)
}
pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
if gc_ref.is_i31() {
gc_ref.unchecked_copy()
} else {
self.gc_heap.clone_gc_ref(gc_ref)
}
}
pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
if destination.as_ref().map_or(true, |d| d.is_i31())
&& source.as_ref().map_or(true, |s| s.is_i31())
{
*destination = source.map(|s| s.unchecked_copy());
return;
}
self.gc_heap
.write_gc_ref(&mut self.host_data_table, destination, source);
}
pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
if !gc_ref.is_i31() {
self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
}
}
pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) {
if !gc_ref.is_i31() {
log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
}
}
pub fn alloc_externref(
&mut self,
value: Box<dyn Any + Send + Sync>,
) -> Result<Result<VMExternRef, Box<dyn Any + Send + Sync>>> {
let host_data_id = self.host_data_table.alloc(value);
match self.gc_heap.alloc_externref(host_data_id)? {
#[cfg_attr(not(feature = "gc"), allow(unreachable_patterns))]
Some(x) => Ok(Ok(x)),
None => Ok(Err(self.host_data_table.dealloc(host_data_id))),
}
}
pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
let host_data_id = self.gc_heap.externref_host_data(externref);
self.host_data_table.get(host_data_id)
}
pub fn externref_host_data_mut(
&mut self,
externref: &VMExternRef,
) -> &mut (dyn Any + Send + Sync) {
let host_data_id = self.gc_heap.externref_host_data(externref);
self.host_data_table.get_mut(host_data_id)
}
pub fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Option<VMGcRef>> {
self.gc_heap.alloc_raw(header, layout)
}
pub fn alloc_uninit_struct(
&mut self,
ty: VMSharedTypeIndex,
layout: &GcStructLayout,
) -> Result<Option<VMStructRef>> {
self.gc_heap.alloc_uninit_struct(ty, layout)
}
pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
self.gc_heap.dealloc_uninit_struct(structref);
}
pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
self.gc_heap.gc_object_data(gc_ref)
}
pub fn alloc_uninit_array(
&mut self,
ty: VMSharedTypeIndex,
len: u32,
layout: &GcArrayLayout,
) -> Result<Option<VMArrayRef>> {
self.gc_heap.alloc_uninit_array(ty, len, layout)
}
pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
self.gc_heap.dealloc_uninit_array(arrayref);
}
pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
self.gc_heap.array_len(arrayref)
}
}
/// Construct the `GcHeap` implementation used when GC support is unavailable
/// (disabled in the `Config`, or the `gc` cargo feature was not enabled).
///
/// Every allocation attempt on the returned heap fails with an error; see
/// `DisabledGcHeap` below.
pub fn disabled_gc_heap() -> Box<dyn GcHeap> {
    // Tail expression instead of a needless trailing `return`
    // (clippy::needless_return).
    Box::new(DisabledGcHeap)
}
pub(crate) struct DisabledGcHeap;
/// The uniform failure returned by every allocating method of
/// `DisabledGcHeap`: with GC support disabled there is no heap to allocate
/// from. Factored out so the message lives in exactly one place.
fn gc_disabled_error<T>() -> Result<T> {
    bail!(
        "GC support disabled either in the `Config` or at compile time \
         because the `gc` cargo feature was not enabled"
    )
}

/// `GcHeap` for builds/configs without GC support.
///
/// Allocation methods fail via `gc_disabled_error`. Every method that takes
/// an existing GC reference is `unreachable!`: since all allocation fails, no
/// non-i31 GC reference can ever be created, and `GcStore` filters out i31s
/// before calling into the heap.
unsafe impl GcHeap for DisabledGcHeap {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        self
    }

    // No-GC scopes are trivially satisfied: no collection can ever run here.
    fn enter_no_gc_scope(&mut self) {}
    fn exit_no_gc_scope(&mut self) {}

    fn header(&self, _gc_ref: &VMGcRef) -> &VMGcHeader {
        unreachable!()
    }

    fn clone_gc_ref(&mut self, _gc_ref: &VMGcRef) -> VMGcRef {
        unreachable!()
    }

    fn write_gc_ref(
        &mut self,
        _host_data_table: &mut ExternRefHostDataTable,
        _destination: &mut Option<VMGcRef>,
        _source: Option<&VMGcRef>,
    ) {
        unreachable!()
    }

    fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
        unreachable!()
    }

    fn need_gc_before_entering_wasm(&self, _num_gc_refs: NonZeroUsize) -> bool {
        unreachable!()
    }

    fn alloc_externref(&mut self, _host_data: ExternRefHostDataId) -> Result<Option<VMExternRef>> {
        gc_disabled_error()
    }

    fn externref_host_data(&self, _externref: &VMExternRef) -> ExternRefHostDataId {
        unreachable!()
    }

    fn alloc_raw(&mut self, _header: VMGcHeader, _layout: Layout) -> Result<Option<VMGcRef>> {
        gc_disabled_error()
    }

    fn alloc_uninit_struct(
        &mut self,
        // Use the type name imported at the top of the file, consistent with
        // `alloc_uninit_array` below.
        _ty: VMSharedTypeIndex,
        _layout: &GcStructLayout,
    ) -> Result<Option<VMStructRef>> {
        gc_disabled_error()
    }

    fn dealloc_uninit_struct(&mut self, _structref: VMStructRef) {
        unreachable!()
    }

    fn gc_object_data(&mut self, _gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
        unreachable!()
    }

    fn alloc_uninit_array(
        &mut self,
        _ty: VMSharedTypeIndex,
        _len: u32,
        _layout: &GcArrayLayout,
    ) -> Result<Option<VMArrayRef>> {
        gc_disabled_error()
    }

    // Parameter renamed from `_structref` to `_arrayref`: it is a
    // `VMArrayRef` (parameter names are not part of Rust's call interface).
    fn dealloc_uninit_array(&mut self, _arrayref: VMArrayRef) {
        unreachable!()
    }

    fn array_len(&self, _arrayref: &VMArrayRef) -> u32 {
        unreachable!()
    }

    fn gc<'a>(
        &'a mut self,
        _roots: GcRootsIter<'a>,
        _host_data_table: &'a mut ExternRefHostDataTable,
    ) -> Box<dyn GarbageCollection<'a> + 'a> {
        // A collection that is complete from the start: there is nothing to
        // trace or reclaim when GC is disabled.
        struct NoGc;

        impl<'a> GarbageCollection<'a> for NoGc {
            fn collect_increment(&mut self) -> GcProgress {
                GcProgress::Complete
            }
        }

        Box::new(NoGc)
    }

    // With no heap present, compiled code sees a null, zero-length GC heap
    // and no auxiliary heap data.
    unsafe fn vmctx_gc_heap_base(&self) -> *mut u8 {
        ptr::null_mut()
    }

    unsafe fn vmctx_gc_heap_bound(&self) -> usize {
        0
    }

    unsafe fn vmctx_gc_heap_data(&self) -> *mut u8 {
        ptr::null_mut()
    }

    // Stateless, so there is nothing to reset between pooled uses.
    #[cfg(feature = "pooling-allocator")]
    fn reset(&mut self) {}
}