use super::free_list::FreeList;
use crate::{
ExternRefHostDataId, ExternRefHostDataTable, GarbageCollection, GcHeap, GcHeapObject,
GcProgress, GcRootsIter, GcRuntime, Mmap, TypedGcRef, VMExternRef, VMGcHeader, VMGcRef,
};
use anyhow::Result;
use std::{
alloc::Layout,
any::Any,
cell::UnsafeCell,
collections::HashSet,
mem,
num::NonZeroUsize,
ptr::{self, NonNull},
};
use wasmtime_environ::VMGcKind;
/// The deferred-reference-counting (DRC) garbage collector.
///
/// This is a zero-sized entry point: all per-store state lives in the
/// `DrcHeap`s it creates.
pub struct DrcCollector;

unsafe impl GcRuntime for DrcCollector {
    /// Create a fresh, empty DRC-managed GC heap.
    fn new_gc_heap(&self) -> Result<Box<dyn GcHeap>> {
        // `Box<DrcHeap>` unsizes to `Box<dyn GcHeap>` in return position.
        Ok(Box::new(DrcHeap::new()?))
    }
}
/// A GC heap managed by deferred reference counting.
struct DrcHeap {
    // Depth of nested no-GC scopes; `gc` asserts this is zero before
    // collecting (see `enter_no_gc_scope`/`exit_no_gc_scope`).
    no_gc_count: u64,
    // Over-approximation of the GC references rooted on the Wasm stack.
    // Boxed so that its address is stable and can be handed out via
    // `vmctx_gc_heap_data`.
    activations_table: Box<VMGcRefActivationsTable>,
    // The backing memory that GC objects are allocated inside of.
    heap: Mmap,
    // Allocator bookkeeping for carving object storage out of `heap`.
    free_list: FreeList,
}
impl DrcHeap {
    /// Construct a new DRC heap with the default capacity.
    fn new() -> Result<Self> {
        Self::with_capacity(super::DEFAULT_GC_HEAP_CAPACITY)
    }

    /// Construct a new DRC heap whose backing memory is at least `capacity`
    /// bytes large.
    fn with_capacity(capacity: usize) -> Result<Self> {
        let heap = Mmap::with_at_least(capacity)?;
        let free_list = FreeList::new(heap.len());
        Ok(Self {
            no_gc_count: 0,
            activations_table: Box::new(VMGcRefActivationsTable::default()),
            heap,
            free_list,
        })
    }

    /// View the entire heap as a slice of `UnsafeCell<u8>`.
    ///
    /// `UnsafeCell` is used because the heap's bytes may be mutated through
    /// other aliases (e.g. by running Wasm code) while this shared borrow is
    /// live.
    fn heap_slice(&self) -> &[UnsafeCell<u8>] {
        let ptr = self.heap.as_ptr().cast::<UnsafeCell<u8>>();
        let len = self.heap.len();
        // SAFETY: `ptr` points at `len` bytes owned by `self.heap`, and
        // `UnsafeCell<u8>` has the same layout as `u8`.
        unsafe { std::slice::from_raw_parts(ptr, len) }
    }

    /// View the entire heap as a mutable byte slice.
    fn heap_slice_mut(&mut self) -> &mut [u8] {
        let ptr = self.heap.as_mut_ptr();
        let len = self.heap.len();
        // SAFETY: we have `&mut self`, so no other alias of the heap's bytes
        // can be live for the duration of the returned borrow.
        unsafe { std::slice::from_raw_parts_mut(ptr, len) }
    }

    /// Index into this heap and get a shared borrow of the `T` that `gc_ref`
    /// points to.
    ///
    /// Panics on out-of-bounds heap indices (via slicing) and asserts that
    /// `T` has no destructor, since heap objects are reclaimed by raw
    /// deallocation rather than by dropping.
    fn index<T>(&self, gc_ref: &TypedGcRef<T>) -> &T
    where
        T: GcHeapObject,
    {
        assert!(!std::mem::needs_drop::<T>());
        let gc_ref = gc_ref.as_untyped();
        let start = gc_ref.as_heap_index().unwrap().get();
        let start = usize::try_from(start).unwrap();
        let len = std::mem::size_of::<T>();
        let slice = &self.heap_slice()[start..][..len];
        // SAFETY: the typed GC ref's invariant is that it points at a valid
        // `T` at this heap index; the slice above bounds-checks the access.
        unsafe { &*(slice.as_ptr().cast::<T>()) }
    }

    /// Like `index`, but returns an exclusive borrow of the `T`.
    fn index_mut<T>(&mut self, gc_ref: &TypedGcRef<T>) -> &mut T
    where
        T: GcHeapObject,
    {
        assert!(!std::mem::needs_drop::<T>());
        let gc_ref = gc_ref.as_untyped();
        let start = gc_ref.as_heap_index().unwrap().get();
        let start = usize::try_from(start).unwrap();
        let len = std::mem::size_of::<T>();
        let slice = &mut self.heap_slice_mut()[start..][..len];
        // SAFETY: same invariant as `index`, and `&mut self` guarantees
        // exclusive access.
        unsafe { &mut *(slice.as_mut_ptr().cast::<T>()) }
    }

    /// Increment the ref count of the object that `gc_ref` points to.
    ///
    /// No-op for `i31` refs, which are unboxed immediates with no heap
    /// object or ref count.
    fn inc_ref(&mut self, gc_ref: &VMGcRef) {
        if gc_ref.is_i31() {
            return;
        }
        let drc_ref = drc_ref(gc_ref);
        // NB: pass the typed ref through directly, matching `dec_ref`;
        // the old `&drc_ref` only compiled via `&&T -> &T` deref coercion.
        let header = self.index_mut(drc_ref);
        debug_assert_ne!(
            *header.ref_count.get_mut(),
            0,
            "{:#p} is supposedly live; should have nonzero ref count",
            *gc_ref
        );
        *header.ref_count.get_mut() += 1;
        log::trace!(
            "increment {:#p} ref count -> {}",
            *gc_ref,
            header.ref_count.get_mut()
        );
    }

    /// Decrement the ref count of the object that `gc_ref` points to.
    ///
    /// Returns `true` if the count reached zero and the object should be
    /// deallocated; always `false` for `i31` refs.
    fn dec_ref(&mut self, gc_ref: &VMGcRef) -> bool {
        if gc_ref.is_i31() {
            return false;
        }
        let drc_ref = drc_ref(gc_ref);
        let header = self.index_mut(drc_ref);
        debug_assert_ne!(
            *header.ref_count.get_mut(),
            0,
            "{:#p} is supposedly live; should have nonzero ref count",
            *gc_ref
        );
        *header.ref_count.get_mut() -= 1;
        log::trace!(
            "decrement {:#p} ref count -> {}",
            *gc_ref,
            header.ref_count.get_mut()
        );
        *header.ref_count.get_mut() == 0
    }

    /// Decrement `gc_ref`'s ref count and, if it reached zero, release its
    /// associated host data (for externrefs) and return its storage to the
    /// free list.
    fn dec_ref_and_maybe_dealloc(
        &mut self,
        host_data_table: &mut ExternRefHostDataTable,
        gc_ref: &VMGcRef,
    ) {
        if self.dec_ref(gc_ref) {
            // Free the `externref`'s host data, if it is an `externref`.
            if let Some(externref) = gc_ref.as_typed::<VMDrcExternRef>(self) {
                let host_data_id = self.index(externref).host_data;
                host_data_table.dealloc(host_data_id);
            }
            let layout = self.layout(gc_ref);
            self.free_list
                .dealloc(gc_ref.as_heap_index().unwrap(), layout);
        }
    }

    /// Get the layout of the heap object that `gc_ref` points to.
    ///
    /// Currently only externrefs are heap-allocated (asserted via
    /// `ONLY_EXTERN_REF_AND_I31`), so this is always the externref layout.
    fn layout(&self, gc_ref: &VMGcRef) -> Layout {
        assert!(VMGcRef::ONLY_EXTERN_REF_AND_I31);
        assert!(!gc_ref.is_i31());
        Layout::new::<VMDrcExternRef>()
    }

    /// Trace the given roots: record every non-`i31` GC ref found on the
    /// Wasm stack in `precise_stack_roots`, incrementing each distinct
    /// ref's count exactly once to hold it alive through the sweep.
    fn trace(&mut self, roots: &mut GcRootsIter<'_>) {
        debug_assert!({
            self.activations_table.precise_stack_roots.is_empty()
        });
        // In debug builds, snapshot the activations table so we can check
        // that every on-stack ref really is covered by it.
        let mut activations_table_set: DebugOnly<HashSet<_>> = Default::default();
        if cfg!(debug_assertions) {
            self.activations_table.elements(|elem| {
                activations_table_set.insert(elem.unchecked_copy());
            });
        }
        for root in roots {
            if !root.is_on_wasm_stack() {
                // Non-stack roots already hold their own ref counts; only
                // stack roots need to be traced here.
                continue;
            }
            let gc_ref = root.get();
            debug_assert!(
                gc_ref.is_i31() || activations_table_set.contains(&gc_ref),
                "every on-stack gc_ref inside a Wasm frame should \
                 have an entry in the VMGcRefActivationsTable; \
                 {gc_ref:?} is not in the table",
            );
            if gc_ref.is_i31() {
                continue;
            }
            debug_assert_ne!(
                *self.index_mut(drc_ref(&gc_ref)).ref_count.get_mut(),
                0,
                "{gc_ref:#p} is on the Wasm stack and therefore should be held \
                 by the activations table; should have nonzero ref count",
            );
            log::trace!("Found GC reference on the stack: {:#p}", gc_ref);
            let is_new = self
                .activations_table
                .precise_stack_roots
                .insert(gc_ref.unchecked_copy());
            if is_new {
                self.inc_ref(&gc_ref);
            }
        }
    }

    /// Iterate over the occupied slots of the activations table's bump
    /// chunk, decoding each raw `r64` into a `VMGcRef`.
    fn iter_bump_chunk(&mut self) -> impl Iterator<Item = VMGcRef> + '_ {
        let num_filled = self.activations_table.num_filled_in_bump_chunk();
        self.activations_table
            .alloc
            .chunk
            .iter_mut()
            .take(num_filled)
            .map(|slot| {
                let r64 = *slot.get_mut();
                VMGcRef::from_r64(r64)
                    .expect("valid r64")
                    .expect("non-null")
            })
    }

    /// Log a set of GC refs for tracing; callers must check
    /// `log_enabled!` first, which is asserted here.
    #[inline(never)]
    #[cold]
    fn log_gc_ref_set(prefix: &str, items: impl Iterator<Item = VMGcRef>) {
        assert!(log::log_enabled!(log::Level::Trace));
        let mut set = "{".to_string();
        let mut any = false;
        for gc_ref in items {
            any = true;
            set += &format!("\n  {gc_ref:#p},");
        }
        if any {
            set.push('\n');
        }
        set.push('}');
        log::trace!("{prefix}: {set}");
    }

    /// Drain every occupied slot of the bump chunk, zeroing each slot and
    /// invoking `f` with its GC ref.
    ///
    /// The alloc is temporarily `mem::take`n out of `self` so that `f` may
    /// borrow `self` mutably while we iterate the chunk.
    fn drain_bump_chunk(&mut self, mut f: impl FnMut(&mut Self, VMGcRef)) {
        let num_filled = self.activations_table.num_filled_in_bump_chunk();
        let mut alloc = mem::take(&mut self.activations_table.alloc);
        for slot in alloc.chunk.iter_mut().take(num_filled) {
            // `mem::take` already leaves `0` behind; the trailing store is
            // belt-and-braces in case `f` observes the chunk.
            let r64 = mem::take(slot.get_mut());
            let gc_ref = VMGcRef::from_r64(r64)
                .expect("valid r64")
                .expect("non-null");
            f(self, gc_ref);
            *slot.get_mut() = 0;
        }
        debug_assert!(
            alloc.chunk.iter_mut().all(|slot| *slot.get_mut() == 0),
            "after sweeping the bump chunk, all slots should be empty",
        );
        debug_assert!(self.activations_table.alloc.chunk.is_empty());
        self.activations_table.alloc = alloc;
    }

    /// Sweep the activations table after tracing: drop the ref counts held
    /// by the bump chunk and by the *previous* over-approximated root set,
    /// then install the precise roots gathered by `trace` as the new
    /// over-approximation.
    fn sweep(&mut self, host_data_table: &mut ExternRefHostDataTable) {
        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set("bump chunk before sweeping", self.iter_bump_chunk());
        }
        log::trace!("Begin sweeping bump chunk");
        self.drain_bump_chunk(|heap, gc_ref| {
            heap.dec_ref_and_maybe_dealloc(host_data_table, &gc_ref);
        });
        log::trace!("Done sweeping bump chunk");
        // Make sure the bump chunk is allocated and its `next`/`end`
        // pointers are reset for the next mutation period.
        if self.activations_table.alloc.chunk.is_empty() {
            self.activations_table.alloc.force_allocation();
        } else {
            self.activations_table.alloc.reset();
        }
        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set(
                "hash set before sweeping",
                self.activations_table
                    .over_approximated_stack_roots
                    .iter()
                    .map(|r| r.unchecked_copy()),
            );
        }
        // The precise set (each entry holding a count from `trace`) becomes
        // the new over-approximation; the old over-approximation is swapped
        // into `precise_stack_roots` so we can release its counts below.
        mem::swap(
            &mut self.activations_table.precise_stack_roots,
            &mut self.activations_table.over_approximated_stack_roots,
        );
        log::trace!("Begin sweeping hash set");
        // `mem::take` the set so draining it doesn't alias the `&mut self`
        // borrow needed by `dec_ref_and_maybe_dealloc`.
        let mut precise_stack_roots = mem::take(&mut self.activations_table.precise_stack_roots);
        for gc_ref in precise_stack_roots.drain() {
            self.dec_ref_and_maybe_dealloc(host_data_table, &gc_ref);
        }
        log::trace!("Done sweeping hash set");
        // Restore the (now empty) set so its allocation is reused.
        self.activations_table.precise_stack_roots = precise_stack_roots;
        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set(
                "hash set after sweeping",
                self.activations_table
                    .over_approximated_stack_roots
                    .iter()
                    .map(|r| r.unchecked_copy()),
            );
        }
    }
}
/// View the given non-`i31` GC reference as a typed reference to its
/// `VMDrcHeader`, without verifying the header kind (every DRC heap object
/// begins with this header, per `VMDrcHeader::is`).
fn drc_ref(gc_ref: &VMGcRef) -> &TypedGcRef<VMDrcHeader> {
    // `i31` refs are unboxed immediates and have no header.
    debug_assert!(!gc_ref.is_i31());
    gc_ref.as_typed_unchecked()
}
/// View the given externref as a typed reference to its underlying
/// `VMDrcExternRef` heap representation, without re-checking the kind
/// (the `VMExternRef` wrapper already implies it).
fn externref_to_drc(externref: &VMExternRef) -> &TypedGcRef<VMDrcExternRef> {
    let gc_ref = externref.as_gc_ref();
    debug_assert!(!gc_ref.is_i31());
    gc_ref.as_typed_unchecked()
}
/// The common header that every DRC heap object begins with.
///
/// `#[repr(C)]` so the `ref_count` field's offset is stable; the offset is
/// checked against `wasmtime_environ::VMOffsets` in this file's tests.
#[repr(C)]
struct VMDrcHeader {
    // The common GC header (kind bits etc.).
    header: VMGcHeader,
    // This object's reference count. `UnsafeCell` because it may be
    // mutated through shared borrows of the heap.
    ref_count: UnsafeCell<u64>,
}
// SAFETY (review): the `UnsafeCell` makes this type non-auto-`Sync`. These
// impls assert that all `ref_count` accesses are externally synchronized
// (the collector mutates it only via `&mut` access to the heap) — confirm
// against how heaps are shared across threads.
unsafe impl Send for VMDrcHeader {}
unsafe impl Sync for VMDrcHeader {}
unsafe impl GcHeapObject for VMDrcHeader {
    // Every DRC heap object begins with this header, so any header matches.
    #[inline]
    fn is(_header: &VMGcHeader) -> bool {
        true
    }
}
/// The heap representation of an `externref`: the common DRC header
/// followed by an id into the host-data side table.
///
/// `#[repr(C)]` keeps the header at offset zero so header-based casts are
/// valid.
#[repr(C)]
struct VMDrcExternRef {
    header: VMDrcHeader,
    // Key into the `ExternRefHostDataTable` for this object's host value.
    host_data: ExternRefHostDataId,
}
unsafe impl GcHeapObject for VMDrcExternRef {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ExternRef
    }
}
unsafe impl GcHeap for DrcHeap {
    fn as_any(&self) -> &dyn Any {
        self as _
    }
    fn as_any_mut(&mut self) -> &mut dyn Any {
        self as _
    }
    // No-GC scopes nest; track the depth so `gc` can assert it is zero.
    fn enter_no_gc_scope(&mut self) {
        self.no_gc_count += 1;
    }
    fn exit_no_gc_scope(&mut self) {
        self.no_gc_count -= 1;
    }
    /// Borrow the common GC header of the object `gc_ref` points to.
    fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        self.index(gc_ref.as_typed_unchecked())
    }
    /// Duplicate a GC ref, bumping the underlying object's ref count so the
    /// copy independently keeps it alive.
    fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        self.inc_ref(gc_ref);
        gc_ref.unchecked_copy()
    }
    /// Overwrite `*destination` with a copy of `source`, maintaining ref
    /// counts.
    fn write_gc_ref(
        &mut self,
        host_data_table: &mut ExternRefHostDataTable,
        destination: &mut Option<VMGcRef>,
        source: Option<&VMGcRef>,
    ) {
        // Increment the source's count *before* decrementing the
        // destination's: if both refer to the same object, decrementing
        // first could transiently drop its count to zero and free it.
        if let Some(src) = source {
            self.inc_ref(src);
        }
        if let Some(dest) = destination {
            self.dec_ref_and_maybe_dealloc(host_data_table, dest);
        }
        *destination = source.map(|s| s.unchecked_copy());
    }
    /// Root `gc_ref` in the activations table before handing it to Wasm,
    /// transferring ownership of one reference count into the table.
    fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) {
        self.activations_table.insert_without_gc(gc_ref);
    }
    // GC first if the bump region can't hold `num_gc_refs` fast-path
    // insertions.
    fn need_gc_before_entering_wasm(&self, num_gc_refs: NonZeroUsize) -> bool {
        num_gc_refs.get() > self.activations_table.bump_capacity_remaining()
    }
    /// Allocate a new `externref` holding `host_data`, or return `Ok(None)`
    /// when the heap is out of space (callers may GC and retry).
    fn alloc_externref(&mut self, host_data: ExternRefHostDataId) -> Result<Option<VMExternRef>> {
        let gc_ref = match self.free_list.alloc(Layout::new::<VMDrcExternRef>())? {
            None => return Ok(None),
            Some(index) => VMGcRef::from_heap_index(index).unwrap(),
        };
        // Initialize the object in place with an initial ref count of 1,
        // owned by the returned `VMExternRef`.
        *self.index_mut(gc_ref.as_typed_unchecked()) = VMDrcExternRef {
            header: VMDrcHeader {
                header: VMGcHeader::externref(),
                ref_count: UnsafeCell::new(1),
            },
            host_data,
        };
        log::trace!("increment {gc_ref:#p} ref count -> 1");
        Ok(Some(gc_ref.into_externref_unchecked()))
    }
    /// Look up the host-data id stored in the given externref.
    fn externref_host_data(&self, externref: &VMExternRef) -> ExternRefHostDataId {
        let typed_ref = externref_to_drc(externref);
        self.index(typed_ref).host_data
    }
    /// Begin an incremental collection over the given roots; the returned
    /// object drives the trace and sweep phases.
    fn gc<'a>(
        &'a mut self,
        roots: GcRootsIter<'a>,
        host_data_table: &'a mut ExternRefHostDataTable,
    ) -> Box<dyn GarbageCollection<'a> + 'a> {
        assert_eq!(self.no_gc_count, 0, "Cannot GC inside a no-GC scope!");
        Box::new(DrcCollection {
            roots,
            host_data_table,
            heap: self,
            phase: DrcCollectionPhase::Trace,
        })
    }
    // The following accessors feed raw pointers/bounds into the vmctx for
    // use by compiled code.
    unsafe fn vmctx_gc_heap_base(&self) -> *mut u8 {
        self.heap.as_ptr().cast_mut()
    }
    unsafe fn vmctx_gc_heap_bound(&self) -> usize {
        self.heap.len()
    }
    unsafe fn vmctx_gc_heap_data(&self) -> *mut u8 {
        // The table is boxed, so this pointer stays valid as the heap moves.
        let ptr = &*self.activations_table as *const VMGcRefActivationsTable;
        ptr.cast_mut().cast::<u8>()
    }
    #[cfg(feature = "pooling-allocator")]
    fn reset(&mut self) {
        // Destructure so adding a field to `DrcHeap` forces updating this
        // method; the mmap itself is intentionally kept as-is.
        let DrcHeap {
            no_gc_count,
            activations_table,
            free_list,
            heap: _,
        } = self;
        *no_gc_count = 0;
        free_list.reset();
        activations_table.reset();
    }
}
/// An in-progress DRC collection, stepped forward via `collect_increment`.
struct DrcCollection<'a> {
    roots: GcRootsIter<'a>,
    host_data_table: &'a mut ExternRefHostDataTable,
    heap: &'a mut DrcHeap,
    // Which phase the next `collect_increment` call will run.
    phase: DrcCollectionPhase,
}
/// The phases of a DRC collection, run in order: trace, then sweep.
enum DrcCollectionPhase {
    Trace,
    Sweep,
    Done,
}
impl<'a> GarbageCollection<'a> for DrcCollection<'a> {
fn collect_increment(&mut self) -> GcProgress {
match self.phase {
DrcCollectionPhase::Trace => {
log::trace!("Begin DRC trace");
self.heap.trace(&mut self.roots);
log::trace!("End DRC trace");
self.phase = DrcCollectionPhase::Sweep;
GcProgress::Continue
}
DrcCollectionPhase::Sweep => {
log::trace!("Begin DRC sweep");
self.heap.sweep(self.host_data_table);
log::trace!("End DRC sweep");
self.phase = DrcCollectionPhase::Done;
GcProgress::Complete
}
DrcCollectionPhase::Done => GcProgress::Complete,
}
}
}
// A bump-chunk slot: the raw `r64` encoding of a GC ref, `0` when empty.
type TableElem = UnsafeCell<u64>;
/// The table that over-approximates the set of GC refs rooted on the Wasm
/// stack.
///
/// `#[repr(C)]` so that `alloc` sits at offset zero; compiled code reaches
/// `alloc.next`/`alloc.end` through the raw pointer handed out by
/// `vmctx_gc_heap_data` (offsets are checked in this file's tests).
#[repr(C)]
struct VMGcRefActivationsTable {
    // Fast-path bump region that compiled code inserts into directly.
    alloc: VMGcRefTableAlloc,
    // Slow-path roots, plus everything promoted out of the bump chunk at
    // the end of a GC.
    over_approximated_stack_roots: HashSet<VMGcRef>,
    // Scratch set populated during tracing with the refs actually found on
    // the stack; empty between collections.
    precise_stack_roots: HashSet<VMGcRef>,
}
/// The bump-allocation region of the activations table.
///
/// `#[repr(C)]` because compiled code reads and writes `next` and `end`
/// directly at fixed offsets.
#[repr(C)]
struct VMGcRefTableAlloc {
    // Bump finger: next free slot. `UnsafeCell` because compiled code
    // advances it without a Rust-level `&mut` borrow.
    next: UnsafeCell<NonNull<TableElem>>,
    // One-past-the-end of `chunk`; `next == end` means the region is full.
    end: NonNull<TableElem>,
    // The backing storage that `next`/`end` point into.
    chunk: Box<[TableElem]>,
}
impl Default for VMGcRefTableAlloc {
    /// Create an empty bump region: a zero-length chunk with `next == end`,
    /// so every insertion takes the fallback path until `force_allocation`
    /// provides real storage.
    fn default() -> Self {
        let mut chunk: Box<[TableElem]> = Box::new([]);
        // For an empty boxed slice this pointer is dangling but well-aligned
        // and non-null, so the `NonNull` unwraps below cannot fail.
        let next = chunk.as_mut_ptr();
        let end = unsafe { next.add(chunk.len()) };
        VMGcRefTableAlloc {
            next: UnsafeCell::new(NonNull::new(next).unwrap()),
            end: NonNull::new(end).unwrap(),
            chunk,
        }
    }
}
impl VMGcRefTableAlloc {
    // As many slots as fit in 4096 bytes (one typical page).
    const CHUNK_SIZE: usize = 4096 / mem::size_of::<TableElem>();
    /// Replace the empty placeholder chunk with a real, zero-filled chunk
    /// and point `next`/`end` at it. Must only be called while the chunk is
    /// still the empty default.
    fn force_allocation(&mut self) {
        assert!(self.chunk.is_empty());
        self.chunk = (0..Self::CHUNK_SIZE).map(|_| UnsafeCell::new(0)).collect();
        self.reset();
    }
    /// Rewind the bump finger to the start of the chunk and recompute `end`.
    /// Callers are responsible for having drained/zeroed the slots first.
    fn reset(&mut self) {
        self.next = UnsafeCell::new(NonNull::new(self.chunk.as_mut_ptr()).unwrap());
        self.end = NonNull::new(unsafe { self.chunk.as_mut_ptr().add(self.chunk.len()) }).unwrap();
    }
}
// SAFETY (review): the raw `NonNull` pointers make this type non-auto
// Send/Sync. These impls assert the pointers only ever target the owned
// `chunk`, and that cross-thread access is externally synchronized —
// confirm against how stores migrate between threads.
unsafe impl Send for VMGcRefTableAlloc {}
unsafe impl Sync for VMGcRefTableAlloc {}
// Compile-time check that the table remains Send + Sync.
fn _assert_send_sync() {
    fn _assert<T: Send + Sync>() {}
    _assert::<VMGcRefActivationsTable>();
}
impl Default for VMGcRefActivationsTable {
fn default() -> Self {
Self::new()
}
}
impl VMGcRefActivationsTable {
    /// Create a new, empty table. The bump chunk starts as the empty
    /// placeholder from `VMGcRefTableAlloc::default`.
    fn new() -> Self {
        VMGcRefActivationsTable {
            alloc: VMGcRefTableAlloc::default(),
            over_approximated_stack_roots: HashSet::new(),
            precise_stack_roots: HashSet::new(),
        }
    }
    #[cfg(feature = "pooling-allocator")]
    fn reset(&mut self) {
        // Destructure so adding a field forces updating this method.
        let VMGcRefActivationsTable {
            alloc,
            over_approximated_stack_roots,
            precise_stack_roots,
        } = self;
        alloc.reset();
        over_approximated_stack_roots.clear();
        precise_stack_roots.clear();
    }
    /// How many bytes of bump-chunk capacity remain before the fast path
    /// overflows.
    #[inline]
    fn bump_capacity_remaining(&self) -> usize {
        let end = self.alloc.end.as_ptr() as usize;
        let next = unsafe { *self.alloc.next.get() };
        end - next.as_ptr() as usize
    }
    /// Fast path: try to bump-allocate a slot for `gc_ref`, returning the
    /// ref back to the caller if the chunk is full.
    #[inline]
    fn try_insert(&mut self, gc_ref: VMGcRef) -> Result<(), VMGcRef> {
        unsafe {
            let next = *self.alloc.next.get();
            if next == self.alloc.end {
                return Err(gc_ref);
            }
            debug_assert_eq!(
                (*next.as_ref().get()),
                0,
                "slots >= the `next` bump finger are always `None`"
            );
            // Store the raw r64 encoding, then advance the bump finger.
            ptr::write(next.as_ptr(), UnsafeCell::new(gc_ref.into_r64()));
            let next = NonNull::new_unchecked(next.as_ptr().add(1));
            debug_assert!(next <= self.alloc.end);
            *self.alloc.next.get() = next;
            Ok(())
        }
    }
    /// Insert `gc_ref`, taking ownership of its reference count; falls back
    /// to the hash set when the bump chunk is full (never triggers a GC).
    #[inline]
    fn insert_without_gc(&mut self, gc_ref: VMGcRef) {
        if let Err(gc_ref) = self.try_insert(gc_ref) {
            self.insert_slow_without_gc(gc_ref);
        }
    }
    // Out-of-line slow path to keep `insert_without_gc` small and inlinable.
    #[inline(never)]
    fn insert_slow_without_gc(&mut self, gc_ref: VMGcRef) {
        self.over_approximated_stack_roots.insert(gc_ref);
    }
    /// Number of bump-chunk slots below the current bump finger (i.e. how
    /// many have been handed out since the last reset).
    fn num_filled_in_bump_chunk(&self) -> usize {
        let next = unsafe { *self.alloc.next.get() };
        let bytes_unused = (self.alloc.end.as_ptr() as usize) - (next.as_ptr() as usize);
        let slots_unused = bytes_unused / mem::size_of::<TableElem>();
        self.alloc.chunk.len().saturating_sub(slots_unused)
    }
    /// Invoke `f` on every GC ref currently held by this table: the hash
    /// set plus the occupied prefix of the bump chunk.
    fn elements(&self, mut f: impl FnMut(&VMGcRef)) {
        for elem in self.over_approximated_stack_roots.iter() {
            f(elem);
        }
        let num_filled = self.num_filled_in_bump_chunk();
        for slot in self.alloc.chunk.iter().take(num_filled) {
            // A zero slot decodes to `None` (null) and is skipped.
            if let Some(elem) = VMGcRef::from_r64(unsafe { *slot.get() }).expect("valid r64") {
                f(&elem);
            }
        }
    }
}
/// A wrapper whose contents may only be accessed in debug builds.
///
/// Deref'ing in a release build panics, catching accidental use of
/// debug-only bookkeeping (and its overhead) outside `debug_assert!`s.
#[derive(Debug, Default)]
struct DebugOnly<T> {
    inner: T,
}
impl<T> std::ops::Deref for DebugOnly<T> {
    type Target = T;

    /// Access the wrapped value; panics outside of debug builds.
    fn deref(&self) -> &T {
        // Guard clause: release builds must never reach the inner value.
        if !cfg!(debug_assertions) {
            panic!(
                "only deref `DebugOnly` when `cfg(debug_assertions)` or \
                 inside a `debug_assert!(..)`"
            )
        }
        &self.inner
    }
}
impl<T> std::ops::DerefMut for DebugOnly<T> {
    /// Mutably access the wrapped value; panics outside of debug builds.
    fn deref_mut(&mut self) -> &mut T {
        // Guard clause: release builds must never reach the inner value.
        if !cfg!(debug_assertions) {
            panic!(
                "only deref `DebugOnly` when `cfg(debug_assertions)` or \
                 inside a `debug_assert!(..)`"
            )
        }
        &mut self.inner
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // These tests pin the Rust-side struct layouts against the offsets that
    // `wasmtime_environ::VMOffsets` advertises to compiled code. If a field
    // moves, compiled code would read/write the wrong bytes, so a mismatch
    // must fail loudly here. `ptr: 8` models a 64-bit target; the module
    // counts are all zero because these offsets don't depend on them.
    #[test]
    fn ref_count_is_at_correct_offset() {
        let extern_data = VMDrcHeader {
            header: VMGcHeader::externref(),
            ref_count: UnsafeCell::new(0),
        };
        // Compute the field's actual byte offset from the struct base.
        let extern_data_ptr = &extern_data as *const _;
        let ref_count_ptr = &extern_data.ref_count as *const _;
        let actual_offset = (ref_count_ptr as usize) - (extern_data_ptr as usize);
        let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields {
            ptr: 8,
            num_imported_functions: 0,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_defined_tables: 0,
            num_defined_memories: 0,
            num_owned_memories: 0,
            num_defined_globals: 0,
            num_escaped_funcs: 0,
        });
        assert_eq!(
            offsets.vm_drc_header_ref_count(),
            actual_offset.try_into().unwrap(),
        );
    }

    // `alloc.next` is the bump finger compiled code advances on the inline
    // insertion fast path.
    #[test]
    fn table_next_is_at_correct_offset() {
        let table = VMGcRefActivationsTable::new();
        let table_ptr = &table as *const _;
        let next_ptr = &table.alloc.next as *const _;
        let actual_offset = (next_ptr as usize) - (table_ptr as usize);
        let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields {
            ptr: 8,
            num_imported_functions: 0,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_defined_tables: 0,
            num_defined_memories: 0,
            num_owned_memories: 0,
            num_defined_globals: 0,
            num_escaped_funcs: 0,
        });
        assert_eq!(
            offsets.vm_gc_ref_activation_table_next() as usize,
            actual_offset
        );
    }

    // `alloc.end` is the capacity limit compiled code compares the bump
    // finger against.
    #[test]
    fn table_end_is_at_correct_offset() {
        let table = VMGcRefActivationsTable::new();
        let table_ptr = &table as *const _;
        let end_ptr = &table.alloc.end as *const _;
        let actual_offset = (end_ptr as usize) - (table_ptr as usize);
        let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields {
            ptr: 8,
            num_imported_functions: 0,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_defined_tables: 0,
            num_defined_memories: 0,
            num_owned_memories: 0,
            num_defined_globals: 0,
            num_escaped_funcs: 0,
        });
        assert_eq!(
            offsets.vm_gc_ref_activation_table_end() as usize,
            actual_offset
        );
    }
}