use crate::prelude::*;
use crate::runtime::vm::{GcRootsList, GcStore, VMGcRef};
use crate::{
store::{AutoAssertNoGc, StoreId, StoreOpaque},
AsContext, AsContextMut, GcRef, Result, RootedGcRef,
};
use anyhow::anyhow;
use core::any;
use core::marker;
use core::mem;
use core::num::NonZeroU64;
use core::{
fmt::{self, Debug},
hash::{Hash, Hasher},
ops::{Deref, DerefMut},
};
use wasmtime_slab::{Id as SlabId, Slab};
mod sealed {
    use super::*;

    /// Sealed trait implemented by the public GC reference wrapper types.
    ///
    /// # Safety
    ///
    /// Implementors must be `repr(transparent)` wrappers around
    /// `GcRootIndex` so that `transmute_ref` is sound.
    pub unsafe trait GcRefImpl: Sized {
        /// Reinterpret a `&GcRootIndex` as a reference to the implementing
        /// wrapper type.
        fn transmute_ref(index: &GcRootIndex) -> &Self;
    }

    /// Sealed trait unifying `Rooted<T>` and `ManuallyRooted<T>` so generic
    /// code can extract the underlying `VMGcRef` from either kind of root.
    pub trait RootedGcRefImpl<T: GcRef> {
        /// Get the raw GC reference behind this root, or `None` if it has
        /// been unrooted.
        fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef>;

        /// Like `get_gc_ref` but returns an error instead of `None`.
        fn try_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Result<&'a VMGcRef> {
            self.get_gc_ref(store).ok_or_else(|| {
                anyhow!("attempted to use a garbage-collected object that has been unrooted")
            })
        }

        /// Clone this root's GC reference via the store's GC heap, or return
        /// `None` if it has been unrooted.
        ///
        /// The `unchecked_copy` is handed straight to `clone_gc_ref` while we
        /// hold an `AutoAssertNoGc`, so no collection can intervene.
        fn clone_gc_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Option<VMGcRef> {
            let gc_ref = self.get_gc_ref(store)?.unchecked_copy();
            Some(store.unwrap_gc_store_mut().clone_gc_ref(&gc_ref))
        }

        /// Like `clone_gc_ref` but returns an error instead of `None`.
        fn try_clone_gc_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Result<VMGcRef> {
            let gc_ref = self.try_gc_ref(store)?.unchecked_copy();
            Ok(store.gc_store_mut()?.clone_gc_ref(&gc_ref))
        }
    }
}
pub(crate) use sealed::*;
/// The store-relative identity of a GC root: either a slot in the LIFO root
/// stack or an id in the manual-root slab.
///
/// `#[doc(hidden)]` `pub` and `repr(C)` because the C API round-trips it
/// through `into_parts_for_c_api` / `from_raw_parts_for_c_api`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[doc(hidden)]
#[repr(C)]
pub struct GcRootIndex {
    // The store this root was created in.
    store_id: StoreId,
    // LIFO-scope generation used to detect stale roots whose slot was
    // reused after a scope exit. Always 0 for manual roots.
    generation: u32,
    // Packed discriminant + payload: LIFO stack index or manual slab id.
    index: PackedIndex,
}
// Keep the layout of `GcRootIndex` stable: external code (e.g. the C API,
// which reads its raw parts) relies on this exact size and alignment.
const _: () = {
    assert!(mem::size_of::<GcRootIndex>() == 16);
    assert!(mem::align_of::<GcRootIndex>() == 8);
};
impl GcRootIndex {
    /// Does this root belong to the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        store.id() == self.store_id
    }

    /// Resolve this root to its raw GC reference, if it is still rooted.
    ///
    /// # Panics
    ///
    /// Panics if `self` was not created within `store`.
    pub(crate) fn unchecked_get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        if let Some(index) = self.index.as_lifo() {
            // A LIFO root is live only while its slot's generation still
            // matches the generation recorded at rooting time.
            store
                .gc_roots()
                .lifo_roots
                .get(index)
                .filter(|entry| entry.generation == self.generation)
                .map(|entry| &entry.gc_ref)
        } else if let Some(id) = self.index.as_manual() {
            let gc_ref = store.gc_roots().manually_rooted.get(id);
            debug_assert!(gc_ref.is_some());
            gc_ref
        } else {
            unreachable!()
        }
    }

    /// Same as `unchecked_get_gc_ref`, but restricted to `AutoAssertNoGc`
    /// stores so no collection can invalidate the returned reference.
    pub(crate) fn get_gc_ref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Option<&'a VMGcRef> {
        self.unchecked_get_gc_ref(store)
    }

    /// Like `unchecked_get_gc_ref` but an unrooted object is an error.
    pub(crate) fn unchecked_try_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Result<&'a VMGcRef> {
        self.unchecked_get_gc_ref(store).ok_or_else(|| {
            anyhow!("attempted to use a garbage-collected object that has been unrooted")
        })
    }

    /// Like `get_gc_ref` but an unrooted object is an error.
    pub(crate) fn try_gc_ref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcRef> {
        self.unchecked_try_gc_ref(store)
    }

    /// Clone this root's GC reference via the store's GC heap, or return an
    /// error if the object has been unrooted.
    pub(crate) fn try_clone_gc_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Result<VMGcRef> {
        let gc_ref = self.try_gc_ref(store)?.unchecked_copy();
        Ok(store.gc_store_mut()?.clone_gc_ref(&gc_ref))
    }
}
/// A tagged 32-bit encoding of a root's location: the high bit is the
/// discriminant (LIFO vs. manual) and the low 31 bits are the payload
/// (stack index or slab id).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[repr(transparent)]
struct PackedIndex(u32);
impl Debug for PackedIndex {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Exactly one of the two views is always `Some`, since the
        // discriminant bit is either set or clear.
        match (self.as_lifo(), self.as_manual()) {
            (Some(index), _) => f.debug_tuple("PackedIndex::Lifo").field(&index).finish(),
            (_, Some(id)) => f.debug_tuple("PackedIndex::Manual").field(&id).finish(),
            (None, None) => unreachable!(),
        }
    }
}
impl PackedIndex {
    // High bit distinguishes LIFO roots (0) from manual roots (1).
    const DISCRIMINANT_MASK: u32 = 0b1 << 31;
    const LIFO_DISCRIMINANT: u32 = 0b0 << 31;
    const MANUAL_DISCRIMINANT: u32 = 0b1 << 31;
    // The remaining low 31 bits carry the index/id payload.
    const PAYLOAD_MASK: u32 = !Self::DISCRIMINANT_MASK;

    /// Pack a LIFO-stack index.
    ///
    /// Panics if the index does not fit in 31 bits (i.e. would collide with
    /// the discriminant bit).
    fn new_lifo(index: usize) -> PackedIndex {
        let index32 = u32::try_from(index).unwrap();
        assert_eq!(index32 & Self::DISCRIMINANT_MASK, 0);
        let packed = PackedIndex(Self::LIFO_DISCRIMINANT | index32);
        // Sanity-check the round trip in debug builds.
        debug_assert!(packed.is_lifo());
        debug_assert_eq!(packed.as_lifo(), Some(index));
        debug_assert!(!packed.is_manual());
        debug_assert!(packed.as_manual().is_none());
        packed
    }

    /// Pack a manual-root slab id.
    ///
    /// Panics if the raw id does not fit in 31 bits.
    fn new_manual(id: SlabId) -> PackedIndex {
        let raw = id.into_raw();
        assert_eq!(raw & Self::DISCRIMINANT_MASK, 0);
        let packed = PackedIndex(Self::MANUAL_DISCRIMINANT | raw);
        // Sanity-check the round trip in debug builds.
        debug_assert!(packed.is_manual());
        debug_assert_eq!(packed.as_manual(), Some(id));
        debug_assert!(!packed.is_lifo());
        debug_assert!(packed.as_lifo().is_none());
        packed
    }

    // The discriminant bit of this index.
    fn discriminant(&self) -> u32 {
        self.0 & Self::DISCRIMINANT_MASK
    }

    /// Is this a LIFO-root index?
    fn is_lifo(&self) -> bool {
        self.discriminant() == Self::LIFO_DISCRIMINANT
    }

    /// Is this a manual-root index?
    fn is_manual(&self) -> bool {
        self.discriminant() == Self::MANUAL_DISCRIMINANT
    }

    // The payload bits, with the discriminant masked off.
    fn payload(&self) -> u32 {
        self.0 & Self::PAYLOAD_MASK
    }

    /// View as a LIFO-stack index, if this is a LIFO root.
    fn as_lifo(&self) -> Option<usize> {
        if self.is_lifo() {
            Some(usize::try_from(self.payload()).unwrap())
        } else {
            None
        }
    }

    /// View as a manual-root slab id, if this is a manual root.
    fn as_manual(&self) -> Option<SlabId> {
        if self.is_manual() {
            Some(SlabId::from_raw(self.payload()))
        } else {
            None
        }
    }
}
/// The set of all embedder-managed GC roots in a single store.
#[derive(Debug, Default)]
pub(crate) struct RootSet {
    // Roots with explicit, caller-controlled lifetimes.
    manually_rooted: Slab<VMGcRef>,
    // Scoped roots, pushed and popped in LIFO order.
    lifo_roots: Vec<LifoRoot>,
    // Bumped each time a LIFO scope is exited (see `exit_lifo_scope_slow`)
    // so stale `Rooted`s whose slot got reused can be detected.
    lifo_generation: u32,
}
/// One entry in `RootSet::lifo_roots`.
#[derive(Debug)]
struct LifoRoot {
    // Generation this root was pushed in; compared against the
    // `GcRootIndex::generation` of any `Rooted` resolving to this slot.
    generation: u32,
    gc_ref: VMGcRef,
}
impl RootSet {
    /// Report every root in this set (both LIFO and manual) to the
    /// collector's roots list so their referents are kept live during GC.
    pub(crate) fn trace_roots(&mut self, gc_roots_list: &mut GcRootsList) {
        log::trace!("Begin trace user LIFO roots");
        for root in &mut self.lifo_roots {
            unsafe {
                // SAFETY(review): `add_root` presumably requires the pointed-to
                // ref to stay valid while the roots list is in use; these refs
                // live inside `self`, which outlives the trace — confirm.
                gc_roots_list.add_root((&mut root.gc_ref).into());
            }
        }
        log::trace!("End trace user LIFO roots");
        log::trace!("Begin trace user manual roots");
        for (_id, root) in self.manually_rooted.iter_mut() {
            unsafe {
                // SAFETY(review): same requirement as the LIFO roots above.
                gc_roots_list.add_root(root.into());
            }
        }
        log::trace!("End trace user manual roots");
    }

    /// Begin a LIFO rooting scope, returning a token (the current stack
    /// length) to later pass back to `exit_lifo_scope`.
    #[inline]
    pub(crate) fn enter_lifo_scope(&self) -> usize {
        let len = self.lifo_roots.len();
        log::debug!("Entering GC root set LIFO scope: {len}");
        len
    }

    /// Exit a LIFO scope, unrooting everything pushed since the matching
    /// `enter_lifo_scope`. The common case — nothing was rooted inside the
    /// scope — is a cheap, inlinable length check.
    #[inline]
    pub(crate) fn exit_lifo_scope(&mut self, gc_store: &mut GcStore, scope: usize) {
        log::debug!("Exiting GC root set LIFO scope: {scope}");
        debug_assert!(self.lifo_roots.len() >= scope);
        if self.lifo_roots.len() > scope {
            self.exit_lifo_scope_slow(gc_store, scope);
        }
    }

    // Out-of-line slow path: drop every root pushed within the scope.
    #[inline(never)]
    #[cold]
    fn exit_lifo_scope_slow(&mut self, gc_store: &mut GcStore, scope: usize) {
        // Invalidate any extant `Rooted` pointing into the popped region:
        // future pushes will record a different generation.
        self.lifo_generation += 1;
        // Move the vec out of `self` so we can drain it while also borrowing
        // `gc_store` mutably; restored below with its allocation intact.
        let mut lifo_roots = mem::take(&mut self.lifo_roots);
        for root in lifo_roots.drain(scope..) {
            gc_store.drop_gc_ref(root.gc_ref);
        }
        self.lifo_roots = lifo_roots;
    }

    /// Run `f` inside a fresh LIFO scope: any scoped roots it creates are
    /// unrooted when it returns.
    pub(crate) fn with_lifo_scope<S, T>(store: &mut S, f: impl FnOnce(&mut S) -> T) -> T
    where
        S: DerefMut<Target = StoreOpaque>,
    {
        let scope = store.gc_roots().enter_lifo_scope();
        let ret = f(store);
        store.exit_gc_lifo_scope(scope);
        ret
    }

    /// Push a new LIFO root, returning an index that can resolve it later
    /// (until its scope is exited).
    pub(crate) fn push_lifo_root(&mut self, store_id: StoreId, gc_ref: VMGcRef) -> GcRootIndex {
        let generation = self.lifo_generation;
        let index = self.lifo_roots.len();
        let index = PackedIndex::new_lifo(index);
        self.lifo_roots.push(LifoRoot { generation, gc_ref });
        GcRootIndex {
            store_id,
            generation,
            index,
        }
    }
}
/// A scoped GC root: keeps its referent live only until the enclosing LIFO
/// scope (e.g. a `RootScope`) is exited.
///
/// `repr(transparent)` wrapper around a `GcRootIndex`.
#[repr(transparent)]
pub struct Rooted<T: GcRef> {
    inner: GcRootIndex,
    // Carries the `T` type parameter without storing a `T`.
    _phantom: marker::PhantomData<T>,
}
// Manual impls rather than `#[derive(Clone, Copy)]` so that `Rooted<T>` is
// `Clone + Copy` even when `T` itself is not (the derives would add an
// unwanted `T: Clone`/`T: Copy` bound).
impl<T: GcRef> Clone for Rooted<T> {
    fn clone(&self) -> Self {
        // Canonical `Clone` for a `Copy` type: just copy (clippy's
        // `non_canonical_clone_impl` flags spelling out the fields here).
        *self
    }
}

impl<T: GcRef> Copy for Rooted<T> {}
impl<T: GcRef> Debug for Rooted<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Include the concrete `T`, e.g. `Rooted<wasmtime::ExternRef>`.
        f.debug_struct(&format!("Rooted<{}>", any::type_name::<T>()))
            .field("inner", &self.inner)
            .finish()
    }
}
impl<T: GcRef> RootedGcRefImpl<T> for Rooted<T> {
    /// A `Rooted` resolves through the LIFO stack: its slot must still exist
    /// and carry the same generation, otherwise the scope was exited and this
    /// root is dead.
    fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let index = self.inner.index.as_lifo().unwrap();
        store
            .gc_roots()
            .lifo_roots
            .get(index)
            .filter(|entry| entry.generation == self.inner.generation)
            .map(|entry| &entry.gc_ref)
    }
}
impl<T: GcRef> Deref for Rooted<T> {
    type Target = T;

    // Deref to the wrapper type `T`, a transparent view over our
    // `GcRootIndex` (see `GcRefImpl::transmute_ref`).
    fn deref(&self) -> &Self::Target {
        T::transmute_ref(&self.inner)
    }
}
impl<T: GcRef> Rooted<T> {
    /// Root `gc_ref` in `store`'s current LIFO scope.
    pub(crate) fn new(store: &mut AutoAssertNoGc<'_>, gc_ref: VMGcRef) -> Rooted<T> {
        let id = store.id();
        let roots = store.gc_roots_mut();
        let inner = roots.push_lifo_root(id, gc_ref);
        Rooted {
            inner,
            _phantom: marker::PhantomData,
        }
    }

    /// Whether this root was created within the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        // A `Rooted` is always backed by a LIFO index, never a manual slab id.
        debug_assert!(self.inner.index.is_lifo());
        self.inner.comes_from_same_store(store)
    }

    /// Promote this scoped root into a `ManuallyRooted` whose lifetime the
    /// caller controls explicitly.
    ///
    /// # Errors
    ///
    /// Returns an error if this root has already been unrooted.
    pub fn to_manually_rooted(&self, mut store: impl AsContextMut) -> Result<ManuallyRooted<T>> {
        self._to_manually_rooted(store.as_context_mut().0)
    }

    // Store-opaque implementation of `to_manually_rooted`.
    pub(crate) fn _to_manually_rooted(&self, store: &mut StoreOpaque) -> Result<ManuallyRooted<T>> {
        let mut store = AutoAssertNoGc::new(store);
        let gc_ref = self.try_clone_gc_ref(&mut store)?;
        Ok(ManuallyRooted::new(&mut store, gc_ref))
    }

    /// Are `a` and `b` the same root (same store, slot, and generation)?
    ///
    /// Two distinct roots may still refer to the same underlying object; use
    /// `ref_eq` to compare referents.
    pub fn rooted_eq(a: Self, b: Self) -> bool {
        a.inner == b.inner
    }

    /// Do `a` and `b` refer to the same underlying GC reference?
    ///
    /// # Errors
    ///
    /// Returns an error if either root has been unrooted.
    pub fn ref_eq(
        store: impl AsContext,
        a: &impl RootedGcRef<T>,
        b: &impl RootedGcRef<T>,
    ) -> Result<bool> {
        let store = store.as_context().0;
        let a = a.try_gc_ref(store)?;
        let b = b.try_gc_ref(store)?;
        Ok(a == b)
    }

    /// Hash this root's identity; consistent with `rooted_eq`.
    pub fn rooted_hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.inner.hash(state);
    }

    /// Hash the underlying GC reference; consistent with `ref_eq`.
    ///
    /// # Errors
    ///
    /// Returns an error if this root has been unrooted.
    pub fn ref_hash<H>(&self, store: impl AsContext, state: &mut H) -> Result<()>
    where
        H: Hasher,
    {
        let gc_ref = self.try_gc_ref(store.as_context().0)?;
        gc_ref.hash(state);
        Ok(())
    }
}
/// RAII guard for a LIFO rooting scope: `Rooted`s created while this scope
/// is live are unrooted when it is dropped.
pub struct RootScope<C>
where
    C: AsContextMut,
{
    store: C,
    // Token from `RootSet::enter_lifo_scope`, passed back on drop.
    scope: usize,
}
impl<C> Drop for RootScope<C>
where
    C: AsContextMut,
{
    // Exiting the scope unroots everything pushed since `RootScope::new`.
    fn drop(&mut self) {
        self.store.as_context_mut().0.exit_gc_lifo_scope(self.scope);
    }
}
impl<C> RootScope<C>
where
    C: AsContextMut,
{
    /// Begin a new LIFO rooting scope on top of `store`.
    pub fn new(store: C) -> Self {
        let scope = store.as_context().0.gc_roots().enter_lifo_scope();
        RootScope { store, scope }
    }

    // Convenience accessor for the store's root set.
    fn gc_roots(&mut self) -> &mut RootSet {
        self.store.as_context_mut().0.gc_roots_mut()
    }

    // Convenience accessor for the store's LIFO root stack.
    fn lifo_roots(&mut self) -> &mut Vec<LifoRoot> {
        &mut self.gc_roots().lifo_roots
    }

    /// Pre-allocate space for `additional` more roots in this scope.
    pub fn reserve(&mut self, additional: usize) {
        self.lifo_roots().reserve(additional);
    }
}
// A `RootScope` is itself a store context: delegate to the wrapped store.
impl<T> AsContext for RootScope<T>
where
    T: AsContextMut,
{
    type Data = T::Data;

    fn as_context(&self) -> crate::StoreContext<'_, Self::Data> {
        self.store.as_context()
    }
}
// A `RootScope` is also a mutable store context: delegate to the wrapped
// store.
impl<T> AsContextMut for RootScope<T>
where
    T: AsContextMut,
{
    fn as_context_mut(&mut self) -> crate::StoreContextMut<'_, Self::Data> {
        self.store.as_context_mut()
    }
}
/// A GC root with an explicit lifetime: it keeps its referent rooted until
/// `unroot` (or `into_rooted`) is called.
///
/// `repr(transparent)` wrapper around a `GcRootIndex`; the layout assertions
/// below depend on this.
#[repr(transparent)]
pub struct ManuallyRooted<T>
where
    T: GcRef,
{
    inner: GcRootIndex,
    // Carries the `T` type parameter without storing a `T`.
    _phantom: marker::PhantomData<T>,
}
// Keep `ManuallyRooted` exactly the size/alignment of its `GcRootIndex`;
// external code (e.g. the C API via `into_parts_for_c_api`) relies on this
// layout.
const _: () = {
    use crate::{AnyRef, ExternRef};
    assert!(mem::size_of::<ManuallyRooted<AnyRef>>() == 16);
    assert!(mem::align_of::<ManuallyRooted<AnyRef>>() == 8);
    assert!(mem::size_of::<ManuallyRooted<ExternRef>>() == 16);
    assert!(mem::align_of::<ManuallyRooted<ExternRef>>() == 8);
};
impl<T: GcRef> Debug for ManuallyRooted<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Include the concrete `T`, e.g. `ManuallyRooted<wasmtime::ExternRef>`.
        f.debug_struct(&format!("ManuallyRooted<{}>", any::type_name::<T>()))
            .field("inner", &self.inner)
            .finish()
    }
}
impl<T: GcRef> Deref for ManuallyRooted<T> {
    type Target = T;

    // Deref to the wrapper type `T`, a transparent view over our
    // `GcRootIndex` (see `GcRefImpl::transmute_ref`).
    fn deref(&self) -> &Self::Target {
        T::transmute_ref(&self.inner)
    }
}
impl<T> ManuallyRooted<T>
where
    T: GcRef,
{
    /// Root `gc_ref` in `store`'s manual-root slab.
    pub(crate) fn new(store: &mut AutoAssertNoGc<'_>, gc_ref: VMGcRef) -> Self {
        let id = store.gc_roots_mut().manually_rooted.alloc(gc_ref);
        ManuallyRooted {
            inner: GcRootIndex {
                store_id: store.id(),
                // Generations only matter for LIFO roots; manual roots always
                // use 0.
                generation: 0,
                index: PackedIndex::new_manual(id),
            },
            _phantom: marker::PhantomData,
        }
    }

    /// Whether this root was created within the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        // A `ManuallyRooted` is always backed by a manual slab id.
        debug_assert!(self.inner.index.is_manual());
        self.inner.comes_from_same_store(store)
    }

    /// Create an additional, independent manual root for the same object.
    pub fn clone(&self, mut store: impl AsContextMut) -> Self {
        self._clone(store.as_context_mut().0)
    }

    // Store-opaque implementation of `clone`.
    pub(crate) fn _clone(&self, store: &mut StoreOpaque) -> Self {
        let mut store = AutoAssertNoGc::new(store);
        let gc_ref = self
            .clone_gc_ref(&mut store)
            .expect("ManuallyRooted always has a gc ref");
        Self::new(&mut store, gc_ref)
    }

    /// Remove this root, allowing the referent to be collected if nothing
    /// else keeps it live.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one it was created in.
    pub fn unroot(self, mut store: impl AsContextMut) {
        self._unroot(store.as_context_mut().0)
    }

    // Store-opaque implementation of `unroot`: remove the slab entry and drop
    // the GC reference it held.
    pub(crate) fn _unroot(self, store: &mut StoreOpaque) {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let mut store = AutoAssertNoGc::new(store);
        let id = self.inner.index.as_manual().unwrap();
        let roots = store.gc_roots_mut();
        let gc_ref = roots.manually_rooted.dealloc(id);
        store.unwrap_gc_store_mut().drop_gc_ref(gc_ref);
    }

    /// Create a scoped `Rooted` for this object in the current LIFO scope,
    /// leaving this manual root in place.
    pub fn to_rooted(&self, mut context: impl AsContextMut) -> Rooted<T> {
        self._to_rooted(context.as_context_mut().0)
    }

    // Store-opaque implementation of `to_rooted`.
    pub(crate) fn _to_rooted(&self, store: &mut StoreOpaque) -> Rooted<T> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let mut store = AutoAssertNoGc::new(store);
        let gc_ref = self.clone_gc_ref(&mut store).unwrap();
        Rooted::new(&mut store, gc_ref)
    }

    /// Convert this manual root into a scoped `Rooted`, unrooting the manual
    /// root in the process.
    pub fn into_rooted(self, mut context: impl AsContextMut) -> Rooted<T> {
        self._into_rooted(context.as_context_mut().0)
    }

    // Store-opaque implementation of `into_rooted`: clone into the LIFO
    // scope, then drop the manual root.
    pub(crate) fn _into_rooted(self, store: &mut StoreOpaque) -> Rooted<T> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let rooted = self._to_rooted(store);
        self._unroot(store);
        rooted
    }

    /// Do `a` and `b` refer to the same underlying GC reference?
    pub fn ref_eq(
        store: impl AsContext,
        a: &impl RootedGcRef<T>,
        b: &impl RootedGcRef<T>,
    ) -> Result<bool> {
        Rooted::ref_eq(store, a, b)
    }

    /// Hash this root's identity (its store/slab location), not the
    /// referenced object; see `ref_hash` for the latter.
    pub fn rooted_hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.inner.hash(state);
    }

    /// Hash the underlying GC reference. Infallible, unlike
    /// `Rooted::ref_hash`, because a live `ManuallyRooted` is always rooted.
    pub fn ref_hash<H>(&self, store: impl AsContext, state: &mut H)
    where
        H: Hasher,
    {
        let gc_ref = self
            .get_gc_ref(store.as_context().0)
            .expect("ManuallyRooted's get_gc_ref is infallible");
        gc_ref.hash(state);
    }

    /// Decompose into raw parts for the C API. Note this consumes `self`
    /// WITHOUT unrooting: the caller takes over ownership of the root.
    #[doc(hidden)]
    pub fn into_parts_for_c_api(self) -> (NonZeroU64, u32, u32) {
        (
            self.inner.store_id.as_raw(),
            self.inner.generation,
            self.inner.index.0,
        )
    }

    /// Reassemble from raw parts.
    ///
    /// # Safety
    ///
    /// NOTE(review): presumably the parts must come from a prior
    /// `into_parts_for_c_api` call whose root has not since been unrooted —
    /// confirm against the C API callers.
    #[doc(hidden)]
    pub unsafe fn from_raw_parts_for_c_api(a: NonZeroU64, b: u32, c: u32) -> ManuallyRooted<T> {
        ManuallyRooted {
            inner: GcRootIndex {
                store_id: StoreId::from_raw(a),
                generation: b,
                index: PackedIndex(c),
            },
            _phantom: marker::PhantomData,
        }
    }
}
impl<T: GcRef> RootedGcRefImpl<T> for ManuallyRooted<T> {
    /// A manual root resolves through the slab; the entry exists until
    /// explicitly unrooted, so this is effectively infallible for a live
    /// `ManuallyRooted`.
    fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let id = self.inner.index.as_manual().unwrap();
        store.gc_roots().manually_rooted.get(id)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ExternRef;

    /// Both root wrappers must stay exactly as large as a `GcRootIndex`.
    #[test]
    fn sizes() {
        assert_eq!(mem::size_of::<Rooted<ExternRef>>(), 16);
        assert_eq!(mem::size_of::<ManuallyRooted<ExternRef>>(), 16);
    }
}