use crate::runtime::vm::{GcRootsList, GcStore, VMGcRef};
use crate::{
AsContext, AsContextMut, GcRef, Result, RootedGcRef,
error::OutOfMemory,
store::{AsStoreOpaque, AutoAssertNoGc, StoreId, StoreOpaque},
};
use crate::{ValRaw, prelude::*};
use alloc::sync::{Arc, Weak};
use core::any;
use core::marker;
use core::mem::{self, MaybeUninit};
use core::num::{NonZeroU64, NonZeroUsize};
use core::{
fmt::{self, Debug},
hash::{Hash, Hasher},
ops::{Deref, DerefMut},
};
use wasmtime_core::slab::{Id as SlabId, Slab};
/// Sealed traits backing the public `GcRef`/`RootedGcRef` traits: external
/// crates can name the public traits but cannot implement these, keeping the
/// set of GC reference types closed within this crate.
mod sealed {
    use super::*;

    /// Implemented by each concrete GC reference type.
    ///
    /// # Safety
    ///
    /// `transmute_ref` reinterprets a `&GcRootIndex` as a `&Self`, so every
    /// implementor must be a transparent wrapper around `GcRootIndex` (as
    /// `Rooted<T>` is) — TODO(review): confirm each implementor upholds this;
    /// the constraint is not visible from this module alone.
    pub unsafe trait GcRefImpl: Sized {
        /// Reinterpret a borrowed root index as a borrowed `Self`.
        fn transmute_ref(index: &GcRootIndex) -> &Self;
    }

    /// Common plumbing shared by rooted GC references (`Rooted`,
    /// `OwnedRooted`): fetching and cloning the underlying `VMGcRef` out of a
    /// store's root set.
    pub trait RootedGcRefImpl<T: GcRef> {
        /// Look up the raw GC reference for this root in `store`, or `None`
        /// if the root has since been unrooted.
        fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef>;

        /// Like `get_gc_ref`, but turns an unrooted reference into an error.
        fn try_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Result<&'a VMGcRef> {
            self.get_gc_ref(store).ok_or_else(|| {
                format_err!("attempted to use a garbage-collected object that has been unrooted")
            })
        }

        /// Clone the underlying GC reference through the store's bookkeeping
        /// (`store.clone_gc_ref`); `None` if this root has been unrooted.
        fn clone_gc_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Option<VMGcRef> {
            let gc_ref = self.get_gc_ref(store)?.unchecked_copy();
            Some(store.clone_gc_ref(&gc_ref))
        }

        /// Fallible version of `clone_gc_ref`: errors if unrooted.
        fn try_clone_gc_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Result<VMGcRef> {
            let gc_ref = self.try_gc_ref(store)?.unchecked_copy();
            Ok(store.clone_gc_ref(&gc_ref))
        }
    }
}
pub(crate) use sealed::*;
/// A store-scoped handle into a store's GC root set.
///
/// Packs the owning store's id, a generation counter (used to invalidate
/// stale LIFO roots; see `RootSet::lifo_generation`), and a `PackedIndex`
/// that records whether the root lives in the LIFO stack or the owned slab.
///
/// `repr(C)` because this layout crosses the C API boundary (see
/// `OwnedRooted::into_parts_for_c_api`); size/alignment are pinned by the
/// static assertion that follows this definition.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[doc(hidden)]
#[repr(C)]
pub struct GcRootIndex {
    /// Id of the store this root belongs to; checked on every access.
    store_id: StoreId,
    /// Generation of the LIFO root stack when this root was created. Always
    /// 0 for owned roots (their slot is invalidated by slab deallocation
    /// instead; see `OwnedRooted::new`).
    generation: u32,
    /// Discriminated index: either a LIFO stack position or an owned-slab id.
    index: PackedIndex,
}
// Compile-time layout checks: `GcRootIndex` is decomposed into raw parts for
// the C API, so pin its size (16 bytes) and u64 alignment.
const _: () = {
    assert!(mem::size_of::<GcRootIndex>() == 16);
    assert!(mem::align_of::<GcRootIndex>() == mem::align_of::<u64>());
};
impl GcRootIndex {
    /// Does this root belong to the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.store_id == store.id()
    }

    /// Resolve this root to its raw GC reference in `store`'s root set,
    /// handling both LIFO and owned roots.
    ///
    /// Returns `None` when the root has been unrooted: for LIFO roots, when
    /// the slot was popped (stack shrank) or reused (generation mismatch).
    /// Owned roots are expected to still be present while a handle exists,
    /// hence the `debug_assert!`.
    ///
    /// # Panics
    ///
    /// Panics if this root was created within a different store.
    pub(crate) fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        if let Some(index) = self.index.as_lifo() {
            let entry = store.gc_roots().lifo_roots.get(index)?;
            // A stale generation means the LIFO scope this root lived in has
            // already been exited and the slot reused.
            if entry.generation == self.generation {
                Some(&entry.gc_ref)
            } else {
                None
            }
        } else if let Some(id) = self.index.as_owned() {
            let gc_ref = store.gc_roots().owned_rooted.get(id);
            // Owned roots are only reclaimed after their liveness flag drops
            // to zero, so a lookup through a live handle should always hit.
            debug_assert!(gc_ref.is_some());
            gc_ref
        } else {
            // `PackedIndex` is always exactly one of LIFO or owned.
            unreachable!()
        }
    }

    /// Like `get_gc_ref`, but maps an unrooted reference to an error.
    pub(crate) fn try_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Result<&'a VMGcRef> {
        self.get_gc_ref(store).ok_or_else(|| {
            format_err!("attempted to use a garbage-collected object that has been unrooted")
        })
    }

    /// Clone the underlying GC reference via the store's bookkeeping;
    /// errors if this root has been unrooted.
    pub(crate) fn try_clone_gc_ref(&self, store: &mut AutoAssertNoGc<'_>) -> Result<VMGcRef> {
        let gc_ref = self.try_gc_ref(store)?.unchecked_copy();
        Ok(store.clone_gc_ref(&gc_ref))
    }
}
/// A `u32` that is either a LIFO-stack index or an owned-slab id: the high
/// bit is the discriminant, the low 31 bits are the payload.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[repr(transparent)]
struct PackedIndex(u32);
impl Debug for PackedIndex {
    /// Render the logical variant (`Lifo` or `Owned`) rather than the raw
    /// packed bits.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match (self.as_lifo(), self.as_owned()) {
            (Some(index), _) => f.debug_tuple("PackedIndex::Lifo").field(&index).finish(),
            (_, Some(id)) => f.debug_tuple("PackedIndex::Owned").field(&id).finish(),
            // The discriminant bit always selects exactly one variant.
            (None, None) => unreachable!(),
        }
    }
}
impl PackedIndex {
    /// High bit selects which flavor of index this is.
    const DISCRIMINANT_MASK: u32 = 0b1 << 31;
    /// High bit clear: a position in the LIFO root stack.
    const LIFO_DISCRIMINANT: u32 = 0b0 << 31;
    /// High bit set: an id in the owned-root slab.
    const OWNED_DISCRIMINANT: u32 = 0b1 << 31;
    /// The low 31 bits hold the actual index/id value.
    const PAYLOAD_MASK: u32 = !Self::DISCRIMINANT_MASK;

    /// Pack a LIFO stack position.
    ///
    /// # Panics
    ///
    /// Panics if `index` does not fit in 31 bits.
    fn new_lifo(index: usize) -> PackedIndex {
        let payload = u32::try_from(index).unwrap();
        assert_eq!(payload & Self::DISCRIMINANT_MASK, 0);
        let packed = PackedIndex(Self::LIFO_DISCRIMINANT | payload);
        debug_assert!(packed.is_lifo());
        debug_assert_eq!(packed.as_lifo(), Some(index));
        debug_assert!(!packed.is_owned());
        debug_assert!(packed.as_owned().is_none());
        packed
    }

    /// Pack an owned-slab id.
    ///
    /// # Panics
    ///
    /// Panics if the raw id uses the discriminant (high) bit.
    fn new_owned(id: SlabId) -> PackedIndex {
        let payload = id.into_raw();
        assert_eq!(payload & Self::DISCRIMINANT_MASK, 0);
        let packed = PackedIndex(Self::OWNED_DISCRIMINANT | payload);
        debug_assert!(packed.is_owned());
        debug_assert_eq!(packed.as_owned(), Some(id));
        debug_assert!(!packed.is_lifo());
        debug_assert!(packed.as_lifo().is_none());
        packed
    }

    /// The discriminant bit, isolated.
    fn discriminant(&self) -> u32 {
        self.0 & Self::DISCRIMINANT_MASK
    }

    /// Is this a LIFO-stack index?
    fn is_lifo(&self) -> bool {
        self.discriminant() == Self::LIFO_DISCRIMINANT
    }

    /// Is this an owned-slab id?
    fn is_owned(&self) -> bool {
        self.discriminant() == Self::OWNED_DISCRIMINANT
    }

    /// The 31-bit payload with the discriminant masked off.
    fn payload(&self) -> u32 {
        self.0 & Self::PAYLOAD_MASK
    }

    /// View as a LIFO index, if that's what this is.
    fn as_lifo(&self) -> Option<usize> {
        self.is_lifo()
            .then(|| usize::try_from(self.payload()).unwrap())
    }

    /// View as an owned-slab id, if that's what this is.
    fn as_owned(&self) -> Option<SlabId> {
        self.is_owned().then(|| SlabId::from_raw(self.payload()))
    }
}
/// The set of GC roots a store keeps alive on behalf of the embedder.
///
/// Two storage strategies coexist:
///
/// * `lifo_roots` — scoped roots (`Rooted<T>`) pushed and popped in stack
///   order via `enter_lifo_scope`/`exit_lifo_scope`.
/// * `owned_rooted` — independently-lived roots (`OwnedRooted<T>`) whose
///   lifetime is observed through `Arc` liveness flags and reclaimed lazily
///   by `trim_liveness_flags`.
#[derive(Debug, Default)]
pub(crate) struct RootSet {
    /// Slab of owned roots, keyed by `SlabId`.
    owned_rooted: Slab<VMGcRef>,
    /// One weak flag per owned root; a strong count of zero means the
    /// corresponding slab entry is dead and may be reclaimed.
    liveness_flags: Vec<(Weak<()>, SlabId)>,
    /// Threshold at which non-eager trims actually run; re-derived after
    /// every trim (see `trim_liveness_flags`).
    liveness_trim_high_water: Option<NonZeroUsize>,
    /// Stack of scoped roots.
    lifo_roots: Vec<LifoRoot>,
    /// Bumped every time a LIFO scope is exited, invalidating any `Rooted`
    /// handles into the popped region.
    lifo_generation: u32,
}
/// One entry in the LIFO root stack.
#[derive(Debug)]
struct LifoRoot {
    /// `RootSet::lifo_generation` at the time this root was pushed.
    generation: u32,
    /// The rooted GC reference itself.
    gc_ref: VMGcRef,
}
impl RootSet {
    /// Report every root in this set to the collector's root list so all
    /// embedder-held roots are treated as live during a GC.
    pub(crate) fn trace_roots(&mut self, gc_roots_list: &mut GcRootsList) {
        log::trace!("Begin trace user LIFO roots");
        for root in &mut self.lifo_roots {
            // SAFETY: the pointers handed to `add_root` stay valid for the
            // duration of the trace; `self` is borrowed for that whole time.
            unsafe {
                gc_roots_list.add_root((&mut root.gc_ref).into(), "user LIFO root");
            }
        }
        log::trace!("End trace user LIFO roots");
        log::trace!("Begin trace user owned roots");
        for (_id, root) in self.owned_rooted.iter_mut() {
            // SAFETY: as above — the slab is not mutated while traced.
            unsafe {
                gc_roots_list.add_root(root.into(), "user owned root");
            }
        }
        log::trace!("End trace user owned roots");
    }

    /// Begin a LIFO rooting scope; the returned value is the stack length to
    /// restore when the scope exits.
    #[inline]
    pub(crate) fn enter_lifo_scope(&self) -> usize {
        self.lifo_roots.len()
    }

    /// Exit a LIFO scope, dropping every root pushed since
    /// `enter_lifo_scope` returned `scope`. Cheap no-op when nothing was
    /// pushed; the real work is out-of-line in `exit_lifo_scope_slow`.
    #[inline]
    pub(crate) fn exit_lifo_scope(&mut self, gc_store: Option<&mut GcStore>, scope: usize) {
        debug_assert!(self.lifo_roots.len() >= scope);
        if self.lifo_roots.len() > scope {
            self.exit_lifo_scope_slow(gc_store, scope);
        }
    }

    /// Slow path of `exit_lifo_scope`: pop and drop the roots above `scope`
    /// and bump the generation so surviving `Rooted` handles into the popped
    /// region become stale.
    #[inline(never)]
    #[cold]
    fn exit_lifo_scope_slow(&mut self, mut gc_store: Option<&mut GcStore>, scope: usize) {
        self.lifo_generation += 1;
        // Move the vector out of `self` while dropping entries, then restore
        // it so its allocation is reused. NOTE(review): the `mem::take`
        // detour looks like it exists to avoid aliasing `self` while
        // `drop_gc_ref` runs — confirm before simplifying.
        let mut lifo_roots = mem::take(&mut self.lifo_roots);
        for root in lifo_roots.drain(scope..) {
            // With no `GcStore` (GC never enabled) there is nothing to
            // deallocate; the ref is simply discarded.
            if let Some(gc_store) = &mut gc_store {
                gc_store.drop_gc_ref(root.gc_ref);
            }
        }
        self.lifo_roots = lifo_roots;
    }

    /// Run `f` inside a fresh LIFO scope on `store`, exiting the scope (and
    /// unrooting everything `f` rooted) before returning `f`'s result.
    pub(crate) fn with_lifo_scope<S, T>(store: &mut S, f: impl FnOnce(&mut S) -> T) -> T
    where
        S: ?Sized + DerefMut<Target = StoreOpaque>,
    {
        let scope = store.gc_roots().enter_lifo_scope();
        let ret = f(store);
        store.exit_gc_lifo_scope(scope);
        ret
    }

    /// Push `gc_ref` onto the LIFO root stack and return an index that stays
    /// valid until the current scope is exited.
    pub(crate) fn push_lifo_root(&mut self, store_id: StoreId, gc_ref: VMGcRef) -> GcRootIndex {
        let generation = self.lifo_generation;
        let index = self.lifo_roots.len();
        let index = PackedIndex::new_lifo(index);
        self.lifo_roots.push(LifoRoot { generation, gc_ref });
        GcRootIndex {
            store_id,
            generation,
            index,
        }
    }

    /// Reclaim owned roots whose `OwnedRooted` handles have all been
    /// dropped.
    ///
    /// Unless `eager`, the O(n) sweep only runs once `liveness_flags` has
    /// grown past the high-water mark; afterwards the mark is reset to twice
    /// the surviving count (but no less than the default), amortizing the
    /// cost.
    pub(crate) fn trim_liveness_flags(&mut self, gc_store: &mut GcStore, eager: bool) {
        const DEFAULT_HIGH_WATER: usize = 8;
        const GROWTH_FACTOR: usize = 2;
        let high_water_mark = self
            .liveness_trim_high_water
            .map(|x| x.get())
            .unwrap_or(DEFAULT_HIGH_WATER);
        if !eager && self.liveness_flags.len() < high_water_mark {
            return;
        }
        self.liveness_flags.retain(|(flag, index)| {
            // A strong count of zero means every `OwnedRooted` clone holding
            // this flag is gone: free the slab entry and drop its GC ref.
            if flag.strong_count() == 0 {
                let gc_ref = self.owned_rooted.dealloc(*index);
                gc_store.drop_gc_ref(gc_ref);
                false
            } else {
                true
            }
        });
        let post_trim_len = self.liveness_flags.len();
        // `high_water_mark >= DEFAULT_HIGH_WATER > 0`, so the `NonZeroUsize`
        // construction below cannot fail.
        let high_water_mark = core::cmp::max(
            DEFAULT_HIGH_WATER,
            post_trim_len.saturating_mul(GROWTH_FACTOR),
        );
        self.liveness_trim_high_water = Some(NonZeroUsize::new(high_water_mark).unwrap());
    }
}
/// A scoped GC root: keeps a GC object alive until the enclosing LIFO scope
/// (e.g. a `RootScope`) is exited.
///
/// `repr(transparent)` over `GcRootIndex` so that `GcRefImpl::transmute_ref`
/// can reinterpret a `&GcRootIndex` as a `&T` (see `Deref` below).
#[repr(transparent)]
pub struct Rooted<T: GcRef> {
    /// Index into the store's LIFO root stack (always `PackedIndex::Lifo`).
    inner: GcRootIndex,
    /// No `T` is stored; the phantom only ties in the type parameter.
    _phantom: marker::PhantomData<T>,
}
impl<T: GcRef> Clone for Rooted<T> {
    /// `Rooted` is a plain index and is `Copy`, so cloning is a bitwise
    /// copy.
    ///
    /// Canonical form for a `Copy` type (clippy's
    /// `non_canonical_clone_impl`): the previous field-by-field
    /// reconstruction was behaviorally identical but obscured the fact that
    /// `clone` and the implicit copy must agree.
    fn clone(&self) -> Self {
        *self
    }
}
impl<T: GcRef> Copy for Rooted<T> {}
impl<T: GcRef> Debug for Rooted<T> {
    /// Debug-prints as `Rooted<ConcreteType> { inner: .. }`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct(&format!("Rooted<{}>", any::type_name::<T>()))
            .field("inner", &self.inner)
            .finish()
    }
}
impl<T: GcRef> RootedGcRefImpl<T> for Rooted<T> {
    /// Look up this LIFO root's slot, yielding the GC ref only while the
    /// slot is still occupied by the same generation (i.e. the scope this
    /// root was created in has not been exited).
    ///
    /// # Panics
    ///
    /// Panics if used with a different store than the root came from.
    fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let slot = self.inner.index.as_lifo().unwrap();
        store
            .gc_roots()
            .lifo_roots
            .get(slot)
            .filter(|entry| entry.generation == self.inner.generation)
            .map(|entry| &entry.gc_ref)
    }
}
impl<T: GcRef> Deref for Rooted<T> {
    type Target = T;

    /// Reinterpret this root as the concrete GC reference type so `T`'s
    /// inherent methods are reachable through the root. Relies on the
    /// `GcRefImpl` contract that `T` is a transparent wrapper over
    /// `GcRootIndex`.
    fn deref(&self) -> &Self::Target {
        T::transmute_ref(&self.inner)
    }
}
impl<T: GcRef> Rooted<T> {
    /// Root `gc_ref` in the store's current LIFO scope and wrap the
    /// resulting index as a typed `Rooted<T>`.
    pub(crate) fn new(store: &mut AutoAssertNoGc<'_>, gc_ref: VMGcRef) -> Rooted<T> {
        let id = store.id();
        let roots = store.gc_roots_mut();
        let inner = roots.push_lifo_root(id, gc_ref);
        Rooted {
            inner,
            _phantom: marker::PhantomData,
        }
    }

    /// Wrap an existing root index. Callers must pass a LIFO index.
    pub(crate) fn from_gc_root_index(inner: GcRootIndex) -> Rooted<T> {
        debug_assert!(inner.index.is_lifo());
        Rooted {
            inner,
            _phantom: marker::PhantomData,
        }
    }

    /// Does this root belong to the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        debug_assert!(self.inner.index.is_lifo());
        self.inner.comes_from_same_store(store)
    }

    /// Promote this scoped root into an `OwnedRooted` that lives until
    /// dropped, independent of the current LIFO scope.
    ///
    /// # Errors
    ///
    /// Fails if this root has already been unrooted or if allocating the
    /// owned root fails.
    pub fn to_owned_rooted(&self, mut store: impl AsContextMut) -> Result<OwnedRooted<T>> {
        self._to_owned_rooted(store.as_context_mut().0)
    }

    /// Monomorphic implementation of `to_owned_rooted`.
    pub(crate) fn _to_owned_rooted(&self, store: &mut StoreOpaque) -> Result<OwnedRooted<T>> {
        let mut store = AutoAssertNoGc::new(store);
        let gc_ref = self.try_clone_gc_ref(&mut store)?;
        Ok(OwnedRooted::new(&mut store, gc_ref)?)
    }

    /// Do `a` and `b` name the same *root* (not merely the same object)?
    pub fn rooted_eq(a: Self, b: Self) -> bool {
        a.inner == b.inner
    }

    /// Do `a` and `b` refer to the same underlying GC object?
    ///
    /// # Errors
    ///
    /// Fails if either reference has been unrooted.
    pub fn ref_eq(
        store: impl AsContext,
        a: &impl RootedGcRef<T>,
        b: &impl RootedGcRef<T>,
    ) -> Result<bool> {
        let store = store.as_context().0;
        Self::_ref_eq(store, a, b)
    }

    /// Monomorphic implementation of `ref_eq`: compares the raw GC refs.
    pub(crate) fn _ref_eq(
        store: &StoreOpaque,
        a: &impl RootedGcRef<T>,
        b: &impl RootedGcRef<T>,
    ) -> Result<bool> {
        let a = a.try_gc_ref(store)?;
        let b = b.try_gc_ref(store)?;
        Ok(a == b)
    }

    /// Hash this *root* (consistent with `rooted_eq`, not with `ref_eq`).
    pub fn rooted_hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.inner.hash(state);
    }

    /// Hash the underlying GC *object* (consistent with `ref_eq`).
    ///
    /// # Errors
    ///
    /// Fails if this reference has been unrooted.
    pub fn ref_hash<H>(&self, store: impl AsContext, state: &mut H) -> Result<()>
    where
        H: Hasher,
    {
        let gc_ref = self.try_gc_ref(store.as_context().0)?;
        gc_ref.hash(state);
        Ok(())
    }

    /// Reinterpret as a root of a different GC type without checking;
    /// callers must already know the underlying object is a valid `U`.
    pub(crate) fn unchecked_cast<U: GcRef>(self) -> Rooted<U> {
        Rooted::from_gc_root_index(self.inner)
    }

    /// Lower this root into a raw `ValRaw` for passing to Wasm, exposing the
    /// GC reference to the guest.
    pub(super) fn wasm_ty_store(
        self,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut MaybeUninit<ValRaw>,
        val_raw: impl Fn(u32) -> ValRaw,
    ) -> Result<()> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = match store.optional_gc_store_mut() {
            Some(s) => s.expose_gc_ref_to_wasm(gc_ref),
            // No GC heap exists; only heap-less `i31` refs are representable.
            None => {
                debug_assert!(gc_ref.is_i31());
                gc_ref.as_raw_non_zero_u32()
            }
        };
        ptr.write(val_raw(raw.get()));
        Ok(())
    }

    /// Lift a non-null raw GC reference coming from Wasm into a rooted
    /// value via `from_cloned_gc_ref`.
    pub(super) fn wasm_ty_load(
        store: &mut AutoAssertNoGc<'_>,
        raw_gc_ref: u32,
        from_cloned_gc_ref: impl Fn(&mut AutoAssertNoGc<'_>, VMGcRef) -> Self,
    ) -> Self {
        debug_assert_ne!(raw_gc_ref, 0);
        let gc_ref = VMGcRef::from_raw_u32(raw_gc_ref).expect("non-null");
        let gc_ref = match store.optional_gc_store_mut() {
            Some(s) => s.clone_gc_ref(&gc_ref),
            // No GC heap: only `i31` refs are valid here, and they need no
            // heap bookkeeping to copy.
            None => {
                debug_assert!(gc_ref.is_i31());
                gc_ref.unchecked_copy()
            }
        };
        from_cloned_gc_ref(store, gc_ref)
    }

    /// Nullable variant of `wasm_ty_store`: `None` lowers to a zero ref.
    pub(super) fn wasm_ty_option_store(
        me: Option<Self>,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut MaybeUninit<ValRaw>,
        val_raw: impl Fn(u32) -> ValRaw,
    ) -> Result<()> {
        match me {
            Some(me) => me.wasm_ty_store(store, ptr, val_raw),
            None => {
                ptr.write(val_raw(0));
                Ok(())
            }
        }
    }

    /// Nullable variant of `wasm_ty_load`: a zero ref lifts to `None`.
    pub(super) fn wasm_ty_option_load(
        store: &mut AutoAssertNoGc<'_>,
        raw_gc_ref: u32,
        from_cloned_gc_ref: impl Fn(&mut AutoAssertNoGc<'_>, VMGcRef) -> Self,
    ) -> Option<Self> {
        let gc_ref = VMGcRef::from_raw_u32(raw_gc_ref)?;
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(from_cloned_gc_ref(store, gc_ref))
    }
}
/// RAII guard that opens a LIFO rooting scope on a store.
///
/// Every `Rooted` created through the wrapped context while this scope is
/// alive is unrooted when the scope is dropped.
pub struct RootScope<C>
where
    C: AsContextMut,
{
    /// The wrapped store context.
    store: C,
    /// LIFO stack length to restore on drop.
    scope: usize,
}
impl<C> Drop for RootScope<C>
where
    C: AsContextMut,
{
    /// Exiting the scope unroots everything pushed since it was opened.
    fn drop(&mut self) {
        let mark = self.scope;
        self.store.as_context_mut().0.exit_gc_lifo_scope(mark);
    }
}
impl<C> RootScope<C>
where
    C: AsContextMut,
{
    /// Open a new LIFO rooting scope over `store`.
    pub fn new(store: C) -> Self {
        let mark = store.as_context().0.gc_roots().enter_lifo_scope();
        RootScope { store, scope: mark }
    }

    /// Access the store's root set.
    fn gc_roots(&mut self) -> &mut RootSet {
        self.store.as_context_mut().0.gc_roots_mut()
    }

    /// Access the LIFO root stack itself.
    fn lifo_roots(&mut self) -> &mut Vec<LifoRoot> {
        let roots = self.gc_roots();
        &mut roots.lifo_roots
    }

    /// Pre-allocate capacity for `additional` roots in this scope.
    pub fn reserve(&mut self, additional: usize) {
        self.lifo_roots().reserve(additional);
    }
}
impl<T> AsContext for RootScope<T>
where
    T: AsContextMut,
{
    type Data = T::Data;

    /// Delegate straight through to the wrapped store so a `RootScope` can
    /// be used anywhere a store context is expected.
    fn as_context(&self) -> crate::StoreContext<'_, Self::Data> {
        self.store.as_context()
    }
}
impl<T> AsContextMut for RootScope<T>
where
    T: AsContextMut,
{
    /// Mutable counterpart of `as_context`; plain delegation.
    fn as_context_mut(&mut self) -> crate::StoreContextMut<'_, Self::Data> {
        self.store.as_context_mut()
    }
}
/// Crate-internal analogue of `RootScope` for code that works with a
/// `StoreOpaque` (via `AsStoreOpaque`) instead of a typed store context.
pub(crate) struct OpaqueRootScope<S>
where
    S: AsStoreOpaque,
{
    /// The wrapped store.
    store: S,
    /// LIFO stack length to restore on drop.
    scope: usize,
}
impl<S> Drop for OpaqueRootScope<S>
where
    S: AsStoreOpaque,
{
    /// Exiting the scope unroots everything pushed since it was opened.
    fn drop(&mut self) {
        let mark = self.scope;
        self.store.as_store_opaque().exit_gc_lifo_scope(mark);
    }
}
impl<S> Deref for OpaqueRootScope<S>
where
    S: AsStoreOpaque,
{
    type Target = S;

    /// Transparent access to the wrapped store.
    fn deref(&self) -> &Self::Target {
        &self.store
    }
}
impl<S> DerefMut for OpaqueRootScope<S>
where
    S: AsStoreOpaque,
{
    /// Transparent mutable access to the wrapped store.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.store
    }
}
impl<S> OpaqueRootScope<S>
where
    S: AsStoreOpaque,
{
    /// Open a LIFO rooting scope over an opaque store.
    pub(crate) fn new(mut store: S) -> Self {
        let mark = store.as_store_opaque().gc_roots().enter_lifo_scope();
        OpaqueRootScope { store, scope: mark }
    }
}
/// A GC root that stays live until all clones of this handle are dropped,
/// independent of any rooting scope.
///
/// Dropping the last clone releases the final strong count on
/// `liveness_flag`; the store observes this through a paired `Weak` and
/// reclaims the slab entry on a later `trim_liveness_flags` sweep, so
/// unrooting is deferred rather than immediate.
pub struct OwnedRooted<T>
where
    T: GcRef,
{
    /// Index into the store's owned-root slab (always `PackedIndex::Owned`).
    inner: GcRootIndex,
    /// Shared liveness marker; the store holds the matching `Weak`.
    liveness_flag: Arc<()>,
    /// No `T` is stored; the phantom only ties in the type parameter.
    _phantom: marker::PhantomData<T>,
}
// Compile-time size/alignment bounds: `OwnedRooted` is the 16-byte
// `GcRootIndex` plus one pointer-sized `Arc`, so it must stay within
// 16..=24 bytes and remain u64-aligned.
const _: () = {
    use crate::{AnyRef, ExternRef};
    assert!(
        mem::size_of::<OwnedRooted<AnyRef>>() >= 16 && mem::size_of::<OwnedRooted<AnyRef>>() <= 24
    );
    assert!(mem::align_of::<OwnedRooted<AnyRef>>() == mem::align_of::<u64>());
    assert!(
        mem::size_of::<OwnedRooted<ExternRef>>() >= 16
            && mem::size_of::<OwnedRooted<ExternRef>>() <= 24
    );
    assert!(mem::align_of::<OwnedRooted<ExternRef>>() == mem::align_of::<u64>());
};
impl<T: GcRef> Debug for OwnedRooted<T> {
    /// Debug-prints as `OwnedRooted<ConcreteType> { inner: .. }`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct(&format!("OwnedRooted<{}>", any::type_name::<T>()))
            .field("inner", &self.inner)
            .finish()
    }
}
impl<T: GcRef> Deref for OwnedRooted<T> {
    type Target = T;

    /// Reinterpret this root as the concrete GC reference type; relies on
    /// the `GcRefImpl` contract that `T` transparently wraps `GcRootIndex`.
    fn deref(&self) -> &Self::Target {
        T::transmute_ref(&self.inner)
    }
}
impl<T: GcRef> Clone for OwnedRooted<T> {
fn clone(&self) -> Self {
OwnedRooted {
inner: self.inner,
liveness_flag: self.liveness_flag.clone(),
_phantom: marker::PhantomData,
}
}
}
impl<T> OwnedRooted<T>
where
    T: GcRef,
{
    /// Register `gc_ref` as an owned root in the store.
    ///
    /// Opportunistically trims dead owned roots first, then allocates a
    /// slab slot paired with a fresh liveness flag that the store observes
    /// through a `Weak`.
    ///
    /// # Errors
    ///
    /// Returns `OutOfMemory` if the slab allocation fails.
    pub(crate) fn new(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Result<Self, OutOfMemory> {
        store.trim_gc_liveness_flags(false);
        let roots = store.gc_roots_mut();
        let id = roots.owned_rooted.alloc(gc_ref)?;
        let liveness_flag = Arc::new(());
        roots
            .liveness_flags
            .push((Arc::downgrade(&liveness_flag), id));
        Ok(OwnedRooted {
            inner: GcRootIndex {
                store_id: store.id(),
                // Owned roots don't use the LIFO generation scheme; their
                // slot is invalidated by slab deallocation instead.
                generation: 0,
                index: PackedIndex::new_owned(id),
            },
            liveness_flag,
            _phantom: marker::PhantomData,
        })
    }

    /// Does this root belong to the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        debug_assert!(self.inner.index.is_owned());
        self.inner.comes_from_same_store(store)
    }

    /// Create a scoped `Rooted` alias of this owned root in the store's
    /// current LIFO scope.
    pub fn to_rooted(&self, mut context: impl AsContextMut) -> Rooted<T> {
        self._to_rooted(context.as_context_mut().0)
    }

    /// Monomorphic implementation of `to_rooted`.
    ///
    /// # Panics
    ///
    /// Panics if used with a different store than this root came from.
    pub(crate) fn _to_rooted(&self, store: &mut StoreOpaque) -> Rooted<T> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        let mut store = AutoAssertNoGc::new(store);
        // An owned root is always present while `self` is live, so the
        // clone cannot fail.
        let gc_ref = self.clone_gc_ref(&mut store).unwrap();
        Rooted::new(&mut store, gc_ref)
    }

    /// Do `a` and `b` refer to the same underlying GC object? Delegates to
    /// `Rooted::ref_eq`, which works over any rooted-ref kind.
    pub fn ref_eq(
        store: impl AsContext,
        a: &impl RootedGcRef<T>,
        b: &impl RootedGcRef<T>,
    ) -> Result<bool> {
        Rooted::ref_eq(store, a, b)
    }

    /// Hash this *root* (its index), not the underlying object.
    pub fn rooted_hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.inner.hash(state);
    }

    /// Hash the underlying GC *object* (consistent with `ref_eq`).
    /// Infallible: an owned root stays present while this handle lives.
    pub fn ref_hash<H>(&self, store: impl AsContext, state: &mut H)
    where
        H: Hasher,
    {
        let gc_ref = self
            .get_gc_ref(store.as_context().0)
            .expect("OwnedRooted's get_gc_ref is infallible");
        gc_ref.hash(state);
    }

    /// Reinterpret as a root of a different GC type without checking;
    /// callers must already know the underlying object is a valid `U`.
    pub(crate) fn unchecked_cast<U: GcRef>(self) -> OwnedRooted<U> {
        OwnedRooted {
            inner: self.inner,
            liveness_flag: self.liveness_flag,
            _phantom: core::marker::PhantomData,
        }
    }

    /// Lower this root into a raw `ValRaw` for passing to Wasm, exposing
    /// the GC reference to the guest.
    pub(super) fn wasm_ty_store(
        self,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut MaybeUninit<ValRaw>,
        val_raw: impl Fn(u32) -> ValRaw,
    ) -> Result<()> {
        let gc_ref = self.try_clone_gc_ref(store)?;
        let raw = match store.optional_gc_store_mut() {
            Some(s) => s.expose_gc_ref_to_wasm(gc_ref),
            // No GC heap exists; only heap-less `i31` refs are representable.
            None => {
                debug_assert!(gc_ref.is_i31());
                gc_ref.as_raw_non_zero_u32()
            }
        };
        ptr.write(val_raw(raw.get()));
        Ok(())
    }

    /// Lift a non-null raw GC reference from Wasm into an owned root: the
    /// value is first rooted in a temporary LIFO scope via
    /// `from_cloned_gc_ref`, then promoted to owned before the scope exits.
    pub(super) fn wasm_ty_load(
        store: &mut AutoAssertNoGc<'_>,
        raw_gc_ref: u32,
        from_cloned_gc_ref: impl Fn(&mut AutoAssertNoGc<'_>, VMGcRef) -> Rooted<T>,
    ) -> Self {
        debug_assert_ne!(raw_gc_ref, 0);
        let gc_ref = VMGcRef::from_raw_u32(raw_gc_ref).expect("non-null");
        let gc_ref = store.clone_gc_ref(&gc_ref);
        RootSet::with_lifo_scope(store, |store| {
            let rooted = from_cloned_gc_ref(store, gc_ref);
            rooted._to_owned_rooted(store).expect("rooted is in scope")
        })
    }

    /// Nullable variant of `wasm_ty_store`: `None` lowers to a zero ref.
    pub(super) fn wasm_ty_option_store(
        me: Option<Self>,
        store: &mut AutoAssertNoGc<'_>,
        ptr: &mut MaybeUninit<ValRaw>,
        val_raw: impl Fn(u32) -> ValRaw,
    ) -> Result<()> {
        match me {
            Some(me) => me.wasm_ty_store(store, ptr, val_raw),
            None => {
                ptr.write(val_raw(0));
                Ok(())
            }
        }
    }

    /// Nullable variant of `wasm_ty_load`: a zero ref lifts to `None`.
    pub(super) fn wasm_ty_option_load(
        store: &mut AutoAssertNoGc<'_>,
        raw_gc_ref: u32,
        from_cloned_gc_ref: impl Fn(&mut AutoAssertNoGc<'_>, VMGcRef) -> Rooted<T>,
    ) -> Option<Self> {
        let gc_ref = VMGcRef::from_raw_u32(raw_gc_ref)?;
        let gc_ref = store.clone_gc_ref(&gc_ref);
        RootSet::with_lifo_scope(store, |store| {
            let rooted = from_cloned_gc_ref(store, gc_ref);
            Some(rooted._to_owned_rooted(store).expect("rooted is in scope"))
        })
    }

    /// Decompose into raw parts for the C API, transferring ownership of
    /// one strong count on the liveness flag to the caller
    /// (`Arc::into_raw`).
    #[doc(hidden)]
    pub fn into_parts_for_c_api(self) -> (NonZeroU64, u32, u32, *const ()) {
        (
            self.inner.store_id.as_raw(),
            self.inner.generation,
            self.inner.index.0,
            Arc::into_raw(self.liveness_flag),
        )
    }

    /// Reconstruct from raw parts *without* consuming the caller's strong
    /// count: the `Arc` is briefly rematerialized, cloned, and forgotten.
    ///
    /// # Safety
    ///
    /// The parts must have come from `into_parts_for_c_api` and `d` must
    /// still represent a live strong count on the liveness flag.
    #[doc(hidden)]
    pub unsafe fn from_borrowed_raw_parts_for_c_api(
        a: NonZeroU64,
        b: u32,
        c: u32,
        d: *const (),
    ) -> OwnedRooted<T> {
        let liveness_flag = {
            // SAFETY: per the caller contract, `d` came from `Arc::into_raw`
            // and its strong count is still owned by the caller; `forget`
            // leaves that count untouched while we keep our own clone.
            let original = unsafe { Arc::from_raw(d) };
            let clone = original.clone();
            core::mem::forget(original);
            clone
        };
        OwnedRooted {
            inner: GcRootIndex {
                store_id: StoreId::from_raw(a),
                generation: b,
                index: PackedIndex(c),
            },
            liveness_flag,
            _phantom: marker::PhantomData,
        }
    }

    /// Reconstruct from raw parts, *consuming* the caller's strong count on
    /// the liveness flag.
    ///
    /// # Safety
    ///
    /// The parts must have come from `into_parts_for_c_api`, and `d` must
    /// not be used again by the caller after this call.
    #[doc(hidden)]
    pub unsafe fn from_owned_raw_parts_for_c_api(
        a: NonZeroU64,
        b: u32,
        c: u32,
        d: *const (),
    ) -> OwnedRooted<T> {
        // SAFETY: per the caller contract, `d` came from `Arc::into_raw` and
        // ownership of its strong count transfers to us here.
        let liveness_flag = unsafe { Arc::from_raw(d) };
        OwnedRooted {
            inner: GcRootIndex {
                store_id: StoreId::from_raw(a),
                generation: b,
                index: PackedIndex(c),
            },
            liveness_flag,
            _phantom: marker::PhantomData,
        }
    }
}
impl<T: GcRef> RootedGcRefImpl<T> for OwnedRooted<T> {
    /// Owned roots live in the slab, and a live handle keeps its entry
    /// present, so in practice this always returns `Some`.
    ///
    /// # Panics
    ///
    /// Panics if used with a different store than this root came from.
    fn get_gc_ref<'a>(&self, store: &'a StoreOpaque) -> Option<&'a VMGcRef> {
        assert!(
            self.comes_from_same_store(store),
            "object used with wrong store"
        );
        // `OwnedRooted` is only ever constructed with an owned index.
        let id = self.inner.index.as_owned().unwrap();
        store.gc_roots().owned_rooted.get(id)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ExternRef;

    /// Layout regression checks: these handle types end up embedded in user
    /// data structures, so their sizes are part of the de-facto interface.
    #[test]
    fn sizes() {
        assert_eq!(mem::size_of::<Rooted<ExternRef>>(), 16);
        assert!(mem::size_of::<OwnedRooted<ExternRef>>() <= 24);
    }
}