use crate::Engine;
use crate::hash_set::HashSet;
use crate::prelude::*;
use crate::sync::RwLock;
use crate::vm::GcRuntime;
use alloc::borrow::Cow;
use alloc::sync::Arc;
use core::iter;
use core::{
borrow::Borrow,
fmt::{self, Debug},
hash::{Hash, Hasher},
ops::Range,
sync::atomic::{
AtomicBool, AtomicUsize,
Ordering::{AcqRel, Acquire, Release},
},
};
use wasmtime_environ::{
EngineOrModuleTypeIndex, GcLayout, ModuleInternedTypeIndex, ModuleTypes, PrimaryMap,
SecondaryMap, TypeTrace, VMSharedTypeIndex, WasmRecGroup, WasmSubType, iter_entity_range,
packed_option::{PackedOption, ReservedValue},
};
use wasmtime_slab::{Id as SlabId, Slab};
/// A collection of types registered in an engine's type registry on behalf of
/// one or more modules.
///
/// Holds one registration (reference) on each of its rec groups; dropping the
/// collection releases those registrations (see the `Drop` impl below).
pub struct TypeCollection {
    // The engine whose registry these types were registered in.
    engine: Engine,
    // One entry per registered rec group; the refcounts these represent are
    // released when the collection is dropped.
    rec_groups: Vec<RecGroupEntry>,
    // Map from each module-interned type index to its engine-shared index.
    types: PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex>,
    // For a shared function type, the module-local index of its trampoline
    // type, when one was provided by the module.
    trampolines: SecondaryMap<VMSharedTypeIndex, PackedOption<ModuleInternedTypeIndex>>,
}
impl Debug for TypeCollection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let TypeCollection {
engine: _,
rec_groups,
types,
trampolines,
} = self;
f.debug_struct("TypeCollection")
.field("rec_groups", rec_groups)
.field("types", types)
.field("trampolines", trampolines)
.finish_non_exhaustive()
}
}
impl Engine {
    /// Registers all rec groups in `module_types` with this engine's type
    /// registry and canonicalizes `module_types` and the given modules for
    /// runtime usage (rewriting module-level type references into
    /// engine-level `VMSharedTypeIndex`es).
    ///
    /// The returned `TypeCollection` owns the registrations; the types stay
    /// registered only as long as it is alive.
    #[must_use = "types are only registered as long as the `TypeCollection` is live"]
    pub(crate) fn register_and_canonicalize_types<'a, I>(
        &self,
        module_types: &mut ModuleTypes,
        env_modules: I,
    ) -> TypeCollection
    where
        I: IntoIterator<Item = &'a mut wasmtime_environ::Module>,
        I::IntoIter: ExactSizeIterator,
    {
        // Sanity check: incoming types must still be expressed purely in
        // module-level indices; engine-level canonicalization happens below.
        if cfg!(debug_assertions) {
            module_types
                .trace(&mut |idx| match idx {
                    EngineOrModuleTypeIndex::Module(_) => Ok(()),
                    EngineOrModuleTypeIndex::Engine(_) | EngineOrModuleTypeIndex::RecGroup(_) => {
                        Err(idx)
                    }
                })
                .expect("should only have module type indices");
        }
        let engine = self.clone();
        let registry = engine.signatures();
        let gc_runtime = engine.gc_runtime().map(|rt| &**rt);
        // Register every rec group under the registry's write lock; `types`
        // maps module-interned indices to the resulting shared indices.
        let (rec_groups, types) = registry
            .0
            .write()
            .register_module_types(gc_runtime, module_types);
        // Record, for each shared function type's trampoline type, the
        // module-local index of that trampoline.
        let mut trampolines = SecondaryMap::with_capacity(types.len());
        for (module_ty, module_trampoline_ty) in module_types.trampoline_types() {
            let shared_ty = types[module_ty];
            let trampoline_shared_ty = registry.trampoline_type(shared_ty);
            trampolines[trampoline_shared_ty] = Some(module_trampoline_ty).into();
        }
        // Rewrite all module-level references into engine-level ones, both
        // in the type tables and in the modules themselves.
        module_types.canonicalize_for_runtime_usage(&mut |idx| types[idx]);
        for module in env_modules {
            module.canonicalize_for_runtime_usage(&mut |idx| types[idx]);
        }
        TypeCollection {
            engine,
            rec_groups,
            types,
            trampolines,
        }
    }
}
impl TypeCollection {
    /// Returns the underlying map from module-interned type indices to
    /// engine-shared type indices.
    pub fn as_module_map(&self) -> &PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex> {
        &self.types
    }

    /// Translates a module-interned type index into this engine's shared
    /// index, or `None` when the index is out of range for this collection.
    #[inline]
    pub fn shared_type(&self, index: ModuleInternedTypeIndex) -> Option<VMSharedTypeIndex> {
        let found = self.types.get(index).copied();
        log::trace!("TypeCollection::shared_type({:?}) -> {:?}", index, found);
        found
    }

    /// Returns the module-local index of the trampoline type recorded for
    /// the given shared function type, if any.
    #[inline]
    pub fn trampoline_type(&self, ty: VMSharedTypeIndex) -> Option<ModuleInternedTypeIndex> {
        let found = self.trampolines[ty].expand();
        log::trace!("TypeCollection::trampoline_type({:?}) -> {:?}", ty, found);
        found
    }
}
impl Drop for TypeCollection {
    /// Releases this collection's registrations on its rec groups, removing
    /// from the registry any whose registration count reaches zero.
    fn drop(&mut self) {
        // Nothing was registered, so there is nothing to release.
        if self.rec_groups.is_empty() {
            return;
        }
        self.engine
            .signatures()
            .0
            .write()
            .unregister_type_collection(self);
    }
}
/// Converts an engine-shared type index into the slab id of its slot.
///
/// Panics on the reserved index value, which never names a live type.
#[inline]
fn shared_type_index_to_slab_id(index: VMSharedTypeIndex) -> SlabId {
    assert!(!index.is_reserved_value());
    let raw = index.bits();
    SlabId::from_raw(raw)
}
/// Converts a slab id back into the engine-shared type index for that slot.
///
/// Panics if the resulting index would be the reserved value.
#[inline]
fn slab_id_to_shared_type_index(id: SlabId) -> VMSharedTypeIndex {
    let shared = VMSharedTypeIndex::new(id.into_raw());
    assert!(!shared.is_reserved_value());
    shared
}
/// An owning handle to a single type registered in an engine's registry.
///
/// Keeps the type's rec group registered (via a registration count on
/// `entry`) for as long as this value is alive.
pub struct RegisteredType {
    // The engine whose registry holds this type.
    engine: Engine,
    // The rec group entry this handle holds one registration on.
    entry: RecGroupEntry,
    // The registered type itself.
    ty: Arc<WasmSubType>,
    // This type's engine-shared index.
    index: VMSharedTypeIndex,
    // The type's GC layout, if it is a GC-managed composite type.
    layout: Option<GcLayout>,
}
impl Debug for RegisteredType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let RegisteredType {
engine: _,
entry: _,
ty,
index,
layout,
} = self;
f.debug_struct("RegisteredType")
.field("index", index)
.field("ty", ty)
.field("layout", layout)
.finish_non_exhaustive()
}
}
impl Clone for RegisteredType {
    fn clone(&self) -> Self {
        self.engine.signatures().debug_assert_contains(self.index);
        // Each `RegisteredType` owns one registration on its rec group, so a
        // clone must take an additional registration before the new handle
        // is constructed.
        self.entry.incref("RegisteredType::clone");
        RegisteredType {
            engine: self.engine.clone(),
            entry: self.entry.clone(),
            ty: self.ty.clone(),
            index: self.index,
            layout: self.layout.clone(),
        }
    }
}
impl Drop for RegisteredType {
    fn drop(&mut self) {
        self.engine.signatures().debug_assert_contains(self.index);
        // Release this handle's registration; if it was the last one, the
        // entry must be removed from the registry under the write lock.
        if self.entry.decref("RegisteredType::drop") {
            self.engine
                .signatures()
                .0
                .write()
                .unregister_entry(self.entry.clone());
        }
    }
}
impl core::ops::Deref for RegisteredType {
    type Target = WasmSubType;

    /// Dereferences to the underlying registered `WasmSubType`.
    fn deref(&self) -> &Self::Target {
        self.ty.as_ref()
    }
}
impl PartialEq for RegisteredType {
    /// Two registrations are equal when they name the same shared index in
    /// the same engine.
    fn eq(&self, other: &Self) -> bool {
        self.engine.signatures().debug_assert_contains(self.index);
        other.engine.signatures().debug_assert_contains(other.index);
        let equal = self.index == other.index && Engine::same(&self.engine, &other.engine);
        // Equal handles must share the same rec group entry and type.
        if cfg!(debug_assertions) && equal {
            assert!(Arc::ptr_eq(&self.entry.0, &other.entry.0));
            assert_eq!(self.ty, other.ty);
        }
        equal
    }
}
impl Eq for RegisteredType {}
impl Hash for RegisteredType {
fn hash<H: Hasher>(&self, state: &mut H) {
self.engine.signatures().debug_assert_contains(self.index);
let ptr = Arc::as_ptr(&self.entry.0);
ptr.hash(state);
}
}
impl RegisteredType {
    /// Registers `ty` in `engine`'s registry as a rec group of one and
    /// returns an owning handle to it.
    ///
    /// `ty` must already be canonicalized for runtime usage in this engine's
    /// registry.
    pub fn new(engine: &Engine, ty: WasmSubType) -> RegisteredType {
        let (entry, index, ty, layout) = {
            log::trace!("RegisteredType::new({ty:?})");
            let gc_runtime = engine.gc_runtime().map(|rt| &**rt);
            // Hold the write lock only for the registration itself; the
            // returned entry already carries this caller's registration
            // count.
            let mut inner = engine.signatures().0.write();
            inner.assert_canonicalized_for_runtime_usage_in_this_registry(&ty);
            let entry = inner.register_singleton_rec_group(gc_runtime, ty);
            // A singleton rec group contains exactly one type.
            let index = entry.0.shared_type_indices[0];
            let id = shared_type_index_to_slab_id(index);
            let ty = inner.types[id].clone().unwrap();
            let layout = inner.type_to_gc_layout.get(index).and_then(|l| l.clone());
            (entry, index, ty, layout)
        };
        RegisteredType::from_parts(engine.clone(), entry, index, ty, layout)
    }

    /// Creates an owning handle to the already-registered type at `index`,
    /// taking a new registration on its rec group so it stays alive at least
    /// as long as the returned value.
    pub fn root(engine: &Engine, index: VMSharedTypeIndex) -> RegisteredType {
        engine.signatures().debug_assert_contains(index);
        let (entry, ty, layout) = {
            let id = shared_type_index_to_slab_id(index);
            let inner = engine.signatures().0.read();
            let ty = inner.types[id].clone().unwrap();
            let entry = inner.type_to_rec_group[index].clone().unwrap();
            let layout = inner.type_to_gc_layout.get(index).and_then(|l| l.clone());
            // Increment while still holding the lock so the entry cannot be
            // concurrently unregistered before we take our reference.
            entry.incref("RegisteredType::root");
            (entry, ty, layout)
        };
        RegisteredType::from_parts(engine.clone(), entry, index, ty, layout)
    }

    /// Assembles a `RegisteredType` from its parts.
    ///
    /// The caller must already have incremented `entry`'s registration count
    /// on behalf of the new value.
    fn from_parts(
        engine: Engine,
        entry: RecGroupEntry,
        index: VMSharedTypeIndex,
        ty: Arc<WasmSubType>,
        layout: Option<GcLayout>,
    ) -> Self {
        log::trace!(
            "RegisteredType::from_parts({engine:?}, {entry:?}, {index:?}, {ty:?}, {layout:?})"
        );
        engine.signatures().debug_assert_contains(index);
        debug_assert!(
            entry.0.registrations.load(Acquire) != 0,
            "entry should have a non-zero registration count"
        );
        RegisteredType {
            engine,
            entry,
            ty,
            index,
            layout,
        }
    }

    /// Returns the engine whose registry this type is registered in.
    pub fn engine(&self) -> &Engine {
        &self.engine
    }

    /// Returns this type's engine-shared index.
    pub fn index(&self) -> VMSharedTypeIndex {
        self.index
    }

    /// Returns this type's GC layout, if it is a GC-managed composite type.
    #[cfg(feature = "gc")]
    pub fn layout(&self) -> Option<&GcLayout> {
        self.layout.as_ref()
    }
}
/// A shared, refcounted handle to a registered rec group's bookkeeping data.
///
/// Cloning the handle is cheap (an `Arc` clone) and does NOT take a
/// registration; use `incref`/`decref` for that.
#[derive(Clone)]
struct RecGroupEntry(Arc<RecGroupEntryInner>);
impl Debug for RecGroupEntry {
    /// Formats the entry as `RecGroupEntry(<pointer>)`, identifying it by
    /// the address of its shared inner data.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        /// Adapter that renders a pointer-like value via `{:#p}` when asked
        /// for its `Debug` representation.
        struct Ptr<'a, P>(&'a P);

        impl<P: fmt::Pointer> Debug for Ptr<'_, P> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, "{:#p}", *self.0)
            }
        }

        f.debug_tuple("RecGroupEntry").field(&Ptr(&self.0)).finish()
    }
}
/// The shared state behind a `RecGroupEntry`.
struct RecGroupEntryInner {
    // The group's canonicalized-for-hash-consing key, used to deduplicate
    // structurally identical rec groups.
    hash_consing_key: WasmRecGroup,
    // The engine-shared index of each type in the group, in order.
    shared_type_indices: Box<[VMSharedTypeIndex]>,
    // Number of live registrations; when it reaches zero the entry is
    // removed from the registry.
    registrations: AtomicUsize,
    // Set (under the registry lock) once the entry has been unregistered,
    // so racing unregistration attempts can detect it.
    unregistered: AtomicBool,
}
impl PartialEq for RecGroupEntry {
    /// Entries compare equal when their hash-consing keys are structurally
    /// equal, enabling deduplication of identical rec groups.
    fn eq(&self, other: &Self) -> bool {
        let lhs = &self.0.hash_consing_key;
        let rhs = &other.0.hash_consing_key;
        lhs == rhs
    }
}

impl Eq for RecGroupEntry {}

impl Hash for RecGroupEntry {
    /// Hashes the hash-consing key, consistent with `PartialEq` above.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let key = &self.0.hash_consing_key;
        key.hash(state);
    }
}
// Allows looking entries up in the hash-consing map directly by a
// `WasmRecGroup` key without constructing a full `RecGroupEntry`.
impl Borrow<WasmRecGroup> for RecGroupEntry {
    #[inline]
    fn borrow(&self) -> &WasmRecGroup {
        &self.0.hash_consing_key
    }
}
impl RecGroupEntry {
    /// Increments this entry's registration count. `why` is logged for
    /// refcount debugging.
    fn incref(&self, why: &str) {
        let old_count = self.0.registrations.fetch_add(1, AcqRel);
        log::trace!("incref({self:?}) -> count {}: {why}", old_count + 1);
    }

    /// Decrements this entry's registration count, returning `true` when the
    /// count reached zero; the caller must then unregister the entry under
    /// the registry's write lock.
    #[must_use = "caller must remove entry from registry if `decref` returns `true`"]
    fn decref(&self, why: &str) -> bool {
        let old_count = self.0.registrations.fetch_sub(1, AcqRel);
        debug_assert_ne!(old_count, 0);
        log::trace!("decref({self:?}) -> count {}: {why}", old_count - 1);
        old_count == 1
    }
}
/// The mutable state of a `TypeRegistry`, always accessed behind its lock.
#[derive(Debug, Default)]
struct TypeRegistryInner {
    // Deduplication map from a rec group's canonical key to its entry.
    hash_consing_map: HashSet<RecGroupEntry>,
    // Storage for registered types; a slot's slab id corresponds to the
    // type's `VMSharedTypeIndex`. `None` marks a reserved-but-unfilled slot.
    types: Slab<Option<Arc<WasmSubType>>>,
    // Per-type back-pointer to the owning rec group entry.
    type_to_rec_group: SecondaryMap<VMSharedTypeIndex, Option<RecGroupEntry>>,
    // Per-type full supertype chain (root first), for fast subtype checks.
    type_to_supertypes: SecondaryMap<VMSharedTypeIndex, Option<Box<[VMSharedTypeIndex]>>>,
    // Per-function-type trampoline type, when distinct from the type itself.
    type_to_trampoline: SecondaryMap<VMSharedTypeIndex, PackedOption<VMSharedTypeIndex>>,
    // Per-type GC layout, for GC-managed composite types.
    type_to_gc_layout: SecondaryMap<VMSharedTypeIndex, Option<GcLayout>>,
    // Scratch stack used by `unregister_entry` to process transitive
    // unregistrations iteratively instead of recursively.
    drop_stack: Vec<RecGroupEntry>,
}
impl TypeRegistryInner {
    /// Debug-asserts that `index` is fully registered: not the reserved
    /// value, present in the slab with an actual type, and associated with a
    /// rec group entry.
    #[inline]
    #[track_caller]
    fn debug_assert_registered(&self, index: VMSharedTypeIndex) {
        debug_assert!(
            !index.is_reserved_value(),
            "should have an actual VMSharedTypeIndex, not the reserved value"
        );
        debug_assert!(
            self.types.contains(shared_type_index_to_slab_id(index)),
            "registry's slab should contain {index:?}",
        );
        debug_assert!(
            self.types[shared_type_index_to_slab_id(index)].is_some(),
            "registry's slab should actually contain a type for {index:?}",
        );
        debug_assert!(
            self.type_to_rec_group[index].is_some(),
            "{index:?} should have an associated rec group entry"
        );
    }

    /// Debug-asserts that every index yielded by `indices` is registered.
    #[inline]
    #[track_caller]
    fn debug_assert_all_registered(&self, indices: impl IntoIterator<Item = VMSharedTypeIndex>) {
        if cfg!(debug_assertions) {
            for index in indices {
                self.debug_assert_registered(index);
            }
        }
    }

    /// Registers all rec groups in `types`, returning one entry per group
    /// (each carrying a registration count now owned by the caller) along
    /// with the map from module-interned indices to engine-shared indices.
    fn register_module_types(
        &mut self,
        gc_runtime: Option<&dyn GcRuntime>,
        types: &ModuleTypes,
    ) -> (
        Vec<RecGroupEntry>,
        PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex>,
    ) {
        log::trace!("Start registering module types");
        let mut entries = Vec::with_capacity(types.rec_groups().len());
        let mut map = PrimaryMap::<ModuleInternedTypeIndex, VMSharedTypeIndex>::with_capacity(
            types.wasm_types().len(),
        );
        for (_rec_group_index, module_group) in types.rec_groups() {
            let entry = self.register_rec_group(
                gc_runtime,
                &map,
                module_group.clone(),
                iter_entity_range(module_group.clone()).map(|ty| types[ty].clone()),
            );
            // Extend `map` with the group's types; each push must land on
            // the module's own index for that type.
            for (module_ty, engine_ty) in
                iter_entity_range(module_group).zip(entry.0.shared_type_indices.iter())
            {
                let module_ty2 = map.push(*engine_ty);
                assert_eq!(module_ty, module_ty2);
            }
            entries.push(entry);
        }
        log::trace!("End registering module types");
        (entries, map)
    }

    /// Registers a single rec group, deduplicating structurally identical
    /// groups via the hash-consing map.
    ///
    /// `map` translates module indices of previously-registered groups to
    /// their engine indices; `range` is this group's module-index range, and
    /// `types` yields the group's types in index order.
    ///
    /// The returned entry carries a registration count owned by the caller.
    fn register_rec_group(
        &mut self,
        gc_runtime: Option<&dyn GcRuntime>,
        map: &PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex>,
        range: Range<ModuleInternedTypeIndex>,
        types: impl ExactSizeIterator<Item = WasmSubType>,
    ) -> RecGroupEntry {
        log::trace!("registering rec group of length {}", types.len());
        debug_assert_eq!(iter_entity_range(range.clone()).len(), types.len());
        // Build the hash-consing key: each type canonicalized relative to
        // the rec group so structurally equal groups compare equal. Keep the
        // non-canonical originals for per-type registration below.
        let mut non_canon_types = Vec::with_capacity(types.len());
        let hash_consing_key = WasmRecGroup {
            types: types
                .zip(iter_entity_range(range.clone()))
                .map(|(mut ty, module_index)| {
                    non_canon_types.push((module_index, ty.clone()));
                    ty.canonicalize_for_hash_consing(range.clone(), &mut |idx| {
                        // Only references to earlier, already-registered
                        // groups should remain at module level here.
                        debug_assert!(idx < range.clone().start);
                        map[idx]
                    });
                    ty
                })
                .collect::<Box<[_]>>(),
        };
        if cfg!(debug_assertions) {
            // Any engine-level indices the key references must already be
            // registered in this registry.
            hash_consing_key
                .trace_engine_indices::<_, ()>(&mut |index| Ok(self.debug_assert_registered(index)))
                .unwrap();
        }
        // Hash-consing hit: reuse the existing registration, bumping its
        // count on behalf of this caller.
        if let Some(entry) = self.hash_consing_map.get(&hash_consing_key) {
            log::trace!("hash-consing map hit: reusing {entry:?}");
            assert_eq!(entry.0.unregistered.load(Acquire), false);
            self.debug_assert_all_registered(entry.0.shared_type_indices.iter().copied());
            entry.incref("hash-consing map hit");
            return entry.clone();
        }
        log::trace!("hash-consing map miss: making new registration");
        // A new rec group takes a reference on every other rec group it
        // mentions, keeping its dependencies alive.
        hash_consing_key
            .trace_engine_indices::<_, ()>(&mut |index| {
                self.debug_assert_registered(index);
                let other_entry = self.type_to_rec_group[index].as_ref().unwrap();
                assert_eq!(other_entry.0.unregistered.load(Acquire), false);
                other_entry.incref("new rec group's type references");
                Ok(())
            })
            .unwrap();
        // Reserve a slab slot (and thus a final engine index) for every type
        // in the group up front, so intra-group references can be resolved
        // during canonicalization below.
        let module_rec_group_start = range.start;
        let shared_type_indices: Box<[_]> = non_canon_types
            .iter()
            .map(|(module_index, ty)| {
                let engine_index = slab_id_to_shared_type_index(self.types.alloc(None));
                log::trace!(
                    "reserved {engine_index:?} for {module_index:?} = non-canonical {ty:?}"
                );
                engine_index
            })
            .collect();
        for (engine_index, (module_index, mut ty)) in
            shared_type_indices.iter().copied().zip(non_canon_types)
        {
            log::trace!("canonicalizing {engine_index:?} for runtime usage");
            ty.canonicalize_for_runtime_usage(&mut |module_index| {
                if module_index < module_rec_group_start {
                    // Cross-group reference: translate through `map`.
                    let engine_index = map[module_index];
                    log::trace!(" cross-group {module_index:?} becomes {engine_index:?}");
                    self.debug_assert_registered(engine_index);
                    engine_index
                } else {
                    // Intra-group reference: translate by offset into the
                    // freshly reserved indices.
                    assert!(module_index < range.end);
                    let rec_group_offset = module_index.as_u32() - module_rec_group_start.as_u32();
                    let rec_group_offset = usize::try_from(rec_group_offset).unwrap();
                    let engine_index = shared_type_indices[rec_group_offset];
                    log::trace!(" intra-group {module_index:?} becomes {engine_index:?}");
                    assert!(!engine_index.is_reserved_value());
                    assert!(
                        self.types
                            .contains(shared_type_index_to_slab_id(engine_index))
                    );
                    engine_index
                }
            });
            self.insert_one_type_from_rec_group(gc_runtime, module_index, engine_index, ty);
        }
        // Every reserved slot must have been filled by the loop above.
        if cfg!(debug_assertions) {
            for index in &shared_type_indices {
                let id = shared_type_index_to_slab_id(*index);
                debug_assert!(self.types.contains(id));
                debug_assert!(self.types[id].is_some());
            }
        }
        debug_assert_eq!(
            shared_type_indices.len(),
            shared_type_indices
                .iter()
                .copied()
                .collect::<crate::hash_set::HashSet<_>>()
                .len(),
            "should not have any duplicate type indices",
        );
        // The initial registration count of one is owned by the caller.
        let entry = RecGroupEntry(Arc::new(RecGroupEntryInner {
            hash_consing_key,
            shared_type_indices,
            registrations: AtomicUsize::new(1),
            unregistered: AtomicBool::new(false),
        }));
        log::trace!("new {entry:?} -> count 1");
        let is_new_entry = self.hash_consing_map.insert(entry.clone());
        debug_assert!(is_new_entry);
        // Point each of the group's types back at the new entry.
        for ty in entry.0.shared_type_indices.iter().copied() {
            debug_assert!(self.type_to_rec_group[ty].is_none());
            self.type_to_rec_group[ty] = Some(entry.clone());
        }
        self.debug_assert_all_registered(entry.0.shared_type_indices.iter().copied());
        // Ensure each function type in the group has a registered trampoline
        // type and record the association.
        for shared_type_index in entry.0.shared_type_indices.iter().copied() {
            let slab_id = shared_type_index_to_slab_id(shared_type_index);
            let sub_ty = self.types[slab_id].as_ref().unwrap();
            if let Some(f) = sub_ty.as_func() {
                let trampoline = f.trampoline_type();
                match &trampoline {
                    Cow::Borrowed(_) if sub_ty.is_final && sub_ty.supertype.is_none() => {
                        // The function type is its own trampoline type, so
                        // no extra registration (and no `type_to_trampoline`
                        // entry) is needed.
                        log::trace!(
                            "trampoline_type({shared_type_index:?}) = {shared_type_index:?}",
                        );
                    }
                    Cow::Borrowed(_) | Cow::Owned(_) => {
                        // Distinct trampoline type: register it as a
                        // singleton rec group (this takes a registration on
                        // it, released again in `unregister_entry`).
                        let trampoline_entry = self.register_singleton_rec_group(
                            gc_runtime,
                            WasmSubType {
                                is_final: true,
                                supertype: None,
                                composite_type: wasmtime_environ::WasmCompositeType {
                                    shared: sub_ty.composite_type.shared,
                                    inner: wasmtime_environ::WasmCompositeInnerType::Func(
                                        trampoline.into_owned(),
                                    ),
                                },
                            },
                        );
                        assert_eq!(trampoline_entry.0.shared_type_indices.len(), 1);
                        let trampoline_index = trampoline_entry.0.shared_type_indices[0];
                        log::trace!(
                            "trampoline_type({shared_type_index:?}) = {trampoline_index:?}",
                        );
                        self.debug_assert_registered(trampoline_index);
                        debug_assert_ne!(shared_type_index, trampoline_index);
                        self.type_to_trampoline[shared_type_index] = Some(trampoline_index).into();
                    }
                }
            }
        }
        entry
    }

    /// Asserts that `ty` refers only to engine-level indices that are
    /// registered in this registry (i.e. is canonicalized for runtime usage
    /// here, not in some other engine's registry).
    fn assert_canonicalized_for_runtime_usage_in_this_registry(&self, ty: &WasmSubType) {
        ty.trace::<_, ()>(&mut |index| match index {
            EngineOrModuleTypeIndex::RecGroup(_) | EngineOrModuleTypeIndex::Module(_) => {
                panic!("not canonicalized for runtime usage: {ty:?}")
            }
            EngineOrModuleTypeIndex::Engine(idx) => {
                self.debug_assert_registered(idx);
                Ok(())
            }
        })
        .unwrap();
    }

    /// Fills the reserved slab slot `engine_index` with the
    /// already-canonicalized `ty`, computing its GC layout and supertype
    /// chain as side tables.
    fn insert_one_type_from_rec_group(
        &mut self,
        gc_runtime: Option<&dyn GcRuntime>,
        module_index: ModuleInternedTypeIndex,
        engine_index: VMSharedTypeIndex,
        ty: WasmSubType,
    ) {
        assert!(
            ty.is_canonicalized_for_runtime_usage(),
            "type is not canonicalized for runtime usage: {ty:?}"
        );
        // Shared composite types are not supported here.
        assert!(!ty.composite_type.shared);
        // Only GC-managed composites (arrays, structs, exceptions) have a GC
        // layout; function and continuation types do not.
        let gc_layout = match &ty.composite_type.inner {
            wasmtime_environ::WasmCompositeInnerType::Func(_) => None,
            wasmtime_environ::WasmCompositeInnerType::Array(a) => Some(
                gc_runtime
                    .expect("must have a GC runtime to register array types")
                    .layouts()
                    .array_layout(a)
                    .into(),
            ),
            wasmtime_environ::WasmCompositeInnerType::Struct(s) => Some(
                gc_runtime
                    .expect("must have a GC runtime to register struct types")
                    .layouts()
                    .struct_layout(s)
                    .into(),
            ),
            wasmtime_environ::WasmCompositeInnerType::Exn(e) => Some(
                gc_runtime
                    .expect("must have a GC runtime to register exception types")
                    .layouts()
                    .exn_layout(e)
                    .into(),
            ),
            wasmtime_environ::WasmCompositeInnerType::Cont(_) => None,
        };
        // Fill the slot that `register_rec_group` reserved for this type.
        let id = shared_type_index_to_slab_id(engine_index);
        assert!(self.types.contains(id));
        assert!(self.types[id].is_none());
        self.types[id] = Some(Arc::new(ty));
        // Record the full supertype chain: the supertype's own chain plus
        // the immediate supertype itself. This powers constant-time subtype
        // checks in `TypeRegistry::is_subtype_slow`.
        if let Some(supertype) = self.types[id].as_ref().unwrap().supertype {
            let supertype = supertype.unwrap_engine_type_index();
            let supers_supertypes = self.supertypes(supertype);
            let mut supertypes = Vec::with_capacity(supers_supertypes.len() + 1);
            supertypes.extend(
                supers_supertypes
                    .iter()
                    .copied()
                    .chain(iter::once(supertype)),
            );
            self.type_to_supertypes[engine_index] = Some(supertypes.into_boxed_slice());
        }
        if let Some(layout) = gc_layout {
            self.type_to_gc_layout[engine_index] = Some(layout);
        }
        log::trace!(
            "finished registering type {module_index:?} as {engine_index:?} = runtime-canonical {:?}",
            self.types[id].as_ref().unwrap()
        );
    }

    /// Returns the full supertype chain recorded for `ty` (root first), or
    /// an empty slice when it has none.
    fn supertypes(&self, ty: VMSharedTypeIndex) -> &[VMSharedTypeIndex] {
        self.type_to_supertypes
            .get(ty)
            .and_then(|s| s.as_deref())
            .unwrap_or(&[])
    }

    /// Registers `ty` as its own rec group of length one.
    ///
    /// `ty` must already be canonicalized for runtime usage. The synthetic
    /// module-index range near `u32::MAX` never matches a real module index,
    /// so all of the type's references are treated as cross-group.
    fn register_singleton_rec_group(
        &mut self,
        gc_runtime: Option<&dyn GcRuntime>,
        ty: WasmSubType,
    ) -> RecGroupEntry {
        self.assert_canonicalized_for_runtime_usage_in_this_registry(&ty);
        let map = PrimaryMap::default();
        let range = ModuleInternedTypeIndex::from_bits(u32::MAX - 1)
            ..ModuleInternedTypeIndex::from_bits(u32::MAX);
        self.register_rec_group(gc_runtime, &map, range, iter::once(ty))
    }

    /// Drops the collection's registration on each of its rec groups,
    /// unregistering any whose registration count reaches zero.
    fn unregister_type_collection(&mut self, collection: &TypeCollection) {
        log::trace!("Begin unregistering `TypeCollection`");
        for entry in &collection.rec_groups {
            self.debug_assert_all_registered(entry.0.shared_type_indices.iter().copied());
            if entry.decref("TypeRegistryInner::unregister_type_collection") {
                self.unregister_entry(entry.clone());
            }
        }
        log::trace!("Finished unregistering `TypeCollection`");
    }

    /// Removes a zero-registration entry — and, transitively, any entries
    /// whose counts reach zero because of it — from the registry.
    ///
    /// Uses an explicit drop stack instead of recursion so that deep
    /// reference chains cannot overflow the call stack.
    ///
    /// Because `decref` happens outside the registry lock, the entry may
    /// have been concurrently resurrected (found in the hash-consing map and
    /// incref'd), or resurrected, dropped again, and already unregistered by
    /// another thread before we acquired the lock. Both races are detected
    /// below and turned into no-ops.
    fn unregister_entry(&mut self, entry: RecGroupEntry) {
        log::trace!("Attempting to unregister {entry:?}");
        debug_assert!(self.drop_stack.is_empty());
        // Race 1: resurrected after the caller's decref — leave it alone.
        let registrations = entry.0.registrations.load(Acquire);
        if registrations != 0 {
            log::trace!(
                " {entry:?} was concurrently resurrected and no longer has \
                 zero registrations (registrations -> {registrations})",
            );
            assert_eq!(entry.0.unregistered.load(Acquire), false);
            return;
        }
        // Race 2: already unregistered by another thread — nothing to do.
        if entry.0.unregistered.load(Acquire) {
            log::trace!(
                " {entry:?} was concurrently resurrected, dropped again, \
                 and already unregistered"
            );
            return;
        }
        self.drop_stack.push(entry);
        while let Some(entry) = self.drop_stack.pop() {
            log::trace!("Begin unregistering {entry:?}");
            self.debug_assert_all_registered(entry.0.shared_type_indices.iter().copied());
            assert_eq!(entry.0.registrations.load(Acquire), 0);
            assert_eq!(entry.0.unregistered.load(Acquire), false);
            // Mark the entry unregistered (still under the registry lock) so
            // later racing unregistration attempts can detect it.
            entry.0.unregistered.store(true, Release);
            debug_assert!(entry.0.hash_consing_key.is_canonicalized_for_hash_consing());
            // Release this group's references on the other groups it
            // mentions, queueing any that reach zero.
            entry
                .0
                .hash_consing_key
                .trace_engine_indices::<_, ()>(&mut |other_index| {
                    self.debug_assert_registered(other_index);
                    let other_entry = self.type_to_rec_group[other_index].as_ref().unwrap();
                    if other_entry.decref("dropping rec group's type references") {
                        self.drop_stack.push(other_entry.clone());
                    }
                    Ok(())
                })
                .unwrap();
            let was_in_map = self.hash_consing_map.remove(&entry);
            debug_assert!(was_in_map);
            debug_assert_eq!(
                entry.0.shared_type_indices.len(),
                entry
                    .0
                    .shared_type_indices
                    .iter()
                    .copied()
                    .collect::<crate::hash_set::HashSet<_>>()
                    .len(),
                "should not have any duplicate type indices",
            );
            // Tear down every per-type side table and free the slab slots.
            for ty in entry.0.shared_type_indices.iter().copied() {
                log::trace!("removing {ty:?} from registry");
                let removed_entry = self.type_to_rec_group[ty].take();
                debug_assert_eq!(removed_entry.unwrap(), entry);
                // Release the reference held on the trampoline type, if any.
                if let Some(trampoline_ty) =
                    self.type_to_trampoline.get(ty).and_then(|x| x.expand())
                {
                    self.debug_assert_registered(trampoline_ty);
                    self.type_to_trampoline[ty] = None.into();
                    let trampoline_entry = self.type_to_rec_group[trampoline_ty].as_ref().unwrap();
                    if trampoline_entry.decref("dropping rec group's trampoline-type references") {
                        self.drop_stack.push(trampoline_entry.clone());
                    }
                }
                if self.type_to_supertypes.get(ty).is_some() {
                    self.type_to_supertypes[ty] = None;
                }
                if self.type_to_gc_layout.get(ty).is_some() {
                    self.type_to_gc_layout[ty] = None;
                }
                // Finally free the type's slab slot itself.
                let id = shared_type_index_to_slab_id(ty);
                let deallocated_ty = self.types.dealloc(id);
                assert!(deallocated_ty.is_some());
            }
            log::trace!("End unregistering {entry:?}");
        }
    }
}
// In debug builds, verify that the registry is completely empty when it is
// dropped — i.e. that every registration was correctly released.
#[cfg(debug_assertions)]
impl Drop for TypeRegistryInner {
    fn drop(&mut self) {
        // Exhaustive destructuring so that adding a field forces these
        // checks to be updated.
        let TypeRegistryInner {
            hash_consing_map,
            types,
            type_to_rec_group,
            type_to_supertypes,
            type_to_trampoline,
            type_to_gc_layout,
            drop_stack,
        } = self;
        assert!(
            hash_consing_map.is_empty(),
            "type registry not empty: hash consing map is not empty: {hash_consing_map:#?}"
        );
        assert!(
            types.is_empty(),
            "type registry not empty: types slab is not empty: {types:#?}"
        );
        // The secondary maps may retain capacity, but every slot must have
        // been cleared back to `None`.
        assert!(
            type_to_rec_group.is_empty() || type_to_rec_group.values().all(|x| x.is_none()),
            "type registry not empty: type-to-rec-group map is not empty: {type_to_rec_group:#?}"
        );
        assert!(
            type_to_supertypes.is_empty() || type_to_supertypes.values().all(|x| x.is_none()),
            "type registry not empty: type-to-supertypes map is not empty: {type_to_supertypes:#?}"
        );
        assert!(
            type_to_trampoline.is_empty() || type_to_trampoline.values().all(|x| x.is_none()),
            "type registry not empty: type-to-trampoline map is not empty: {type_to_trampoline:#?}"
        );
        assert!(
            type_to_gc_layout.is_empty() || type_to_gc_layout.values().all(|x| x.is_none()),
            "type registry not empty: type-to-gc-layout map is not empty: {type_to_gc_layout:#?}"
        );
        assert!(
            drop_stack.is_empty(),
            "type registry not empty: drop stack is not empty: {drop_stack:#?}"
        );
    }
}
/// The engine-wide registry of canonicalized types, guarding its mutable
/// state behind a reader-writer lock.
#[derive(Debug)]
pub struct TypeRegistry(RwLock<TypeRegistryInner>);
impl TypeRegistry {
pub fn new() -> Self {
Self(RwLock::new(TypeRegistryInner::default()))
}
#[inline]
pub fn debug_assert_contains(&self, index: VMSharedTypeIndex) {
if cfg!(debug_assertions) {
self.0.read().debug_assert_registered(index);
}
}
pub fn borrow(&self, index: VMSharedTypeIndex) -> Option<Arc<WasmSubType>> {
let id = shared_type_index_to_slab_id(index);
let inner = self.0.read();
inner.types.get(id).and_then(|ty| ty.clone())
}
pub fn layout(&self, index: VMSharedTypeIndex) -> Option<GcLayout> {
let inner = self.0.read();
inner.type_to_gc_layout.get(index).and_then(|l| l.clone())
}
pub fn trampoline_type(&self, index: VMSharedTypeIndex) -> VMSharedTypeIndex {
let slab_id = shared_type_index_to_slab_id(index);
let inner = self.0.read();
inner.debug_assert_registered(index);
let ty = inner.types[slab_id].as_ref().unwrap();
debug_assert!(
ty.is_func(),
"cannot get the trampoline type of a non-function type: {index:?} = {ty:?}"
);
match inner.type_to_trampoline.get(index).and_then(|x| x.expand()) {
Some(ty) => ty,
None => {
index
}
}
}
#[inline]
pub fn is_subtype(&self, sub: VMSharedTypeIndex, sup: VMSharedTypeIndex) -> bool {
if cfg!(debug_assertions) {
self.0.read().debug_assert_registered(sub);
self.0.read().debug_assert_registered(sup);
}
if sub == sup {
return true;
}
self.is_subtype_slow(sub, sup)
}
fn is_subtype_slow(&self, sub: VMSharedTypeIndex, sup: VMSharedTypeIndex) -> bool {
let inner = self.0.read();
let sub_supertypes = inner.supertypes(sub);
let sup_supertypes = inner.supertypes(sup);
sub_supertypes.get(sup_supertypes.len()) == Some(&sup)
}
}