use crate::prelude::*;
use crate::sync::RwLock;
use crate::Engine;
use alloc::borrow::Cow;
use alloc::sync::Arc;
use core::iter;
use core::{
borrow::Borrow,
fmt::{self, Debug},
hash::{Hash, Hasher},
ops::Range,
sync::atomic::{
AtomicUsize,
Ordering::{AcqRel, Acquire},
},
};
use hashbrown::HashSet;
use wasmtime_environ::{
iter_entity_range, packed_option::PackedOption, EngineOrModuleTypeIndex,
ModuleInternedTypeIndex, ModuleTypes, PrimaryMap, SecondaryMap, TypeTrace, VMSharedTypeIndex,
WasmRecGroup, WasmSubType,
};
use wasmtime_slab::{Id as SlabId, Slab};
/// The result of registering a module's types with an engine's
/// [`TypeRegistry`].
///
/// Maps the module's interned type indices to engine-level shared type
/// indices and holds a registration on each of the module's rec groups,
/// which is released when this collection is dropped.
pub struct TypeCollection {
    // The engine whose registry these types were registered in; needed so
    // `Drop` can unregister them again.
    engine: Engine,
    // The rec-group entries this collection holds registrations on.
    rec_groups: Vec<RecGroupEntry>,
    // Module-interned type index -> engine-shared type index.
    types: PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex>,
    // Shared trampoline type index -> the module's interned trampoline type,
    // populated from `ModuleTypes::trampoline_types` in `new_for_module`.
    trampolines: SecondaryMap<VMSharedTypeIndex, PackedOption<ModuleInternedTypeIndex>>,
}
impl Debug for TypeCollection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let TypeCollection {
engine: _,
rec_groups,
types,
trampolines,
} = self;
f.debug_struct("TypeCollection")
.field("rec_groups", rec_groups)
.field("types", types)
.field("trampolines", trampolines)
.finish_non_exhaustive()
}
}
impl TypeCollection {
    /// Register all of `module_types` in `engine`'s shared type registry,
    /// returning a collection that keeps those registrations alive.
    pub fn new_for_module(engine: &Engine, module_types: &ModuleTypes) -> Self {
        let engine = engine.clone();
        let registry = engine.signatures();
        let (rec_groups, types) = registry.0.write().register_module_types(module_types);

        // For each registered trampoline type, remember which module-level
        // trampoline type it corresponds to.
        let mut trampolines = SecondaryMap::with_capacity(types.len());
        for (module_ty, trampoline) in module_types.trampoline_types() {
            let trampoline_ty = registry.trampoline_type(types[module_ty]);
            trampolines[trampoline_ty] = Some(trampoline).into();
        }

        Self {
            engine,
            rec_groups,
            types,
            trampolines,
        }
    }

    /// View this collection as a map from module type index to engine-shared
    /// type index.
    pub fn as_module_map(&self) -> &PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex> {
        &self.types
    }

    /// Look up the engine-shared type index for `index`, if present.
    #[inline]
    pub fn shared_type(&self, index: ModuleInternedTypeIndex) -> Option<VMSharedTypeIndex> {
        self.types.get(index).copied()
    }

    /// Get the module-interned trampoline type for the shared type `ty`, if
    /// one was recorded for this module.
    #[inline]
    pub fn trampoline_type(&self, ty: VMSharedTypeIndex) -> Option<ModuleInternedTypeIndex> {
        let trampoline_ty = self.trampolines[ty].expand();
        log::trace!("TypeCollection::trampoline_type({ty:?}) -> {trampoline_ty:?}");
        trampoline_ty
    }
}
impl Drop for TypeCollection {
    fn drop(&mut self) {
        // Nothing was registered if there are no rec groups, so avoid taking
        // the registry's write lock in that case.
        if self.rec_groups.is_empty() {
            return;
        }
        let registry = self.engine.signatures();
        registry.0.write().unregister_type_collection(self);
    }
}
/// Convert an engine-shared type index into the slab id that backs it.
#[inline]
fn shared_type_index_to_slab_id(index: VMSharedTypeIndex) -> SlabId {
    let raw = index.bits();
    SlabId::from_raw(raw)
}
/// Convert a slab id back into the engine-shared type index it backs.
#[inline]
fn slab_id_to_shared_type_index(id: SlabId) -> VMSharedTypeIndex {
    let raw = id.into_raw();
    VMSharedTypeIndex::new(raw)
}
/// An RAII registration of a single type in an engine's [`TypeRegistry`].
///
/// Holds one registration on the type's rec-group entry; dropping the handle
/// releases it, and releasing the final registration unregisters the group.
pub struct RegisteredType {
    // The engine whose registry holds this type.
    engine: Engine,
    // The rec-group entry this handle holds a registration on.
    entry: RecGroupEntry,
    // The registered type itself (also reachable via `index`, cached here).
    ty: Arc<WasmSubType>,
    // The engine-shared index of `ty`.
    index: VMSharedTypeIndex,
}
impl Debug for RegisteredType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let RegisteredType {
engine: _,
entry: _,
ty,
index,
} = self;
f.debug_struct("RegisteredType")
.field("index", index)
.field("ty", ty)
.finish_non_exhaustive()
}
}
impl Clone for RegisteredType {
fn clone(&self) -> Self {
self.entry.incref("cloning RegisteredType");
RegisteredType {
engine: self.engine.clone(),
entry: self.entry.clone(),
ty: self.ty.clone(),
index: self.index,
}
}
}
impl Drop for RegisteredType {
    fn drop(&mut self) {
        let was_last = self.entry.decref("dropping RegisteredType");
        if was_last {
            // This was the final registration, so remove the entry from the
            // engine's registry under the write lock.
            let entry = self.entry.clone();
            self.engine.signatures().0.write().unregister_entry(entry);
        }
    }
}
impl core::ops::Deref for RegisteredType {
    type Target = WasmSubType;

    // Dereference straight through to the registered type.
    fn deref(&self) -> &Self::Target {
        self.ty.as_ref()
    }
}
impl PartialEq for RegisteredType {
    fn eq(&self, other: &Self) -> bool {
        // Two handles are equal iff they refer to the same rec-group entry
        // allocation (pointer identity, not structural comparison).
        let same_entry = Arc::ptr_eq(&self.entry.0, &other.entry.0);
        if same_entry {
            // Same entry implies same engine and same type.
            debug_assert!(Engine::same(&self.engine, &other.engine));
            debug_assert_eq!(self.ty, other.ty);
        } else {
            // Different entries must differ in type or engine, since types
            // are hash-consed within a single engine.
            debug_assert!(self.ty != other.ty || !Engine::same(&self.engine, &other.engine));
        }
        same_entry
    }
}

impl Eq for RegisteredType {}
impl Hash for RegisteredType {
fn hash<H: Hasher>(&self, state: &mut H) {
let ptr = Arc::as_ptr(&self.entry.0);
ptr.hash(state);
}
}
impl RegisteredType {
    /// Register `ty` in `engine`'s registry as a singleton rec group and
    /// return an RAII handle to it.
    ///
    /// `ty` must already be canonicalized for runtime usage in this same
    /// registry (asserted below).
    pub fn new(engine: &Engine, ty: WasmSubType) -> RegisteredType {
        let (entry, index, ty) = {
            log::trace!("RegisteredType::new({ty:?})");

            let mut inner = engine.signatures().0.write();

            // Engine indices inside `ty` are only meaningful within the
            // registry they were canonicalized for.
            inner.assert_canonicalized_for_runtime_usage_in_this_registry(&ty);

            let entry = inner.register_singleton_rec_group(ty);

            // A singleton group has exactly one type, at position 0.
            let index = entry.0.shared_type_indices[0];
            let id = shared_type_index_to_slab_id(index);
            let ty = inner.types[id].clone();

            (entry, index, ty)
        }; // write lock released here, before building the handle
        RegisteredType::from_parts(engine.clone(), entry, index, ty)
    }

    /// Create a new RAII registration for the already-registered type at
    /// `index`, or `None` if no type is registered there.
    pub fn root(engine: &Engine, index: VMSharedTypeIndex) -> Option<RegisteredType> {
        let (entry, ty) = {
            let id = shared_type_index_to_slab_id(index);
            let inner = engine.signatures().0.read();
            let ty = inner.types.get(id)?.clone();
            // Every registered type has a rec-group entry, so this `unwrap`
            // cannot fail once the slab lookup above succeeded.
            let entry = inner.type_to_rec_group[index].clone().unwrap();

            // Take the registration this handle will own. Done while still
            // holding the read lock, so the entry cannot be unregistered in
            // between.
            entry.incref("RegisteredType::root");

            (entry, ty)
        };
        Some(RegisteredType::from_parts(engine.clone(), entry, index, ty))
    }

    /// Build a handle from its parts.
    ///
    /// The caller must have already taken (or otherwise own) a registration
    /// on `entry` for this handle — checked by the debug assert below.
    fn from_parts(
        engine: Engine,
        entry: RecGroupEntry,
        index: VMSharedTypeIndex,
        ty: Arc<WasmSubType>,
    ) -> Self {
        debug_assert!(entry.0.registrations.load(Acquire) != 0);
        RegisteredType {
            engine,
            entry,
            ty,
            index,
        }
    }

    /// The engine-shared index of this registered type.
    pub fn index(&self) -> VMSharedTypeIndex {
        self.index
    }

    /// The engine whose registry holds this registration.
    pub fn engine(&self) -> &Engine {
        &self.engine
    }
}
/// A shared, registration-counted entry for a registered rec group.
///
/// Cheap to clone (bumps the `Arc`); the `registrations` counter inside
/// tracks how many collections, handles, and referencing groups keep the
/// group alive.
#[derive(Clone)]
struct RecGroupEntry(Arc<RecGroupEntryInner>);
impl Debug for RecGroupEntry {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Small adapter that debug-prints a reference as its address (in
        // alternate pointer form), so entries can be told apart in traces.
        struct AsPtr<'a, P>(&'a P);
        impl<P: fmt::Pointer> Debug for AsPtr<'_, P> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, "{:#p}", *self.0)
            }
        }

        let inner = &self.0;
        f.debug_struct("RecGroupEntry")
            .field("ptr", &AsPtr(inner))
            .field("shared_type_indices", &inner.shared_type_indices)
            .field("hash_consing_key", &inner.hash_consing_key)
            .field("registrations", &inner.registrations.load(Acquire))
            .finish()
    }
}
/// The shared state behind a `RecGroupEntry`.
struct RecGroupEntryInner {
    // The rec group canonicalized for hash consing; this is the key used by
    // the registry's `hash_consing_map` (see the `Borrow` impl below).
    hash_consing_key: WasmRecGroup,
    // The engine-shared index assigned to each type in this group, in the
    // same order as the group's types.
    shared_type_indices: Box<[VMSharedTypeIndex]>,
    // Number of registrations keeping this group alive: explicit ones from
    // collections/handles plus implicit ones from other groups that
    // reference this group's types.
    registrations: AtomicUsize,
}
impl PartialEq for RecGroupEntry {
    fn eq(&self, other: &Self) -> bool {
        // Equality is structural on the hash-consing key — not pointer
        // identity — so equivalent rec groups hash-cons to a single entry.
        let lhs = &self.0.hash_consing_key;
        let rhs = &other.0.hash_consing_key;
        lhs == rhs
    }
}

impl Eq for RecGroupEntry {}
impl Hash for RecGroupEntry {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Must agree with `PartialEq`: hash only the hash-consing key.
        let key = &self.0.hash_consing_key;
        key.hash(state);
    }
}
impl Borrow<WasmRecGroup> for RecGroupEntry {
fn borrow(&self) -> &WasmRecGroup {
&self.0.hash_consing_key
}
}
impl RecGroupEntry {
    /// Take one registration on this entry.
    ///
    /// `why` is only used for trace logging.
    fn incref(&self, why: &str) {
        let old_count = self.0.registrations.fetch_add(1, AcqRel);
        log::trace!(
            "increment registration count for {self:?} (registrations -> {}): {why}",
            old_count + 1
        );
    }

    /// Release one registration on this entry.
    ///
    /// Returns `true` when this released the *final* registration
    /// (`old_count == 1`), in which case the caller is responsible for
    /// removing the entry from the registry.
    #[must_use = "caller must remove entry from registry if `decref` returns `true`"]
    fn decref(&self, why: &str) -> bool {
        let old_count = self.0.registrations.fetch_sub(1, AcqRel);
        // Decrementing past zero would mean a registration was released
        // twice.
        debug_assert_ne!(old_count, 0);
        log::trace!(
            "decrement registration count for {self:?} (registrations -> {}): {why}",
            old_count - 1
        );
        old_count == 1
    }
}
/// The mutable state of an engine's type registry, kept behind the
/// `TypeRegistry`'s rwlock.
#[derive(Debug, Default)]
struct TypeRegistryInner {
    // Hash-consing set: at most one entry per canonicalized rec group.
    hash_consing_map: HashSet<RecGroupEntry>,
    // Storage for all registered types; slab ids map 1:1 to
    // `VMSharedTypeIndex` (see the conversion helpers above).
    types: Slab<Arc<WasmSubType>>,
    // Back-pointer from each registered type to its rec-group entry.
    type_to_rec_group: SecondaryMap<VMSharedTypeIndex, Option<RecGroupEntry>>,
    // Each type's full supertype chain (root-most first); `None`/absent for
    // types with no supertype. The chain's length is the type's depth,
    // which `is_subtype` relies on.
    type_to_supertypes: SecondaryMap<VMSharedTypeIndex, Option<Box<[VMSharedTypeIndex]>>>,
    // For function types whose trampoline type differs from themselves, the
    // registered trampoline type's index.
    type_to_trampoline: SecondaryMap<VMSharedTypeIndex, PackedOption<VMSharedTypeIndex>>,
    // Scratch work-list used by `unregister_entry` to process transitive
    // unregistrations without recursion; always empty between calls.
    drop_stack: Vec<RecGroupEntry>,
}
impl TypeRegistryInner {
    /// Register every rec group in `types`, returning the registered entries
    /// (in rec-group order) and a map from each module type index to its
    /// engine-shared index.
    fn register_module_types(
        &mut self,
        types: &ModuleTypes,
    ) -> (
        Vec<RecGroupEntry>,
        PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex>,
    ) {
        log::trace!("Start registering module types");

        let mut entries = Vec::with_capacity(types.rec_groups().len());
        let mut map = PrimaryMap::<ModuleInternedTypeIndex, VMSharedTypeIndex>::with_capacity(
            types.wasm_types().len(),
        );

        for (_rec_group_index, module_group) in types.rec_groups() {
            // Register this group. `map` already contains the shared indices
            // of all previously-registered groups, so earlier cross-group
            // references can be resolved during canonicalization.
            let entry = self.register_rec_group(
                &map,
                module_group.clone(),
                iter_entity_range(module_group.clone()).map(|ty| types[ty].clone()),
            );

            // Extend `map` with this group's types; each push must land on
            // the same index the module assigned to the type.
            for (module_ty, engine_ty) in
                iter_entity_range(module_group).zip(entry.0.shared_type_indices.iter())
            {
                let module_ty2 = map.push(*engine_ty);
                assert_eq!(module_ty, module_ty2);
            }

            entries.push(entry);
        }

        log::trace!("End registering module types");
        (entries, map)
    }

    /// Register one rec group, hash-consing it against the groups already in
    /// this registry.
    ///
    /// `map` must contain an entry for every module type index below
    /// `range.start` (i.e. all earlier groups), and `types` must yield
    /// exactly the types of `range`, in order.
    ///
    /// Returns the (possibly pre-existing) entry with a registration already
    /// taken on behalf of the caller.
    fn register_rec_group(
        &mut self,
        map: &PrimaryMap<ModuleInternedTypeIndex, VMSharedTypeIndex>,
        range: Range<ModuleInternedTypeIndex>,
        types: impl ExactSizeIterator<Item = WasmSubType>,
    ) -> RecGroupEntry {
        debug_assert_eq!(iter_entity_range(range.clone()).len(), types.len());

        // Build the hash-consing key: each type canonicalized so that
        // cross-group references become engine indices. The original
        // (un-canonicalized) types are kept for the runtime-canonicalization
        // pass further down.
        let mut non_canon_types = Vec::with_capacity(types.len());
        let hash_consing_key = WasmRecGroup {
            types: types
                .zip(iter_entity_range(range.clone()))
                .map(|(mut ty, module_index)| {
                    non_canon_types.push((module_index, ty.clone()));
                    ty.canonicalize_for_hash_consing(range.clone(), &mut |idx| {
                        // Only references to previously-registered groups
                        // may reach this callback.
                        debug_assert!(idx < range.clone().start);
                        map[idx]
                    });
                    ty
                })
                .collect::<Box<[_]>>(),
        };

        // If an equivalent group is already registered, reuse it: just bump
        // its registration count.
        if let Some(entry) = self.hash_consing_map.get(&hash_consing_key) {
            entry.incref(
                "hash consed to already-registered type in `TypeRegistryInner::register_rec_group`",
            );
            return entry.clone();
        }

        // This is a new group: take a registration on every other group it
        // references, so those groups outlive this one.
        hash_consing_key
            .trace_engine_indices::<_, ()>(&mut |index| {
                let entry = &self.type_to_rec_group[index].as_ref().unwrap();
                entry.incref(
                    "new cross-group type reference to existing type in `register_rec_group`",
                );
                Ok(())
            })
            .unwrap();

        // Canonicalize each type for runtime usage and insert it into the
        // types slab, producing this group's shared indices.
        //
        // NOTE(review): the intra-group arithmetic below assumes the slab
        // will hand out ids `types.len()`, `types.len() + 1`, ... for the
        // upcoming `alloc` calls — verify that `wasmtime_slab::Slab::alloc`
        // cannot return a reclaimed id below `len` here.
        let module_rec_group_start = range.start;
        let engine_rec_group_start = u32::try_from(self.types.len()).unwrap();
        let shared_type_indices = non_canon_types
            .into_iter()
            .map(|(module_index, mut ty)| {
                ty.canonicalize_for_runtime_usage(&mut |idx| {
                    if idx < module_rec_group_start {
                        // Cross-group reference: already registered, so look
                        // up its engine index.
                        map[idx]
                    } else {
                        // Intra-group reference: compute the engine index
                        // the referenced type is about to be assigned.
                        let rec_group_offset = idx.as_u32() - module_rec_group_start.as_u32();
                        VMSharedTypeIndex::from_u32(engine_rec_group_start + rec_group_offset)
                    }
                });
                self.insert_one_type_from_rec_group(module_index, ty)
            })
            .collect();

        // The new group starts with a single registration, owned by the
        // caller.
        let entry = RecGroupEntry(Arc::new(RecGroupEntryInner {
            hash_consing_key,
            shared_type_indices,
            registrations: AtomicUsize::new(1),
        }));
        log::trace!("create new entry {entry:?} (registrations -> 1)");

        let is_new_entry = self.hash_consing_map.insert(entry.clone());
        debug_assert!(is_new_entry);

        // Record the back-pointer from each new shared index to its entry.
        for ty in entry.0.shared_type_indices.iter().copied() {
            debug_assert!(self.type_to_rec_group[ty].is_none());
            self.type_to_rec_group[ty] = Some(entry.clone());
        }

        // Ensure a trampoline type is registered for every function type in
        // this group whose trampoline type differs from the type itself.
        for shared_type_index in entry.0.shared_type_indices.iter().copied() {
            let slab_id = shared_type_index_to_slab_id(shared_type_index);
            if let Some(f) = self.types[slab_id].as_func() {
                match f.trampoline_type() {
                    Cow::Borrowed(_) => {
                        // The function type serves as its own trampoline
                        // type (see `TypeRegistry::trampoline_type`'s
                        // fallback), so nothing separate to register.
                    }
                    Cow::Owned(trampoline) => {
                        // Distinct trampoline type: register it as its own
                        // singleton rec group and record the association.
                        let trampoline_entry = self.register_singleton_rec_group(WasmSubType {
                            is_final: true,
                            supertype: None,
                            composite_type: wasmtime_environ::WasmCompositeType::Func(trampoline),
                        });
                        let trampoline_index = trampoline_entry.0.shared_type_indices[0];
                        log::trace!(
                            "Registering trampoline {trampoline_index:?} for function type {shared_type_index:?}"
                        );
                        debug_assert_ne!(shared_type_index, trampoline_index);
                        self.type_to_trampoline[shared_type_index] = Some(trampoline_index).into();
                    }
                }
            }
        }

        entry
    }

    /// Assert that `ty` is canonicalized for runtime usage *in this
    /// registry*: all of its type references must be engine indices that
    /// this registry's slab actually contains.
    fn assert_canonicalized_for_runtime_usage_in_this_registry(&self, ty: &WasmSubType) {
        ty.trace::<_, ()>(&mut |index| match index {
            EngineOrModuleTypeIndex::RecGroup(_) | EngineOrModuleTypeIndex::Module(_) => {
                panic!("not canonicalized for runtime usage: {ty:?}")
            }
            EngineOrModuleTypeIndex::Engine(idx) => {
                let id = shared_type_index_to_slab_id(idx);
                assert!(
                    self.types.contains(id),
                    "canonicalized in a different engine? {ty:?}"
                );
                Ok(())
            }
        })
        .unwrap();
    }

    /// Insert one runtime-canonicalized type into the types slab, record its
    /// supertype chain, and return its newly-assigned engine index.
    fn insert_one_type_from_rec_group(
        &mut self,
        module_index: ModuleInternedTypeIndex,
        ty: WasmSubType,
    ) -> VMSharedTypeIndex {
        assert!(
            ty.is_canonicalized_for_runtime_usage(),
            "type is not canonicalized for runtime usage: {ty:?}"
        );

        let id = self.types.alloc(Arc::new(ty));
        let engine_index = slab_id_to_shared_type_index(id);
        log::trace!(
            "registered type {module_index:?} as {engine_index:?} = {:?}",
            &self.types[id]
        );

        // The chain is the supertype's own chain followed by the supertype
        // itself, i.e. root-most first; its length equals the type's
        // subtyping depth, which `TypeRegistry::is_subtype` relies on.
        if let Some(supertype) = self.types[id].supertype {
            let supertype = supertype.unwrap_engine_type_index();
            let supers_supertypes = self.supertypes(supertype);
            let mut supertypes = Vec::with_capacity(supers_supertypes.len() + 1);
            supertypes.extend(
                supers_supertypes
                    .iter()
                    .copied()
                    .chain(iter::once(supertype)),
            );
            self.type_to_supertypes[engine_index] = Some(supertypes.into_boxed_slice());
        }

        engine_index
    }

    /// All supertypes of `ty`, root-most first; empty if `ty` has none.
    fn supertypes(&self, ty: VMSharedTypeIndex) -> &[VMSharedTypeIndex] {
        self.type_to_supertypes
            .get(ty)
            .and_then(|s| s.as_deref())
            .unwrap_or(&[])
    }

    /// Register `ty` as a rec group containing only that type.
    ///
    /// `ty` must already be canonicalized for runtime usage in this
    /// registry.
    fn register_singleton_rec_group(&mut self, ty: WasmSubType) -> RecGroupEntry {
        self.assert_canonicalized_for_runtime_usage_in_this_registry(&ty);
        // A runtime-canonicalized type contains no module-relative
        // references, so an empty map and a placeholder index range suffice.
        let map = PrimaryMap::default();
        let range = ModuleInternedTypeIndex::from_bits(u32::MAX - 1)
            ..ModuleInternedTypeIndex::from_bits(u32::MAX);
        self.register_rec_group(&map, range, iter::once(ty))
    }

    /// Release the registrations held by `collection`, unregistering any rec
    /// groups whose counts reach zero.
    fn unregister_type_collection(&mut self, collection: &TypeCollection) {
        for entry in &collection.rec_groups {
            if entry.decref("TypeRegistryInner::unregister_type_collection") {
                self.unregister_entry(entry.clone());
            }
        }
    }

    /// Remove an entry whose registration count reached zero — and,
    /// transitively, any entries that drop to zero as a result — from the
    /// registry.
    ///
    /// Called with the registry's write lock held (all callers go through
    /// `.0.write()`); see the resurrection re-check below.
    fn unregister_entry(&mut self, entry: RecGroupEntry) {
        debug_assert!(self.drop_stack.is_empty());
        self.drop_stack.push(entry);

        while let Some(entry) = self.drop_stack.pop() {
            log::trace!("Start unregistering {entry:?}");

            // Between the caller observing a zero count and this thread
            // acquiring the write lock, another thread may have hash-consed
            // onto this entry and incremented its count again (increfs on
            // existing entries happen under the write lock), so re-check
            // before tearing anything down.
            let registrations = entry.0.registrations.load(Acquire);
            if registrations != 0 {
                log::trace!(
                    "{entry:?} was concurrently resurrected and no longer has \
                     zero registrations (registrations -> {registrations})",
                );
                continue;
            }

            debug_assert!(entry.0.hash_consing_key.is_canonicalized_for_hash_consing());

            // Release this entry's registrations on every other group it
            // references, queueing any that reach zero themselves.
            entry
                .0
                .hash_consing_key
                .trace_engine_indices::<_, ()>(&mut |other_index| {
                    let other_entry = self.type_to_rec_group[other_index].as_ref().unwrap();
                    if other_entry.decref(
                        "referenced by dropped entry in \
                         `TypeCollection::unregister_entry`",
                    ) {
                        self.drop_stack.push(other_entry.clone());
                    }
                    Ok(())
                })
                .unwrap();

            self.hash_consing_map.remove(&entry);

            // Tear down each of this group's types: the rec-group
            // back-pointer, the trampoline association (releasing its
            // registration), the supertype chain, and finally the slab
            // entry itself.
            for ty in entry.0.shared_type_indices.iter().copied() {
                log::trace!("removing {ty:?} from registry");

                let removed_entry = self.type_to_rec_group[ty].take();
                debug_assert_eq!(removed_entry.unwrap(), entry);

                if let Some(trampoline_ty) =
                    self.type_to_trampoline.get(ty).and_then(|x| x.expand())
                {
                    self.type_to_trampoline[ty] = None.into();
                    let trampoline_entry = self.type_to_rec_group[trampoline_ty].as_ref().unwrap();
                    if trampoline_entry
                        .decref("removing reference from a function type to its trampoline type")
                    {
                        self.drop_stack.push(trampoline_entry.clone());
                    }
                }

                if self.type_to_supertypes.get(ty).is_some() {
                    self.type_to_supertypes[ty] = None;
                }

                let id = shared_type_index_to_slab_id(ty);
                self.types.dealloc(id);
            }

            log::trace!("End unregistering {entry:?}");
        }
    }
}
// In debug builds, verify that every registration was released and the
// registry is completely empty by the time it is dropped.
#[cfg(debug_assertions)]
impl Drop for TypeRegistryInner {
    fn drop(&mut self) {
        log::trace!("Dropping type registry: {self:#?}");
        // Destructure exhaustively so adding a field forces this check to be
        // extended.
        let TypeRegistryInner {
            hash_consing_map,
            types,
            type_to_rec_group,
            type_to_supertypes,
            type_to_trampoline,
            drop_stack,
        } = self;
        assert!(
            hash_consing_map.is_empty(),
            "type registry not empty: hash consing map is not empty: {hash_consing_map:#?}"
        );
        assert!(
            types.is_empty(),
            "type registry not empty: types slab is not empty: {types:#?}"
        );
        // The secondary maps may retain `None` placeholders after
        // unregistration, so "empty" here means no `Some` values remain.
        assert!(
            type_to_rec_group.is_empty() || type_to_rec_group.values().all(|x| x.is_none()),
            "type registry not empty: type-to-rec-group map is not empty: {type_to_rec_group:#?}"
        );
        assert!(
            type_to_supertypes.is_empty() || type_to_supertypes.values().all(|x| x.is_none()),
            "type registry not empty: type-to-supertypes map is not empty: {type_to_supertypes:#?}"
        );
        assert!(
            type_to_trampoline.is_empty() || type_to_trampoline.values().all(|x| x.is_none()),
            "type registry not empty: type-to-trampoline map is not empty: {type_to_trampoline:#?}"
        );
        assert!(
            drop_stack.is_empty(),
            "type registry not empty: drop stack is not empty: {drop_stack:#?}"
        );
    }
}
/// The engine-wide registry of canonicalized Wasm types, guarded by a
/// read-write lock.
#[derive(Debug)]
pub struct TypeRegistry(RwLock<TypeRegistryInner>);
impl TypeRegistry {
pub fn new() -> Self {
Self(RwLock::new(TypeRegistryInner::default()))
}
pub fn borrow(&self, index: VMSharedTypeIndex) -> Option<Arc<WasmSubType>> {
let id = shared_type_index_to_slab_id(index);
let inner = self.0.read();
inner.types.get(id).cloned()
}
pub fn trampoline_type(&self, index: VMSharedTypeIndex) -> VMSharedTypeIndex {
let slab_id = shared_type_index_to_slab_id(index);
let inner = self.0.read();
let ty = &inner.types[slab_id];
debug_assert!(
ty.is_func(),
"cannot get the trampoline type of a non-function type: {index:?} = {ty:?}"
);
let trampoline_ty = match inner.type_to_trampoline.get(index).and_then(|x| x.expand()) {
Some(ty) => ty,
None => {
index
}
};
log::trace!("TypeRegistry::trampoline_type({index:?}) -> {trampoline_ty:?}");
trampoline_ty
}
pub fn is_subtype(&self, sub: VMSharedTypeIndex, sup: VMSharedTypeIndex) -> bool {
if sub == sup {
return true;
}
let inner = self.0.read();
let sub_supertypes = inner.supertypes(sub);
let sup_supertypes = inner.supertypes(sup);
sub_supertypes.get(sup_supertypes.len()) == Some(&sup)
}
}