use super::{RecGroupId, TypeAlloc, TypeList};
use crate::{
BinaryReaderError, CompositeInnerType, CompositeType, PackedIndex, RecGroup, Result,
StorageType, UnpackedIndex, ValType, WasmFeatures,
types::{CoreTypeId, TypeIdentifier},
};
/// Helper trait implemented by validators that need to canonicalize and
/// intern WebAssembly recursion groups of types into a shared `TypeAlloc`.
pub(crate) trait InternRecGroup {
    /// Append a freshly-interned type id to the implementor's type index
    /// space.
    fn add_type_id(&mut self, id: CoreTypeId);

    /// Resolve module-level type index `idx` to its interned id, reporting
    /// any error at binary `offset`.
    fn type_id_at(&self, idx: u32, offset: usize) -> Result<CoreTypeId>;

    /// The number of types defined so far in the implementor's index space.
    fn types_len(&self) -> u32;

    /// The WebAssembly feature/proposal flags in effect for validation.
    fn features(&self) -> &WasmFeatures;

    /// Canonicalize `rec_group`'s type references, intern the group into
    /// `types`, and register every member type id via `add_type_id`.
    ///
    /// Subtyping and descriptor validation only run when the group has not
    /// been interned before (`is_new`); a structurally-identical group was
    /// already validated on first interning.
    fn canonicalize_and_intern_rec_group(
        &mut self,
        types: &mut TypeAlloc,
        mut rec_group: RecGroup,
        offset: usize,
    ) -> Result<()>
    where
        Self: Sized,
    {
        // An implicit (non-`rec`) group always holds exactly one type.
        debug_assert!(rec_group.is_explicit_rec_group() || rec_group.types().len() == 1);

        // Explicit `rec` groups are gated on the GC proposal.
        if rec_group.is_explicit_rec_group() && !self.features().gc() {
            bail!(
                offset,
                "rec group usage requires `gc` proposal to be enabled"
            );
        }

        // Rewrite intra-group and cross-group references into canonical form
        // before interning, when the enabled features require it.
        if self.features().needs_type_canonicalization() {
            TypeCanonicalizer::new(self, offset).canonicalize_rec_group(&mut rec_group)?;
        }

        // Intern (hash-cons) the group; `is_new` is `false` when an identical
        // group already exists, in which case we only re-register the ids.
        let (is_new, rec_group_id) = types
            .intern_canonical_rec_group(self.features().needs_type_canonicalization(), rec_group);

        // Walk the id range the group was interned into.
        let range = &types[rec_group_id];
        let start = range.start.index();
        let end = range.end.index();

        for i in start..end {
            let i = u32::try_from(i).unwrap();
            let id = CoreTypeId::from_index(i);
            debug_assert!(types.get(id).is_some());
            self.add_type_id(id);
            if is_new {
                self.check_subtype(rec_group_id, id, types, offset)?;
                self.check_descriptors(rec_group_id, id, types, offset)?;
            }
        }

        Ok(())
    }

    /// Validate the subtyping declaration of interned type `id` (a member of
    /// `rec_group`): feature gating, supertype finality and matching, and the
    /// subtyping-depth limit. Records the computed depth back into `types`.
    fn check_subtype(
        &mut self,
        rec_group: RecGroupId,
        id: CoreTypeId,
        types: &mut TypeAlloc,
        offset: usize,
    ) -> Result<()> {
        let ty = &types[id];

        // Without the GC proposal, every type must be final and without a
        // declared supertype.
        if !self.features().gc() && (!ty.is_final || ty.supertype_idx.is_some()) {
            bail!(offset, "gc proposal must be enabled to use subtypes");
        }

        // Validate the composite payload (params/fields/element type) itself.
        self.check_composite_type(&ty.composite_type, &types, offset)?;

        let depth = if let Some(supertype_index) = ty.supertype_idx {
            // Canonicalization has already run, so this reference is either an
            // id or a rec-group-local index — never a raw module index.
            debug_assert!(supertype_index.is_canonical());
            let sup_id = self.at_packed_index(types, rec_group, supertype_index, offset)?;
            if types[sup_id].is_final {
                bail!(offset, "sub type cannot have a final super type");
            }
            if !types.matches(id, sup_id) {
                bail!(offset, "sub type must match super type");
            }
            // A subtype sits one level below its supertype; enforce the
            // implementation limit on hierarchy depth.
            let depth = types.get_subtyping_depth(sup_id) + 1;
            if usize::from(depth) > crate::limits::MAX_WASM_SUBTYPING_DEPTH {
                bail!(
                    offset,
                    "sub type hierarchy too deep: found depth {}, cannot exceed depth {}",
                    depth,
                    crate::limits::MAX_WASM_SUBTYPING_DEPTH,
                );
            }
            depth
        } else {
            // No supertype: this type is the root of its hierarchy.
            0
        };

        // Cache the depth so future subtypes of `id` can check the limit in
        // constant time.
        types.set_subtyping_depth(id, depth);

        Ok(())
    }

    /// Validate the `descriptor`/`describes` clauses of interned type `id`
    /// (custom-descriptors proposal): feature gating, the struct-only
    /// restriction, consistency with the supertype's clauses, and that the
    /// two clauses point back at each other.
    fn check_descriptors(
        &mut self,
        rec_group: RecGroupId,
        id: CoreTypeId,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        let ty = &types[id].composite_type;

        if ty.descriptor_idx.is_some() || ty.describes_idx.is_some() {
            // Both clauses are gated on the custom-descriptors proposal.
            if !self.features().custom_descriptors() {
                return Err(BinaryReaderError::new(
                    "custom descriptors proposal must be enabled to use descriptor and describes",
                    offset,
                ));
            }
            // Only struct types may carry either clause.
            match &ty.inner {
                CompositeInnerType::Struct(_) => (),
                _ => {
                    return Err(BinaryReaderError::new(
                        if ty.descriptor_idx.is_some() {
                            "descriptor clause on non-struct type"
                        } else {
                            "describes clause on non-struct type"
                        },
                        offset,
                    ));
                }
            }
        }

        // Resolve an already-canonicalized packed index to a concrete id.
        let map_canonical = |idx: PackedIndex| -> Result<CoreTypeId> {
            self.at_packed_index(types, rec_group, idx, offset)
        };

        let descriptor_idx = if let Some(i) = ty.descriptor_idx {
            Some(map_canonical(i)?)
        } else {
            None
        };
        let describes_idx = if let Some(i) = ty.describes_idx {
            Some(map_canonical(i)?)
        } else {
            None
        };

        // When `id` declares a supertype, its clauses must be consistent with
        // the supertype's clauses.
        if let Some(supertype_index) = types[id].supertype_idx {
            debug_assert!(supertype_index.is_canonical());
            let sup_id = map_canonical(supertype_index)?;

            if let Some(descriptor_idx) = descriptor_idx {
                // If the supertype also has a descriptor, then our
                // descriptor's supertype must be exactly the supertype's
                // descriptor.
                if types[sup_id].composite_type.descriptor_idx.is_some()
                    && (types[descriptor_idx].supertype_idx.is_none()
                        || (map_canonical(types[descriptor_idx].supertype_idx.unwrap())?
                            != map_canonical(
                                types[sup_id].composite_type.descriptor_idx.unwrap(),
                            )?))
                {
                    bail!(
                        offset,
                        "supertype of described type must be described by supertype of descriptor",
                    );
                }
            } else if types[sup_id].composite_type.descriptor_idx.is_some() {
                // A type without a descriptor cannot extend one that has one.
                bail!(
                    offset,
                    "supertype of type without descriptor cannot have descriptor",
                );
            }

            // `describes` must be present or absent in lock-step with the
            // supertype; when both are present, the described types must be
            // in the corresponding subtype relationship.
            match (
                types[id].composite_type.describes_idx,
                types[sup_id].composite_type.describes_idx,
            ) {
                (Some(a), Some(b)) => {
                    let a_id = self.at_packed_index(types, rec_group, a, offset)?;
                    if types[a_id].supertype_idx.is_none()
                        || (map_canonical(types[a_id].supertype_idx.unwrap())? != map_canonical(b)?)
                    {
                        bail!(offset, "supertype of descriptor does not match");
                    }
                }
                (None, None) => (),
                (None, Some(_)) => {
                    bail!(
                        offset,
                        "supertype of non-descriptor type cannot be a descriptor"
                    );
                }
                (Some(_), None) => {
                    bail!(offset, "supertype of descriptor must be a descriptor");
                }
            }
        }

        // The two clauses must be mutual: our descriptor must describe us...
        if let Some(descriptor_idx) = descriptor_idx {
            let describes_idx = if let Some(i) = types[descriptor_idx].composite_type.describes_idx
            {
                Some(map_canonical(i)?)
            } else {
                None
            };
            if describes_idx.is_none() || id != describes_idx.unwrap() {
                bail!(offset, "descriptor with no matching describes",);
            }
        }
        // ...and the type we describe must name us as its descriptor.
        if let Some(describes_idx) = describes_idx {
            let descriptor_idx = if let Some(i) = types[describes_idx].composite_type.descriptor_idx
            {
                Some(map_canonical(i)?)
            } else {
                None
            };
            if descriptor_idx.is_none() || id != descriptor_idx.unwrap() {
                bail!(offset, "describes with no matching descriptor",);
            }
        }

        Ok(())
    }

    /// Validate a single composite type payload against the enabled features:
    /// value-type validity, shared-ness, and per-kind feature gates
    /// (multi-value, gc, gc-types, stack-switching).
    fn check_composite_type(
        &mut self,
        ty: &CompositeType,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        let features = self.features();

        // Check one value type: it must be permitted by `features`, and inside
        // a `shared` composite type it must itself be shared.
        let check = |ty: &ValType, shared: bool| {
            features
                .check_value_type(*ty)
                .map_err(|e| BinaryReaderError::new(e, offset))?;
            if shared && !types.valtype_is_shared(*ty) {
                return Err(BinaryReaderError::new(
                    "shared composite type must contain shared types",
                    offset,
                ));
            }
            Ok(())
        };

        if !features.shared_everything_threads() && ty.shared {
            return Err(BinaryReaderError::new(
                "shared composite types require the shared-everything-threads proposal",
                offset,
            ));
        }

        match &ty.inner {
            CompositeInnerType::Func(t) => {
                // Every parameter and result must be a valid value type.
                for vt in t.params().iter().chain(t.results()) {
                    check(vt, ty.shared)?;
                }
                if t.results().len() > 1 && !features.multi_value() {
                    return Err(BinaryReaderError::new(
                        "func type returns multiple values but the multi-value feature is not enabled",
                        offset,
                    ));
                }
            }
            CompositeInnerType::Array(t) => {
                if !features.gc() {
                    bail!(
                        offset,
                        "array indexed types not supported without the gc feature",
                    );
                }
                if !features.gc_types() {
                    bail!(
                        offset,
                        "cannot define array types when gc types are disabled",
                    );
                }
                match &t.0.element_type {
                    // Packed storage types need no further validation.
                    StorageType::I8 | StorageType::I16 => {}
                    StorageType::Val(value_type) => check(value_type, ty.shared)?,
                };
            }
            CompositeInnerType::Struct(t) => {
                if !features.gc() {
                    bail!(
                        offset,
                        "struct indexed types not supported without the gc feature",
                    );
                }
                if !features.gc_types() {
                    bail!(
                        offset,
                        "cannot define struct types when gc types are disabled",
                    );
                }
                for ft in t.fields.iter() {
                    match &ft.element_type {
                        // Packed storage types need no further validation.
                        StorageType::I8 | StorageType::I16 => {}
                        StorageType::Val(value_type) => check(value_type, ty.shared)?,
                    }
                }
            }
            CompositeInnerType::Cont(t) => {
                if !features.stack_switching() {
                    bail!(
                        offset,
                        "cannot define continuation types when stack switching is disabled",
                    );
                }
                if !features.gc_types() {
                    bail!(
                        offset,
                        "cannot define continuation types when gc types are disabled",
                    );
                }
                // NOTE(review): the `unwrap` assumes the continuation's inner
                // reference was already canonicalized to a concrete id by this
                // point — confirm against the canonicalization pass.
                let id = t.0.as_core_type_id().unwrap();
                // A continuation type must wrap a function type.
                match types[id].composite_type.inner {
                    CompositeInnerType::Func(_) => (),
                    _ => bail!(offset, "non-function type {}", id.index()),
                }
            }
        }
        Ok(())
    }

    /// Resolve a `PackedIndex` — an already-canonical id, a module-level
    /// index, or a rec-group-local index — to a concrete `CoreTypeId`.
    fn at_packed_index(
        &self,
        types: &TypeList,
        rec_group: RecGroupId,
        index: PackedIndex,
        offset: usize,
    ) -> Result<CoreTypeId> {
        match index.unpack() {
            UnpackedIndex::Id(id) => Ok(id),
            UnpackedIndex::Module(idx) => self.type_id_at(idx, offset),
            UnpackedIndex::RecGroup(idx) => types.rec_group_local_id(rec_group, idx, offset),
        }
    }
}
/// How far `TypeCanonicalizer` rewrites the type references inside a rec
/// group.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum CanonicalizationMode {
    /// Rewrite references so structurally-identical groups compare equal for
    /// hash-consing: references to types defined before the group become
    /// `CoreTypeId`s, while references within the group become (or remain)
    /// rec-group-local indices.
    HashConsing,
    /// Rewrite every reference to a concrete `CoreTypeId`; rec-group-local
    /// indices are resolved via the `within_rec_group` id range, which must
    /// be supplied in this mode.
    OnlyIds,
}
/// Rewrites the type references inside a rec group into canonical form; the
/// exact target form is selected by `CanonicalizationMode`.
pub(crate) struct TypeCanonicalizer<'a> {
    /// Source of feature flags and module-index-to-id resolution.
    module: &'a dyn InternRecGroup,
    /// Module-level type index of the first type in the group currently being
    /// canonicalized (`u32::MAX` sentinel until the group is processed).
    rec_group_start: u32,
    /// Number of types in the group currently being canonicalized.
    rec_group_len: u32,
    /// Binary offset used when reporting errors.
    offset: usize,
    /// Which rewrite strategy to apply.
    mode: CanonicalizationMode,
    /// In `OnlyIds` mode, the interned id range of the rec group whose local
    /// indices are being resolved; unused in `HashConsing` mode.
    within_rec_group: Option<core::ops::Range<CoreTypeId>>,
}
impl<'a> TypeCanonicalizer<'a> {
    /// Create a canonicalizer in `HashConsing` mode that reports errors at
    /// `offset`. The rec-group bounds are filled in by
    /// `canonicalize_rec_group`.
    pub fn new(module: &'a dyn InternRecGroup, offset: usize) -> Self {
        // Sentinel: every module index compares less than `u32::MAX`, so all
        // references resolve as "defined before the group" until the real
        // bounds are set.
        let rec_group_start = u32::MAX;
        let rec_group_len = 0;
        Self {
            module,
            rec_group_start,
            rec_group_len,
            offset,
            mode: CanonicalizationMode::HashConsing,
            within_rec_group: None,
        }
    }

    /// Whether the GC proposal is enabled (and therefore whether
    /// rec-group-local references are representable).
    fn allow_gc(&self) -> bool {
        self.module.features().gc()
    }

    /// Canonicalize every type reference in `rec_group` in place, first
    /// rejecting forward references in supertype and `describes` clauses.
    fn canonicalize_rec_group(&mut self, rec_group: &mut RecGroup) -> Result<()> {
        // The group being defined starts at the current end of the module's
        // type index space.
        self.rec_group_start = self.module.types_len();
        self.rec_group_len = u32::try_from(rec_group.types().len()).unwrap();

        for (rec_group_local_index, ty) in rec_group.types_mut().enumerate() {
            let rec_group_local_index = u32::try_from(rec_group_local_index).unwrap();
            // Module-level index this group member will occupy.
            let type_index = self.rec_group_start + rec_group_local_index;

            // A supertype must have a strictly smaller module index than its
            // subtype, even within the same rec group.
            if let Some(sup) = ty.supertype_idx.as_mut() {
                if sup.as_module_index().map_or(false, |i| i >= type_index) {
                    bail!(self.offset, "supertypes must be defined before subtypes");
                }
            }
            // Likewise, a `describes` clause may only refer backwards.
            if let Some(idx) = ty.composite_type.describes_idx.as_mut() {
                if idx.as_module_index().map_or(false, |i| i >= type_index) {
                    bail!(self.offset, "forward describes reference");
                }
            }

            // Rewrite every index this type definition contains.
            ty.remap_indices(&mut |idx| self.canonicalize_type_index(idx))?;
        }

        Ok(())
    }

    /// Canonicalize a single type reference in place according to
    /// `self.mode`; see `CanonicalizationMode` for the two target forms.
    fn canonicalize_type_index(&self, ty: &mut PackedIndex) -> Result<()> {
        match ty.unpack() {
            // Already a concrete id — nothing to do.
            UnpackedIndex::Id(_) => Ok(()),
            UnpackedIndex::Module(index) => {
                if index < self.rec_group_start || self.mode == CanonicalizationMode::OnlyIds {
                    // Reference to a type defined before this group (or we
                    // want ids unconditionally): replace it with the interned
                    // id, if it fits the packed representation.
                    let id = self.module.type_id_at(index, self.offset)?;
                    if let Some(id) = PackedIndex::from_id(id) {
                        *ty = id;
                        return Ok(());
                    } else {
                        bail!(
                            self.offset,
                            "implementation limit: too many types in `TypeList`"
                        )
                    }
                }

                // From here the reference points into the group currently
                // being defined. Without GC, a group holds exactly one type.
                debug_assert!(self.allow_gc() || self.rec_group_len == 1);
                let local = index - self.rec_group_start;
                if local < self.rec_group_len {
                    if self.allow_gc() {
                        // Intra-group references become group-local indices so
                        // that isomorphic groups canonicalize identically.
                        if let Some(id) = PackedIndex::from_rec_group_index(local) {
                            *ty = id;
                            return Ok(());
                        } else {
                            bail!(
                                self.offset,
                                "implementation limit: too many types in a recursion group"
                            )
                        }
                    } else {
                        // Self-reference without GC enabled is rejected.
                        bail!(
                            self.offset,
                            "unknown type {index}: type index out of bounds because the GC proposal is disabled"
                        )
                    }
                }

                // Beyond the end of the group: plain out-of-bounds index.
                bail!(
                    self.offset,
                    "unknown type {index}: type index out of bounds"
                )
            }
            UnpackedIndex::RecGroup(local_index) => match self.mode {
                // Group-local indices are exactly the canonical form that
                // hash-consing wants; leave them as-is.
                CanonicalizationMode::HashConsing => Ok(()),
                CanonicalizationMode::OnlyIds => {
                    // Translate the group-local index into a concrete id using
                    // the group's already-interned id range.
                    let rec_group_elems = self.within_rec_group.as_ref().expect(
                        "configured to canonicalize all type reference indices to `CoreTypeId`s \
                         and found rec-group-local index, but missing `within_rec_group` context",
                    );
                    let rec_group_len = rec_group_elems.end.index() - rec_group_elems.start.index();
                    let rec_group_len = u32::try_from(rec_group_len).unwrap();
                    assert!(local_index < rec_group_len);
                    let rec_group_start = u32::try_from(rec_group_elems.start.index()).unwrap();
                    let id = CoreTypeId::from_index(rec_group_start + local_index);
                    *ty = PackedIndex::from_id(id).expect(
                        "should fit in impl limits since we already have the end of the rec group \
                         constructed successfully",
                    );
                    Ok(())
                }
            },
        }
    }
}