wasmparser/validator/operators.rs

1/* Copyright 2019 Mozilla Foundation
2 *
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 *     http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16// The basic validation algorithm here is copied from the "Validation
17// Algorithm" section of the WebAssembly specification -
18// https://webassembly.github.io/spec/core/appendix/algorithm.html.
19//
20// That algorithm is followed pretty closely here, namely `push_operand`,
21// `pop_operand`, `push_ctrl`, and `pop_ctrl`. If anything here is a bit
22// confusing it's recommended to read over that section to see how it maps to
23// the various methods here.
24
25#[cfg(feature = "simd")]
26use crate::VisitSimdOperator;
27use crate::{
28    AbstractHeapType, BinaryReaderError, BlockType, BrTable, Catch, ContType, FieldType, FrameKind,
29    FrameStack, FuncType, GlobalType, Handle, HeapType, Ieee32, Ieee64, MemArg, ModuleArity,
30    RefType, Result, ResumeTable, StorageType, StructType, SubType, TableType, TryTable,
31    UnpackedIndex, ValType, VisitOperator, WasmFeatures, WasmModuleResources,
32    limits::MAX_WASM_FUNCTION_LOCALS,
33};
34use crate::{CompositeInnerType, Ordering, prelude::*};
35use core::ops::{Deref, DerefMut};
36use core::{cmp, iter, mem};
37
38#[cfg(feature = "simd")]
39mod simd;
40
/// Core state used while validating the operators of a single function body
/// or constant expression.
pub(crate) struct OperatorValidator {
    /// Types of the function's locals (parameters followed by declared locals).
    pub(super) locals: Locals,
    /// Tracks which non-defaultable locals have been initialized so far.
    local_inits: LocalInits,

    // This is a list of flags for wasm features which are used to gate various
    // instructions.
    pub(crate) features: WasmFeatures,

    // Temporary storage used during `match_stack_operands`
    popped_types_tmp: Vec<MaybeType>,

    /// The `control` list is the list of blocks that we're currently in.
    control: Vec<Frame>,
    /// The `operands` is the current type stack.
    operands: Vec<MaybeType>,

    /// Whether validation is happening in a shared context.
    shared: bool,

    /// A trace of all operand push/pop operations performed while validating an
    /// opcode. This is then compared to the arity that we report to double
    /// check that arity report's correctness. `true` is "push" and `false` is
    /// "pop".
    #[cfg(debug_assertions)]
    pub(crate) pop_push_log: Vec<bool>,
}
67
/// Captures the initialization of non-defaultable locals.
struct LocalInits {
    /// Records if a local is already initialized, indexed by local index.
    local_inits: Vec<bool>,
    /// When `local_inits` is modified, the relevant `index` is recorded
    /// here to be undone when control pops.
    inits: Vec<u32>,
    /// The index of the first non-defaultable local.
    ///
    /// # Note
    ///
    /// This is an optimization so that we only have to perform expensive
    /// look-ups for locals that have a local index equal to or higher than
    /// this. A value of `u32::MAX` means no non-defaultable local has been
    /// defined yet, so every lookup takes the fast path.
    first_non_default_local: u32,
}
83
84impl Default for LocalInits {
85    fn default() -> Self {
86        Self {
87            local_inits: Vec::default(),
88            inits: Vec::default(),
89            first_non_default_local: u32::MAX,
90        }
91    }
92}
93
94impl LocalInits {
95    /// Defines new function local parameters.
96    pub fn define_params(&mut self, count: usize) {
97        let Some(new_len) = self.local_inits.len().checked_add(count) else {
98            panic!("tried to define too many function locals as parameters: {count}");
99        };
100        self.local_inits.resize(new_len, true);
101    }
102
103    /// Defines `count` function locals of type `ty`.
104    pub fn define_locals(&mut self, count: u32, ty: ValType) {
105        let Ok(count) = usize::try_from(count) else {
106            panic!("tried to define too many function locals: {count}");
107        };
108        let len = self.local_inits.len();
109        let Some(new_len) = len.checked_add(count) else {
110            panic!("tried to define too many function locals: {count}");
111        };
112        let is_defaultable = ty.is_defaultable();
113        if !is_defaultable && self.first_non_default_local == u32::MAX {
114            self.first_non_default_local = len as u32;
115        }
116        self.local_inits.resize(new_len, is_defaultable);
117    }
118
119    /// Returns `true` if the local at `local_index` has already been initialized.
120    #[inline]
121    pub fn is_uninit(&self, local_index: u32) -> bool {
122        if local_index < self.first_non_default_local {
123            return false;
124        }
125        !self.local_inits[local_index as usize]
126    }
127
128    /// Marks the local at `local_index` as initialized.
129    #[inline]
130    pub fn set_init(&mut self, local_index: u32) {
131        if self.is_uninit(local_index) {
132            self.local_inits[local_index as usize] = true;
133            self.inits.push(local_index);
134        }
135    }
136
137    /// Registers a new control frame and returns its `height`.
138    pub fn push_ctrl(&mut self) -> usize {
139        self.inits.len()
140    }
141
142    /// Pops a control frame via its `height`.
143    ///
144    /// This uninitializes all locals that have been initialized within it.
145    pub fn pop_ctrl(&mut self, height: usize) {
146        for local_index in self.inits.split_off(height) {
147            self.local_inits[local_index as usize] = false;
148        }
149    }
150
151    /// Clears the [`LocalInits`].
152    ///
153    /// After this operation `self` will be empty and ready for reuse.
154    pub fn clear(&mut self) {
155        self.local_inits.clear();
156        self.inits.clear();
157        self.first_non_default_local = u32::MAX;
158    }
159
160    /// Returns `true` if `self` is empty.
161    pub fn is_empty(&self) -> bool {
162        self.local_inits.is_empty()
163    }
164}
165
// Maximum number of locals whose types are cached directly in `Locals::first`
// for O(1) lookup; locals beyond this fall back to a binary search over
// `Locals::uncached`.
//
// No science was performed in the creation of this number, feel free to change
// it if you so like.
const MAX_LOCALS_TO_TRACK: u32 = 50;
169
/// Tracks the declared types of a function's locals, optimized for fast
/// index-to-type lookup.
pub(super) struct Locals {
    // Total number of locals in the function.
    num_locals: u32,

    // The first MAX_LOCALS_TO_TRACK locals in a function. This is used to
    // optimize the theoretically common case where most functions don't have
    // many locals and don't need a full binary search in the entire local space
    // below.
    first: Vec<ValType>,

    // This is a "compressed" list of locals for this function. The list of
    // locals are represented as a list of tuples. The second element is the
    // type of the local, and the first element is monotonically increasing as
    // you visit elements of this list. The first element is the maximum index
    // of the local, after the previous index, of the type specified.
    //
    // This allows us to do a binary search on the list for a local's index for
    // `local.{get,set,tee}`. We do a binary search for the index desired, and
    // it either lies in a "hole" where the maximum index is specified later,
    // or it's at the end of the list meaning it's out of bounds.
    uncached: Vec<(u32, ValType)>,
}
192
/// A Wasm control flow block on the control flow stack during Wasm validation.
//
// # Dev. Note
//
// This structure corresponds to `ctrl_frame` as specified in the validation
// appendix of the wasm spec.
#[derive(Debug, Copy, Clone)]
pub struct Frame {
    /// Indicator for what kind of instruction pushed this frame.
    pub kind: FrameKind,
    /// The type signature of this frame, represented as a singular return type
    /// or a type index pointing into the module's types.
    pub block_type: BlockType,
    /// The index, below which, this frame cannot modify the operand stack.
    pub height: usize,
    /// Whether this frame is unreachable so far.
    pub unreachable: bool,
    /// The number of initializations in the stack at the time of its creation.
    pub init_height: usize,
}
213
/// Bundles an [`OperatorValidator`] with the module resources and the byte
/// offset of the operator currently being validated.
struct OperatorValidatorTemp<'validator, 'resources, T> {
    /// Offset in the original binary of the operator being validated; used
    /// for error reporting.
    offset: usize,
    /// The validator state being mutated.
    inner: &'validator mut OperatorValidator,
    /// Read-only module-level type information.
    resources: &'resources T,
}
219
/// Heap allocations retained from a finished [`OperatorValidator`] so they
/// can be reused when validating the next function, avoiding reallocation.
#[derive(Default)]
pub struct OperatorValidatorAllocations {
    // Each field mirrors the identically-named buffer in `OperatorValidator`
    // (or `Locals` for the `locals_*` fields).
    popped_types_tmp: Vec<MaybeType>,
    control: Vec<Frame>,
    operands: Vec<MaybeType>,
    local_inits: LocalInits,
    locals_first: Vec<ValType>,
    locals_uncached: Vec<(u32, ValType)>,
}
229
/// Type storage within the validator.
///
/// When managing the operand stack in unreachable code, the validator may not
/// fully know an operand's type. This unknown state is known as the `bottom`
/// type in the WebAssembly specification. Validating further instructions may
/// give us more information; either partial (`PartialRef`) or fully known.
#[derive(Debug, Copy, Clone)]
enum MaybeType<T = ValType> {
    /// The operand has no available type information due to unreachable code.
    ///
    /// This state represents "unknown" and corresponds to the `bottom` type in
    /// the WebAssembly specification. There are no constraints on what this
    /// type may be and it can match any other type during validation.
    Bottom,
    /// The operand is known to be a reference and we may know its abstract
    /// type.
    ///
    /// This state is not fully `Known`, however, because its type can be
    /// interpreted as either:
    /// - `shared` or not-`shared`
    /// -  nullable or not nullable
    ///
    /// No further refinements are required for WebAssembly instructions today
    /// but this may grow in the future.
    UnknownRef(Option<AbstractHeapType>),
    /// The operand is known to have type `T`.
    Known(T),
}
258
// The validator is pretty performance-sensitive and `MaybeType` is the main
// unit of storage, so assert that it doesn't exceed 8 bytes which is the
// current expected size.
#[test]
fn assert_maybe_type_small() {
    // `assert_eq!` reports the actual size on failure, unlike `assert!`.
    assert_eq!(core::mem::size_of::<MaybeType>(), 8);
}
266
267impl core::fmt::Display for MaybeType {
268    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
269        match self {
270            MaybeType::Bottom => write!(f, "bot"),
271            MaybeType::UnknownRef(ty) => {
272                write!(f, "(ref shared? ")?;
273                match ty {
274                    Some(ty) => write!(f, "{}bot", ty.as_str(true))?,
275                    None => write!(f, "bot")?,
276                }
277                write!(f, ")")
278            }
279            MaybeType::Known(ty) => core::fmt::Display::fmt(ty, f),
280        }
281    }
282}
283
284impl From<ValType> for MaybeType {
285    fn from(ty: ValType) -> MaybeType {
286        MaybeType::Known(ty)
287    }
288}
289
290impl From<RefType> for MaybeType {
291    fn from(ty: RefType) -> MaybeType {
292        let ty: ValType = ty.into();
293        ty.into()
294    }
295}
296impl From<MaybeType<RefType>> for MaybeType<ValType> {
297    fn from(ty: MaybeType<RefType>) -> MaybeType<ValType> {
298        match ty {
299            MaybeType::Bottom => MaybeType::Bottom,
300            MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(ty),
301            MaybeType::Known(t) => MaybeType::Known(t.into()),
302        }
303    }
304}
305
306impl MaybeType<RefType> {
307    fn as_non_null(&self) -> MaybeType<RefType> {
308        match self {
309            MaybeType::Bottom => MaybeType::Bottom,
310            MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(*ty),
311            MaybeType::Known(ty) => MaybeType::Known(ty.as_non_null()),
312        }
313    }
314
315    fn is_maybe_shared(&self, resources: &impl WasmModuleResources) -> Option<bool> {
316        match self {
317            MaybeType::Bottom => None,
318            MaybeType::UnknownRef(_) => None,
319            MaybeType::Known(ty) => Some(resources.is_shared(*ty)),
320        }
321    }
322}
323
324impl OperatorValidator {
325    fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self {
326        let OperatorValidatorAllocations {
327            popped_types_tmp,
328            control,
329            operands,
330            local_inits,
331            locals_first,
332            locals_uncached,
333        } = allocs;
334        debug_assert!(popped_types_tmp.is_empty());
335        debug_assert!(control.is_empty());
336        debug_assert!(operands.is_empty());
337        debug_assert!(local_inits.is_empty());
338        debug_assert!(local_inits.is_empty());
339        debug_assert!(locals_first.is_empty());
340        debug_assert!(locals_uncached.is_empty());
341        OperatorValidator {
342            locals: Locals {
343                num_locals: 0,
344                first: locals_first,
345                uncached: locals_uncached,
346            },
347            local_inits,
348            features: *features,
349            popped_types_tmp,
350            operands,
351            control,
352            shared: false,
353            #[cfg(debug_assertions)]
354            pop_push_log: vec![],
355        }
356    }
357
358    /// Creates a new operator validator which will be used to validate a
359    /// function whose type is the `ty` index specified.
360    ///
361    /// The `resources` are used to learn about the function type underlying
362    /// `ty`.
363    pub fn new_func<T>(
364        ty: u32,
365        offset: usize,
366        features: &WasmFeatures,
367        resources: &T,
368        allocs: OperatorValidatorAllocations,
369    ) -> Result<Self>
370    where
371        T: WasmModuleResources,
372    {
373        let mut ret = OperatorValidator::new(features, allocs);
374        ret.control.push(Frame {
375            kind: FrameKind::Block,
376            block_type: BlockType::FuncType(ty),
377            height: 0,
378            unreachable: false,
379            init_height: 0,
380        });
381
382        // Retrieve the function's type via index (`ty`); the `offset` is
383        // necessary due to `sub_type_at`'s error messaging.
384        let sub_ty = OperatorValidatorTemp {
385            offset,
386            inner: &mut ret,
387            resources,
388        }
389        .sub_type_at(ty)?;
390
391        // Set up the function's locals.
392        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
393            for ty in func_ty.params() {
394                ret.locals.define(1, *ty);
395            }
396            ret.local_inits.define_params(func_ty.params().len());
397        } else {
398            bail!(offset, "expected func type at index {ty}, found {sub_ty}")
399        }
400
401        // If we're in a shared function, ensure we do not access unshared
402        // objects.
403        if sub_ty.composite_type.shared {
404            ret.shared = true;
405        }
406        Ok(ret)
407    }
408
409    /// Creates a new operator validator which will be used to validate an
410    /// `init_expr` constant expression which should result in the `ty`
411    /// specified.
412    pub fn new_const_expr(
413        features: &WasmFeatures,
414        ty: ValType,
415        allocs: OperatorValidatorAllocations,
416    ) -> Self {
417        let mut ret = OperatorValidator::new(features, allocs);
418        ret.control.push(Frame {
419            kind: FrameKind::Block,
420            block_type: BlockType::Type(ty),
421            height: 0,
422            unreachable: false,
423            init_height: 0,
424        });
425        ret
426    }
427
428    pub fn define_locals(
429        &mut self,
430        offset: usize,
431        count: u32,
432        mut ty: ValType,
433        resources: &impl WasmModuleResources,
434    ) -> Result<()> {
435        resources.check_value_type(&mut ty, &self.features, offset)?;
436        if count == 0 {
437            return Ok(());
438        }
439        if !self.locals.define(count, ty) {
440            return Err(BinaryReaderError::new(
441                "too many locals: locals exceed maximum",
442                offset,
443            ));
444        }
445        self.local_inits.define_locals(count, ty);
446        Ok(())
447    }
448
449    /// Returns the current operands stack height.
450    pub fn operand_stack_height(&self) -> usize {
451        self.operands.len()
452    }
453
454    /// Returns the optional value type of the value operand at the given
455    /// `depth` from the top of the operand stack.
456    ///
457    /// - Returns `None` if the `depth` is out of bounds.
458    /// - Returns `Some(None)` if there is a value with unknown type
459    /// at the given `depth`.
460    ///
461    /// # Note
462    ///
463    /// A `depth` of 0 will refer to the last operand on the stack.
464    pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> {
465        Some(match self.operands.iter().rev().nth(depth)? {
466            MaybeType::Known(t) => Some(*t),
467            MaybeType::Bottom | MaybeType::UnknownRef(..) => None,
468        })
469    }
470
471    /// Returns the number of frames on the control flow stack.
472    pub fn control_stack_height(&self) -> usize {
473        self.control.len()
474    }
475
476    /// Validates a relative jump to the `depth` specified.
477    ///
478    /// Returns the type signature of the block that we're jumping to as well
479    /// as the kind of block if the jump is valid. Otherwise returns an error.
480    pub(crate) fn jump(&self, depth: u32) -> Option<(BlockType, FrameKind)> {
481        assert!(!self.control.is_empty());
482        let i = (self.control.len() - 1).checked_sub(depth as usize)?;
483        let frame = &self.control[i];
484        Some((frame.block_type, frame.kind))
485    }
486
487    pub fn get_frame(&self, depth: usize) -> Option<&Frame> {
488        self.control.iter().rev().nth(depth)
489    }
490
491    /// Create a temporary [`OperatorValidatorTemp`] for validation.
492    pub fn with_resources<'a, 'validator, 'resources, T>(
493        &'validator mut self,
494        resources: &'resources T,
495        offset: usize,
496    ) -> impl VisitOperator<'a, Output = Result<()>> + ModuleArity + FrameStack + 'validator
497    where
498        T: WasmModuleResources,
499        'resources: 'validator,
500    {
501        WasmProposalValidator(OperatorValidatorTemp {
502            offset,
503            inner: self,
504            resources,
505        })
506    }
507
508    /// Same as `with_resources` above but guarantees it's able to visit simd
509    /// operators as well.
510    #[cfg(feature = "simd")]
511    pub fn with_resources_simd<'a, 'validator, 'resources, T>(
512        &'validator mut self,
513        resources: &'resources T,
514        offset: usize,
515    ) -> impl VisitSimdOperator<'a, Output = Result<()>> + ModuleArity + 'validator
516    where
517        T: WasmModuleResources,
518        'resources: 'validator,
519    {
520        WasmProposalValidator(OperatorValidatorTemp {
521            offset,
522            inner: self,
523            resources,
524        })
525    }
526
527    pub fn into_allocations(mut self) -> OperatorValidatorAllocations {
528        fn clear<T>(mut tmp: Vec<T>) -> Vec<T> {
529            tmp.clear();
530            tmp
531        }
532        OperatorValidatorAllocations {
533            popped_types_tmp: clear(self.popped_types_tmp),
534            control: clear(self.control),
535            operands: clear(self.operands),
536            local_inits: {
537                self.local_inits.clear();
538                self.local_inits
539            },
540            locals_first: clear(self.locals.first),
541            locals_uncached: clear(self.locals.uncached),
542        }
543    }
544
545    fn record_pop(&mut self) {
546        #[cfg(debug_assertions)]
547        {
548            self.pop_push_log.push(false);
549        }
550    }
551
552    fn record_push(&mut self) {
553        #[cfg(debug_assertions)]
554        {
555            self.pop_push_log.push(true);
556        }
557    }
558}
559
560impl<R> Deref for OperatorValidatorTemp<'_, '_, R> {
561    type Target = OperatorValidator;
562    fn deref(&self) -> &OperatorValidator {
563        self.inner
564    }
565}
566
567impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> {
568    fn deref_mut(&mut self) -> &mut OperatorValidator {
569        self.inner
570    }
571}
572
573impl<'resources, R> OperatorValidatorTemp<'_, 'resources, R>
574where
575    R: WasmModuleResources,
576{
577    /// Pushes a type onto the operand stack.
578    ///
579    /// This is used by instructions to represent a value that is pushed to the
580    /// operand stack. This can fail, but only if `Type` is feature gated.
581    /// Otherwise the push operation always succeeds.
582    fn push_operand<T>(&mut self, ty: T) -> Result<()>
583    where
584        T: Into<MaybeType>,
585    {
586        let maybe_ty = ty.into();
587
588        if cfg!(debug_assertions) {
589            match maybe_ty {
590                MaybeType::Known(ValType::Ref(r)) => match r.heap_type() {
591                    HeapType::Concrete(index) | HeapType::Exact(index) => {
592                        debug_assert!(
593                            matches!(index, UnpackedIndex::Id(_)),
594                            "only ref types referencing `CoreTypeId`s can \
595                             be pushed to the operand stack"
596                        );
597                    }
598                    _ => {}
599                },
600                _ => {}
601            }
602        }
603
604        self.operands.push(maybe_ty);
605        self.record_push();
606        Ok(())
607    }
608
609    fn push_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<()> {
610        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));
611
612        // Canonicalize the module index into an id.
613        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
614        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));
615
616        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
617            format_err!(self.offset, "implementation limit: type index too large")
618        })?;
619
620        self.push_operand(ref_ty)
621    }
622
623    fn push_exact_ref(&mut self, nullable: bool, type_index: u32) -> Result<()> {
624        let mut heap_ty = HeapType::Exact(UnpackedIndex::Module(type_index));
625
626        // Canonicalize the module index into an id.
627        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
628        debug_assert!(matches!(heap_ty, HeapType::Exact(UnpackedIndex::Id(_))));
629
630        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
631            format_err!(self.offset, "implementation limit: type index too large")
632        })?;
633
634        self.push_operand(ref_ty)
635    }
636
637    fn push_exact_ref_if_available(&mut self, nullable: bool, type_index: u32) -> Result<()> {
638        if self.features.custom_descriptors() {
639            self.push_exact_ref(nullable, type_index)
640        } else {
641            self.push_concrete_ref(nullable, type_index)
642        }
643    }
644
645    fn pop_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<MaybeType> {
646        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));
647
648        // Canonicalize the module index into an id.
649        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
650        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));
651
652        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
653            format_err!(self.offset, "implementation limit: type index too large")
654        })?;
655
656        self.pop_operand(Some(ref_ty.into()))
657    }
658
    /// Pops a concrete reference to `type_index` and additionally reports
    /// whether the popped value is known to match the *exact* reference to
    /// that type.
    ///
    /// Returns the popped type plus an "is exact" flag. The flag is `true`
    /// for the bottom type (unreachable code matches anything) and for known
    /// exact/none references that are subtypes of the exact reference built
    /// from `type_index`.
    fn pop_concrete_or_exact_ref(
        &mut self,
        nullable: bool,
        type_index: u32,
    ) -> Result<(MaybeType, bool)> {
        let ty = self.pop_concrete_ref(nullable, type_index)?;
        let is_exact = match ty {
            MaybeType::Known(ValType::Ref(rt)) if rt.is_exact_type_ref() || rt.is_none_ref() => {
                // Build the exact reference to compare against, canonicalizing
                // the module index into an id first.
                let mut heap_ty = HeapType::Exact(UnpackedIndex::Module(type_index));
                self.resources.check_heap_type(&mut heap_ty, self.offset)?;
                let expected = RefType::new(nullable, heap_ty).ok_or_else(|| {
                    format_err!(self.offset, "implementation limit: type index too large")
                })?;
                self.resources.is_subtype(rt.into(), expected.into())
            }
            // The bottom type matches any expectation, including exactness.
            MaybeType::Bottom => true,
            _ => false,
        };
        Ok((ty, is_exact))
    }
679
680    /// Pop the given label types, checking that they are indeed present on the
681    /// stack, and then push them back on again.
682    fn pop_push_label_types(
683        &mut self,
684        label_types: impl PreciseIterator<Item = ValType>,
685    ) -> Result<()> {
686        for ty in label_types.clone().rev() {
687            self.pop_operand(Some(ty))?;
688        }
689        for ty in label_types {
690            self.push_operand(ty)?;
691        }
692        Ok(())
693    }
694
    /// Attempts to pop a type from the operand stack.
    ///
    /// This function is used to remove types from the operand stack. The
    /// `expected` argument can be used to indicate that a type is required, or
    /// simply that something is needed to be popped.
    ///
    /// If `expected` is `Some(T)` then this will be guaranteed to return
    /// `T`, and it will only return success if the current block is
    /// unreachable or if `T` was found at the top of the operand stack.
    ///
    /// If `expected` is `None` then it indicates that something must be on the
    /// operand stack, but it doesn't matter what's on the operand stack. This
    /// is useful for polymorphic instructions like `select`.
    ///
    /// If `Some(T)` is returned then `T` was popped from the operand stack and
    /// matches `expected`. If `None` is returned then it means that `None` was
    /// expected and a type was successfully popped, but its exact type is
    /// indeterminate because the current block is unreachable.
    fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
        // This method is one of the hottest methods in the validator so to
        // improve codegen this method contains a fast-path success case where
        // if the top operand on the stack is as expected it's returned
        // immediately. This is the most common case where the stack will indeed
        // have the expected type and all we need to do is pop it off.
        //
        // Note that this still has to be careful to be correct, though. For
        // efficiency an operand is unconditionally popped and on success it is
        // matched against the state of the world to see if we could actually
        // pop it. If we shouldn't have popped it then it's passed to the slow
        // path to get pushed back onto the stack.
        let popped = match self.operands.pop() {
            Some(MaybeType::Known(actual_ty)) => {
                // Fast path: exact type match and the pop stays within the
                // current control frame's region of the stack.
                if Some(actual_ty) == expected {
                    if let Some(control) = self.control.last() {
                        if self.operands.len() >= control.height {
                            self.record_pop();
                            return Ok(MaybeType::Known(actual_ty));
                        }
                    }
                }
                Some(MaybeType::Known(actual_ty))
            }
            other => other,
        };

        // Slow path: `_pop_operand` first re-pushes `popped` (if any) and
        // then performs the full spec-compliant check.
        self._pop_operand(expected, popped)
    }
742
    // This is the "real" implementation of `pop_operand` which is 100%
    // spec-compliant with little attention paid to efficiency since this is the
    // slow-path from the actual `pop_operand` function above.
    //
    // `popped` is the operand that the fast path speculatively removed (if
    // any); it is pushed back first so this function examines the stack from
    // a consistent state.
    #[cold]
    fn _pop_operand(
        &mut self,
        expected: Option<ValType>,
        popped: Option<MaybeType>,
    ) -> Result<MaybeType> {
        // Undo the fast path's speculative pop (no-op when `popped` is `None`).
        self.operands.extend(popped);
        let control = self.control.last().unwrap();
        let actual = if self.operands.len() == control.height && control.unreachable {
            // The frame's portion of the stack is exhausted but the frame is
            // unreachable: conjure a `bottom` operand per the spec.
            MaybeType::Bottom
        } else {
            if self.operands.len() == control.height {
                let desc = match expected {
                    Some(ty) => ty_to_str(ty),
                    None => "a type".into(),
                };
                bail!(
                    self.offset,
                    "type mismatch: expected {desc} but nothing on stack"
                )
            } else {
                self.operands.pop().unwrap()
            }
        };
        if let Some(expected) = expected {
            match (actual, expected) {
                // The bottom type matches all expectations
                (MaybeType::Bottom, _) => {}

                // The "heap bottom" type only matches other references types,
                // but not any integer types. Note that if the heap bottom is
                // known to have a specific abstract heap type then a subtype
                // check is performed against the expected type.
                (MaybeType::UnknownRef(actual_ty), ValType::Ref(expected)) => {
                    if let Some(actual) = actual_ty {
                        let expected_shared = self.resources.is_shared(expected);
                        let actual = RefType::new(
                            false,
                            HeapType::Abstract {
                                shared: expected_shared,
                                ty: actual,
                            },
                        )
                        .unwrap();
                        if !self.resources.is_subtype(actual.into(), expected.into()) {
                            bail!(
                                self.offset,
                                "type mismatch: expected {}, found {}",
                                ty_to_str(expected.into()),
                                ty_to_str(actual.into())
                            );
                        }
                    }
                }

                // Use the `is_subtype` predicate to test if a found type matches
                // the expectation.
                (MaybeType::Known(actual), expected) => {
                    if !self.resources.is_subtype(actual, expected) {
                        bail!(
                            self.offset,
                            "type mismatch: expected {}, found {}",
                            ty_to_str(expected),
                            ty_to_str(actual)
                        );
                    }
                }

                // A "heap bottom" type cannot match any numeric types.
                (
                    MaybeType::UnknownRef(..),
                    ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
                ) => {
                    bail!(
                        self.offset,
                        "type mismatch: expected {}, found heap type",
                        ty_to_str(expected)
                    )
                }
            }
        }
        self.record_pop();
        Ok(actual)
    }
830
831    /// Match expected vs. actual operand.
832    fn match_operand(
833        &mut self,
834        actual: ValType,
835        expected: ValType,
836    ) -> Result<(), BinaryReaderError> {
837        self.push_operand(actual)?;
838        self.pop_operand(Some(expected))?;
839        Ok(())
840    }
841
    /// Match a type sequence to the top of the stack.
    ///
    /// Pops each type in `expected_tys` (checking subtyping) and then pushes
    /// the popped actual types back, leaving the stack unchanged on success.
    fn match_stack_operands(
        &mut self,
        expected_tys: impl PreciseIterator<Item = ValType> + 'resources,
    ) -> Result<()> {
        // Temporarily take ownership of the scratch buffer so `self` can be
        // mutably borrowed by the pop/push calls below.
        let mut popped_types_tmp = mem::take(&mut self.popped_types_tmp);
        debug_assert!(popped_types_tmp.is_empty());
        popped_types_tmp.reserve(expected_tys.len());

        // Pop expectations back-to-front: the sequence's last type sits on
        // top of the operand stack.
        for expected_ty in expected_tys.rev() {
            let actual_ty = self.pop_operand(Some(expected_ty))?;
            popped_types_tmp.push(actual_ty);
        }
        // Restore the stack by re-pushing the (possibly more precise) actual
        // types in their original order.
        for ty in popped_types_tmp.drain(..).rev() {
            self.push_operand(ty)?;
        }

        // Return the (now empty) scratch buffer for reuse by later calls.
        debug_assert!(self.popped_types_tmp.is_empty());
        self.popped_types_tmp = popped_types_tmp;
        Ok(())
    }
863
    /// Pop a reference type from the operand stack.
    ///
    /// If `expected` is provided the popped operand must be a subtype of it
    /// (enforced by `pop_operand`). Errors if the popped operand is a known
    /// non-reference type.
    fn pop_ref(&mut self, expected: Option<RefType>) -> Result<MaybeType<RefType>> {
        match self.pop_operand(expected.map(|t| t.into()))? {
            // The bottom type (from unreachable code) matches any reference.
            MaybeType::Bottom => Ok(MaybeType::UnknownRef(None)),
            MaybeType::UnknownRef(ty) => Ok(MaybeType::UnknownRef(ty)),
            MaybeType::Known(ValType::Ref(rt)) => Ok(MaybeType::Known(rt)),
            // Any other known type (numeric/vector) cannot be a reference.
            MaybeType::Known(ty) => bail!(
                self.offset,
                "type mismatch: expected ref but found {}",
                ty_to_str(ty)
            ),
        }
    }
877
    /// Pop a reference type from the operand stack, checking if it is a subtype
    /// of a nullable type of `expected` or the shared version of `expected`.
    ///
    /// This function returns the popped reference type and its `shared`-ness,
    /// saving extra lookups for concrete types.
    fn pop_maybe_shared_ref(&mut self, expected: AbstractHeapType) -> Result<MaybeType<RefType>> {
        let actual = match self.pop_ref(None)? {
            // Bottom matches anything; propagate it unchanged.
            MaybeType::Bottom => return Ok(MaybeType::Bottom),
            MaybeType::UnknownRef(None) => return Ok(MaybeType::UnknownRef(None)),
            // An unknown ref with a known abstract heap type: check the
            // abstract-type relation directly (sharedness is unknown here).
            MaybeType::UnknownRef(Some(actual)) => {
                if !actual.is_subtype_of(expected) {
                    bail!(
                        self.offset,
                        "type mismatch: expected subtype of {}, found {}",
                        expected.as_str(false),
                        actual.as_str(false),
                    )
                }
                return Ok(MaybeType::UnknownRef(Some(actual)));
            }
            MaybeType::Known(ty) => ty,
        };
        // Change our expectation based on whether we're dealing with an actual
        // shared or unshared type.
        let is_actual_shared = self.resources.is_shared(actual);
        let expected = RefType::new(
            true,
            HeapType::Abstract {
                shared: is_actual_shared,
                ty: expected,
            },
        )
        .unwrap();

        // Check (again) that the actual type is a subtype of the expected type.
        // Note that `_pop_operand` already does this kind of thing but we leave
        // that for a future refactoring (TODO).
        if !self.resources.is_subtype(actual.into(), expected.into()) {
            bail!(
                self.offset,
                "type mismatch: expected subtype of {expected}, found {actual}",
            )
        }
        Ok(MaybeType::Known(actual))
    }
923
924    /// Fetches the type for the local at `idx`, returning an error if it's out
925    /// of bounds.
926    fn local(&self, idx: u32) -> Result<ValType> {
927        match self.locals.get(idx) {
928            Some(ty) => Ok(ty),
929            None => bail!(
930                self.offset,
931                "unknown local {}: local index out of bounds",
932                idx
933            ),
934        }
935    }
936
937    /// Flags the current control frame as unreachable, additionally truncating
938    /// the currently active operand stack.
939    fn unreachable(&mut self) -> Result<()> {
940        let control = self.control.last_mut().unwrap();
941        control.unreachable = true;
942        let new_height = control.height;
943        self.operands.truncate(new_height);
944        Ok(())
945    }
946
    /// Pushes a new frame onto the control stack.
    ///
    /// This operation is used when entering a new block such as an if, loop,
    /// or block itself. The `kind` of block is specified which indicates how
    /// breaks interact with this block's type. Additionally the type signature
    /// of the block is specified by `ty`.
    fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
        // Push a new frame which has a snapshot of the height of the current
        // operand stack.
        let height = self.operands.len();
        // Also snapshot the local-initialization state so `pop_ctrl` can
        // reset it when this frame exits.
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind,
            block_type: ty,
            height,
            unreachable: false,
            init_height,
        });
        // All of the parameters are now also available in this control frame,
        // so we push them here in order.
        for ty in self.params(ty)? {
            self.push_operand(ty)?;
        }
        Ok(())
    }
972
    /// Pops a frame from the control stack.
    ///
    /// This function is used when exiting a block and leaves a block scope.
    /// Internally this will validate that blocks have the correct result type.
    fn pop_ctrl(&mut self) -> Result<Frame> {
        // Read the expected type and expected height of the operand stack at
        // the end of the frame.
        let frame = self.control.last().unwrap();
        let ty = frame.block_type;
        let height = frame.height;
        let init_height = frame.init_height;

        // reset_locals in the spec: locals set inside this frame are no
        // longer guaranteed-initialized once the frame exits.
        self.local_inits.pop_ctrl(init_height);

        // Pop all the result types, in reverse order, from the operand stack.
        // These types will, possibly, be transferred to the next frame.
        for ty in self.results(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make sure that the operand stack has returned to its original
        // height...
        if self.operands.len() != height {
            bail!(
                self.offset,
                "type mismatch: values remaining on stack at end of block"
            );
        }

        // And then we can remove it!
        Ok(self.control.pop().unwrap())
    }
1006
1007    /// Validates a relative jump to the `depth` specified.
1008    ///
1009    /// Returns the type signature of the block that we're jumping to as well
1010    /// as the kind of block if the jump is valid. Otherwise returns an error.
1011    fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> {
1012        match self.inner.jump(depth) {
1013            Some(tup) => Ok(tup),
1014            None => bail!(self.offset, "unknown label: branch depth too large"),
1015        }
1016    }
1017
1018    /// Validates that `memory_index` is valid in this module, and returns the
1019    /// type of address used to index the memory specified.
1020    fn check_memory_index(&self, memory_index: u32) -> Result<ValType> {
1021        match self.resources.memory_at(memory_index) {
1022            Some(mem) => Ok(mem.index_type()),
1023            None => bail!(self.offset, "unknown memory {}", memory_index),
1024        }
1025    }
1026
    /// Validates a `memarg` for alignment and such (also the memory it
    /// references), and returns the type of index used to address the memory.
    fn check_memarg(&self, memarg: MemArg) -> Result<ValType> {
        let index_ty = self.check_memory_index(memarg.memory)?;
        // Alignment is an exponent; it must not exceed the access's natural
        // alignment (`max_align`).
        if memarg.align > memarg.max_align {
            bail!(
                self.offset,
                "invalid memop alignment: alignment must not be larger than natural"
            );
        }
        // 32-bit-indexed memories cannot use offsets above u32::MAX.
        // NOTE(review): the check rejects offsets >= 2**32 while the message
        // says "<= 2**32"; the exact text may be matched by external tests --
        // confirm before rewording.
        if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) {
            bail!(self.offset, "offset out of range: must be <= 2**32");
        }
        Ok(index_ty)
    }
1042
1043    fn check_floats_enabled(&self) -> Result<()> {
1044        if !self.features.floats() {
1045            bail!(self.offset, "floating-point instruction disallowed");
1046        }
1047        Ok(())
1048    }
1049
1050    fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> {
1051        if memarg.align != memarg.max_align {
1052            bail!(
1053                self.offset,
1054                "atomic instructions must always specify maximum alignment"
1055            );
1056        }
1057        self.check_memory_index(memarg.memory)
1058    }
1059
    /// Validates a block type, primarily with various in-flight proposals.
    ///
    /// `ty` may be canonicalized in place by `check_value_type`.
    fn check_block_type(&self, ty: &mut BlockType) -> Result<()> {
        match ty {
            BlockType::Empty => Ok(()),
            BlockType::Type(t) => self
                .resources
                .check_value_type(t, &self.features, self.offset),
            BlockType::FuncType(idx) => {
                // Function-typed blocks (multiple params/results) require the
                // multi-value proposal.
                if !self.features.multi_value() {
                    // NOTE(review): this message reads backwards ("... when
                    // multi-value is not enabled") -- the check fires when
                    // multi-value is disabled. The exact text may be matched
                    // by external tests; confirm before rewording.
                    bail!(
                        self.offset,
                        "blocks, loops, and ifs may only produce a resulttype \
                         when multi-value is not enabled",
                    );
                }
                // Also validate that the index refers to a function type.
                self.func_type_at(*idx)?;
                Ok(())
            }
        }
    }
1080
1081    /// Returns the corresponding function type for the `func` item located at
1082    /// `function_index`.
1083    fn type_of_function(&self, function_index: u32) -> Result<&'resources FuncType> {
1084        if let Some(type_index) = self.resources.type_index_of_function(function_index) {
1085            self.func_type_at(type_index)
1086        } else {
1087            bail!(
1088                self.offset,
1089                "unknown function {function_index}: function index out of bounds",
1090            )
1091        }
1092    }
1093
    /// Checks a call-style instruction which will be invoking the function `ty`
    /// specified.
    ///
    /// This will pop parameters from the operand stack for the function's
    /// parameters and then push the results of the function on the stack.
    fn check_call_ty(&mut self, ty: &FuncType) -> Result<()> {
        // Parameters are popped back-to-front: the last parameter is on top
        // of the operand stack.
        for &ty in ty.params().iter().rev() {
            debug_assert_type_indices_are_ids(ty);
            self.pop_operand(Some(ty))?;
        }
        // Results are pushed in declaration order.
        for &ty in ty.results() {
            debug_assert_type_indices_are_ids(ty);
            self.push_operand(ty)?;
        }
        Ok(())
    }
1110
    /// Similar to `check_call_ty` except used for tail-call instructions.
    fn check_return_call_ty(&mut self, ty: &FuncType) -> Result<()> {
        // The callee's results must match this function's declared results.
        self.check_func_type_same_results(ty)?;
        // Pop the callee's parameters, last parameter on top.
        for &ty in ty.params().iter().rev() {
            debug_assert_type_indices_are_ids(ty);
            self.pop_operand(Some(ty))?;
        }

        // Match the results with this function's: push the callee's results
        // and let `check_return` pop/validate them (it also marks the
        // remainder of this frame unreachable).
        for &ty in ty.results() {
            debug_assert_type_indices_are_ids(ty);
            self.push_operand(ty)?;
        }
        self.check_return()?;

        Ok(())
    }
1128
    /// Checks the immediate `type_index` of a `call_ref`-style instruction
    /// (also `return_call_ref`).
    ///
    /// This will validate that the value on the stack is a `(ref type_index)`
    /// or a subtype. This will then return the corresponding function type used
    /// for this call (to be used with `check_call_ty` or
    /// `check_return_call_ty`).
    fn check_call_ref_ty(&mut self, type_index: u32) -> Result<&'resources FuncType> {
        let unpacked_index = UnpackedIndex::Module(type_index);
        let mut hty = HeapType::Concrete(unpacked_index);
        // Canonicalize the concrete heap type before building an expectation
        // out of it.
        self.resources.check_heap_type(&mut hty, self.offset)?;
        // A nullable reference is accepted; nullness traps at runtime.
        let expected = RefType::new(true, hty).expect("hty should be previously validated");
        self.pop_ref(Some(expected))?;
        self.func_type_at(type_index)
    }
1144
    /// Validates the immediate operands of a `call_indirect` or
    /// `return_call_indirect` instruction.
    ///
    /// This will validate that `table_index` is valid and a funcref table. It
    /// will additionally pop the index argument which is used to index into the
    /// table.
    ///
    /// The return value of this function is the function type behind
    /// `type_index` which must then be passed to `check_{call,return_call}_ty`.
    fn check_call_indirect_ty(
        &mut self,
        type_index: u32,
        table_index: u32,
    ) -> Result<&'resources FuncType> {
        let tab = self.table_type_at(table_index)?;
        // The table's element type must be a subtype of `funcref`.
        if !self
            .resources
            .is_subtype(ValType::Ref(tab.element_type), ValType::FUNCREF)
        {
            bail!(
                self.offset,
                "type mismatch: indirect calls must go through a table with type <= funcref",
            );
        }
        // Pop the table-index operand; its type depends on whether the table
        // is 32- or 64-bit indexed.
        self.pop_operand(Some(tab.index_type()))?;
        self.func_type_at(type_index)
    }
1172
1173    /// Validates a `return` instruction, popping types from the operand
1174    /// stack that the function needs.
1175    fn check_return(&mut self) -> Result<()> {
1176        assert!(!self.control.is_empty());
1177        for ty in self.results(self.control[0].block_type)?.rev() {
1178            self.pop_operand(Some(ty))?;
1179        }
1180        self.unreachable()?;
1181        Ok(())
1182    }
1183
    /// Check that the given type has the same result types as the current
    /// function's results.
    ///
    /// Used for tail calls: arities must match and each callee result must be
    /// a subtype of the corresponding caller result.
    fn check_func_type_same_results(&self, callee_ty: &FuncType) -> Result<()> {
        assert!(!self.control.is_empty());
        let caller_rets = self.results(self.control[0].block_type)?;
        if callee_ty.results().len() != caller_rets.len()
            || !caller_rets
                .zip(callee_ty.results())
                .all(|(caller_ty, callee_ty)| self.resources.is_subtype(*callee_ty, caller_ty))
        {
            // Only build the (allocating) human-readable type lists on the
            // error path.
            let caller_rets = self
                .results(self.control[0].block_type)?
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            let callee_rets = callee_ty
                .results()
                .iter()
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            bail!(
                self.offset,
                "type mismatch: current function requires result type \
                 [{caller_rets}] but callee returns [{callee_rets}]"
            );
        }
        Ok(())
    }
1213
1214    /// Checks the validity of a common comparison operator.
1215    fn check_cmp_op(&mut self, ty: ValType) -> Result<()> {
1216        self.pop_operand(Some(ty))?;
1217        self.pop_operand(Some(ty))?;
1218        self.push_operand(ValType::I32)?;
1219        Ok(())
1220    }
1221
    /// Checks the validity of a common float comparison operator.
    ///
    /// Also verifies floating-point support is enabled before delegating to
    /// `check_cmp_op`.
    fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> {
        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_cmp_op(ty)
    }
1228
1229    /// Checks the validity of a common unary operator.
1230    fn check_unary_op(&mut self, ty: ValType) -> Result<()> {
1231        self.pop_operand(Some(ty))?;
1232        self.push_operand(ty)?;
1233        Ok(())
1234    }
1235
    /// Checks the validity of a common unary float operator.
    ///
    /// Also verifies floating-point support is enabled before delegating to
    /// `check_unary_op`.
    fn check_funary_op(&mut self, ty: ValType) -> Result<()> {
        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_unary_op(ty)
    }
1242
1243    /// Checks the validity of a common conversion operator.
1244    fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
1245        self.pop_operand(Some(from))?;
1246        self.push_operand(into)?;
1247        Ok(())
1248    }
1249
    /// Checks the validity of a common float conversion operator.
    ///
    /// Also verifies floating-point support is enabled before delegating to
    /// `check_conversion_op`.
    fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
        debug_assert!(matches!(into, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_conversion_op(into, from)
    }
1256
1257    /// Checks the validity of a common binary operator.
1258    fn check_binary_op(&mut self, ty: ValType) -> Result<()> {
1259        self.pop_operand(Some(ty))?;
1260        self.pop_operand(Some(ty))?;
1261        self.push_operand(ty)?;
1262        Ok(())
1263    }
1264
    /// Checks the validity of a common binary float operator.
    ///
    /// Also verifies floating-point support is enabled before delegating to
    /// `check_binary_op`.
    fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> {
        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_binary_op(ty)
    }
1271
    /// Checks the validity of an atomic load operator.
    ///
    /// Pops the address operand and pushes the loaded value of `load_ty`.
    fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> {
        // Atomic accesses require exactly natural alignment.
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(load_ty)?;
        Ok(())
    }
1279
    /// Checks the validity of an atomic store operator.
    ///
    /// Pops the stored value (top of stack) then the address operand.
    fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> {
        let ty = self.check_shared_memarg(memarg)?;
        // The value sits on top of the stack, the address beneath it.
        self.pop_operand(Some(store_ty))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
1287
    /// Checks the validity of atomic binary operator on memory.
    ///
    /// Pops the operand value then the address; pushes the previous value.
    fn check_atomic_binary_memory_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(op_ty))?;
        self.pop_operand(Some(ty))?;
        self.push_operand(op_ty)?;
        Ok(())
    }
1296
    /// Checks the validity of an atomic compare exchange operator on memories.
    ///
    /// Pops the replacement and expected values then the address; pushes the
    /// value previously stored in memory.
    fn check_atomic_binary_memory_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(op_ty))?;
        self.pop_operand(Some(op_ty))?;
        self.pop_operand(Some(ty))?;
        self.push_operand(op_ty)?;
        Ok(())
    }
1306
    /// Common helper for `ref.test` and `ref.cast` downcasting/checking
    /// instructions. Returns the given `heap_type` as a `ValType`.
    fn check_downcast(&mut self, nullable: bool, mut heap_type: HeapType) -> Result<RefType> {
        // Canonicalize/validate the immediate heap type first.
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;

        let sub_ty = RefType::new(nullable, heap_type).ok_or_else(|| {
            BinaryReaderError::new("implementation limit: type index too large", self.offset)
        })?;
        // The operand only needs to be in the same type hierarchy: the
        // expectation is the (nullable) top type of the cast target.
        let sup_ty = RefType::new(true, self.resources.top_type(&heap_type))
            .expect("can't panic with non-concrete heap types");

        self.pop_ref(Some(sup_ty))?;
        Ok(sub_ty)
    }
1322
1323    /// Common helper for both nullable and non-nullable variants of `ref.test`
1324    /// instructions.
1325    fn check_ref_test(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1326        self.check_downcast(nullable, heap_type)?;
1327        self.push_operand(ValType::I32)
1328    }
1329
1330    /// Common helper for both nullable and non-nullable variants of `ref.cast`
1331    /// instructions.
1332    fn check_ref_cast(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1333        let sub_ty = self.check_downcast(nullable, heap_type)?;
1334        self.push_operand(sub_ty)
1335    }
1336
    /// Common helper to check type hierarchy for `br_on_cast` operators.
    ///
    /// With custom descriptors enabled, the source and target types need only
    /// share a top type; otherwise the target must be a subtype of the source.
    fn check_br_on_cast_type_hierarchy(
        &self,
        from_ref_type: RefType,
        to_ref_type: RefType,
    ) -> Result<()> {
        if self.features.custom_descriptors() {
            // The constraint C |- rt_2 <: rt_1 on branching cast instructions
            // before the custom descriptors proposal is relaxed to the constraint
            // that rt_1 and rt_2 share some arbitrary valid supertype rt', i.e.
            // that rt_1 and rt_2 must be in the same heap type hierarchy.
            let from_ref_type_top = self.resources.top_type(&from_ref_type.heap_type());
            let to_ref_type_top = self.resources.top_type(&to_ref_type.heap_type());
            if from_ref_type_top != to_ref_type_top {
                bail!(
                    self.offset,
                    "type mismatch: {from_ref_type} and {to_ref_type} have different heap type hierarchies"
                );
            }
            return Ok(());
        }

        // Pre-proposal rule: the cast target must be a subtype of the source.
        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }
        Ok(())
    }
1370
    /// Common helper to check descriptor for the specified type.
    ///
    /// Returns the module-level type index of `heap_type`'s descriptor type,
    /// or an error if `heap_type` is not a (possibly exact) concrete type
    /// that declares a descriptor.
    fn check_descriptor(&self, heap_type: HeapType) -> Result<u32> {
        Ok(match heap_type {
            HeapType::Exact(idx) | HeapType::Concrete(idx) => {
                if let Some(descriptor_idx) = self
                    .sub_type_at(idx.as_module_index().unwrap())?
                    .composite_type
                    .descriptor_idx
                {
                    // Map the canonical descriptor type id back to a u32
                    // module-level index.
                    u32::try_from(crate::validator::types::TypeIdentifier::index(
                        &descriptor_idx.as_core_type_id().unwrap(),
                    ))
                    .unwrap()
                } else {
                    bail!(self.offset, "cast target must have descriptor")
                }
            }
            _ => bail!(self.offset, "unexpected heap type"),
        })
    }
1391
    /// Pops the descriptor operand for a `ref.cast_desc`-style instruction.
    ///
    /// Returns whether `heap_type` (the cast target) was an exact heap type,
    /// in which case the descriptor operand must itself be of the exact
    /// descriptor type.
    fn check_maybe_exact_descriptor_ref(&mut self, heap_type: HeapType) -> Result<bool> {
        let descriptor_idx = self.check_descriptor(heap_type)?;
        let (ty, _is_exact) = self.pop_concrete_or_exact_ref(true, descriptor_idx)?;
        let is_exact = if let HeapType::Exact(_) = heap_type {
            // For an exact cast target, re-check the popped descriptor value
            // against the *exact* descriptor type rather than the plain
            // concrete one.
            let mut descriptor_ty = HeapType::Exact(UnpackedIndex::Module(descriptor_idx));
            self.resources
                .check_heap_type(&mut descriptor_ty, self.offset)?;
            let descriptor_ty = ValType::Ref(
                RefType::new(true, descriptor_ty)
                    .expect("existing heap types should be within our limits"),
            );

            match ty {
                MaybeType::Known(actual) if !self.resources.is_subtype(actual, descriptor_ty) => {
                    bail!(
                        self.offset,
                        "type mismatch: expected descriptor of exact type {descriptor_ty} found {actual}",
                    );
                }
                // Bottom/unknown refs were already validated by the pop above.
                _ => (),
            }
            true
        } else {
            false
        };
        Ok(is_exact)
    }
1419
1420    /// Common helper for both nullable and non-nullable variants of `ref.cast_desc`
1421    /// instructions.
1422    fn check_ref_cast_desc(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1423        let is_exact = self.check_maybe_exact_descriptor_ref(heap_type)?;
1424
1425        self.check_downcast(nullable, heap_type)?;
1426
1427        let idx = {
1428            let mut heap_type = heap_type;
1429            self.resources
1430                .check_heap_type(&mut heap_type, self.offset)?;
1431            match heap_type {
1432                HeapType::Concrete(index) | HeapType::Exact(index) => {
1433                    index.pack().ok_or_else(|| {
1434                        BinaryReaderError::new(
1435                            "implementation limit: type index too large",
1436                            self.offset,
1437                        )
1438                    })?
1439                }
1440                _ => panic!(),
1441            }
1442        };
1443
1444        self.push_operand(if is_exact {
1445            RefType::exact(nullable, idx)
1446        } else {
1447            RefType::concrete(nullable, idx)
1448        })
1449    }
1450
1451    /// Common helper for checking the types of globals accessed with atomic RMW
1452    /// instructions, which only allow `i32` and `i64`.
1453    fn check_atomic_global_rmw_ty(&self, global_index: u32) -> Result<ValType> {
1454        let ty = self.global_type_at(global_index)?.content_type;
1455        if !(ty == ValType::I32 || ty == ValType::I64) {
1456            bail!(
1457                self.offset,
1458                "invalid type: `global.atomic.rmw.*` only allows `i32` and `i64`"
1459            );
1460        }
1461        Ok(ty)
1462    }
1463
    /// Common helper for checking the types of structs accessed with atomic RMW
    /// instructions, which only allow `i32` and `i64` types.
    ///
    /// `op` is only used in the error message. Pops the operand value and the
    /// struct reference; pushes the field's previous value.
    fn check_struct_atomic_rmw(
        &mut self,
        op: &'static str,
        struct_type_index: u32,
        field_index: u32,
    ) -> Result<()> {
        // The field must be mutable and exactly i32/i64 (no packed storage).
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let field_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.{}` only allows `i32` and `i64`",
                op
            ),
        };
        self.pop_operand(Some(field_ty))?;
        // A nullable struct reference is accepted; null traps at runtime.
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
1487
    /// Common helper for checking the types of arrays accessed with atomic RMW
    /// instructions, which only allow `i32` and `i64`.
    ///
    /// `op` is only used in the error message. Pops the operand value, the
    /// element index, and the array reference; pushes the previous value.
    fn check_array_atomic_rmw(&mut self, op: &'static str, type_index: u32) -> Result<()> {
        // The element type must be mutable and exactly i32/i64.
        let field = self.mutable_array_type_at(type_index)?;
        let elem_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.{}` only allows `i32` and `i64`",
                op
            ),
        };
        self.pop_operand(Some(elem_ty))?;
        // The array element index.
        self.pop_operand(Some(ValType::I32))?;
        // A nullable array reference is accepted; null traps at runtime.
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
1507
1508    fn element_type_at(&self, elem_index: u32) -> Result<RefType> {
1509        match self.resources.element_type_at(elem_index) {
1510            Some(ty) => Ok(ty),
1511            None => bail!(
1512                self.offset,
1513                "unknown elem segment {}: segment index out of bounds",
1514                elem_index
1515            ),
1516        }
1517    }
1518
1519    fn sub_type_at(&self, at: u32) -> Result<&'resources SubType> {
1520        self.resources
1521            .sub_type_at(at)
1522            .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds"))
1523    }
1524
    /// Returns the struct type at `at`, ensuring it actually is a struct type
    /// and enforcing the shared-function access restriction.
    fn struct_type_at(&self, at: u32) -> Result<&'resources StructType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Struct(struct_ty) = &sub_ty.composite_type.inner {
            // A shared function may only reference shared types.
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared functions cannot access unshared structs",
                );
            }
            Ok(struct_ty)
        } else {
            bail!(
                self.offset,
                "expected struct type at index {at}, found {sub_ty}"
            )
        }
    }
1542
    /// Returns the type of field `field_index` within the struct type at
    /// `struct_type_index`, erroring if either index is out of bounds.
    fn struct_field_at(&self, struct_type_index: u32, field_index: u32) -> Result<FieldType> {
        // Convert the index up front so `.get` below can use it directly.
        let field_index = usize::try_from(field_index).map_err(|_| {
            BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
        })?;
        self.struct_type_at(struct_type_index)?
            .fields
            .get(field_index)
            .copied()
            .ok_or_else(|| {
                BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
            })
    }
1555
1556    fn mutable_struct_field_at(
1557        &self,
1558        struct_type_index: u32,
1559        field_index: u32,
1560    ) -> Result<FieldType> {
1561        let field = self.struct_field_at(struct_type_index, field_index)?;
1562        if !field.mutable {
1563            bail!(
1564                self.offset,
1565                "invalid struct modification: struct field is immutable"
1566            )
1567        }
1568        Ok(field)
1569    }
1570
    /// Returns the element field type of the array type at `at`, ensuring it
    /// actually is an array type and enforcing the shared-function access
    /// restriction.
    fn array_type_at(&self, at: u32) -> Result<FieldType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Array(array_ty) = &sub_ty.composite_type.inner {
            // A shared function may only reference shared types.
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared functions cannot access unshared arrays",
                );
            }
            Ok(array_ty.0)
        } else {
            bail!(
                self.offset,
                "expected array type at index {at}, found {sub_ty}"
            )
        }
    }
1588
1589    fn mutable_array_type_at(&self, at: u32) -> Result<FieldType> {
1590        let field = self.array_type_at(at)?;
1591        if !field.mutable {
1592            bail!(
1593                self.offset,
1594                "invalid array modification: array is immutable"
1595            )
1596        }
1597        Ok(field)
1598    }
1599
1600    fn func_type_at(&self, at: u32) -> Result<&'resources FuncType> {
1601        let sub_ty = self.sub_type_at(at)?;
1602        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
1603            if self.inner.shared && !sub_ty.composite_type.shared {
1604                bail!(
1605                    self.offset,
1606                    "shared functions cannot access unshared functions",
1607                );
1608            }
1609            Ok(func_ty)
1610        } else {
1611            bail!(
1612                self.offset,
1613                "expected func type at index {at}, found {sub_ty}"
1614            )
1615        }
1616    }
1617
1618    fn cont_type_at(&self, at: u32) -> Result<&ContType> {
1619        let sub_ty = self.sub_type_at(at)?;
1620        if let CompositeInnerType::Cont(cont_ty) = &sub_ty.composite_type.inner {
1621            if self.inner.shared && !sub_ty.composite_type.shared {
1622                bail!(
1623                    self.offset,
1624                    "shared continuations cannot access unshared continuations",
1625                );
1626            }
1627            Ok(cont_ty)
1628        } else {
1629            bail!(self.offset, "non-continuation type {at}",)
1630        }
1631    }
1632
1633    fn func_type_of_cont_type(&self, cont_ty: &ContType) -> &'resources FuncType {
1634        let func_id = cont_ty.0.as_core_type_id().expect("valid core type id");
1635        self.resources.sub_type_at_id(func_id).unwrap_func()
1636    }
1637
1638    fn tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1639        self.resources
1640            .tag_at(at)
1641            .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at))
1642    }
1643
1644    // Similar to `tag_at`, but checks that the result type is
1645    // empty. This is necessary when enabling the stack switching
1646    // feature as it allows non-empty result types on tags.
1647    fn exception_tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1648        let func_ty = self.tag_at(at)?;
1649        if func_ty.results().len() != 0 {
1650            bail!(
1651                self.offset,
1652                "invalid exception type: non-empty tag result type"
1653            );
1654        }
1655        Ok(func_ty)
1656    }
1657
1658    fn global_type_at(&self, at: u32) -> Result<GlobalType> {
1659        if let Some(ty) = self.resources.global_at(at) {
1660            if self.inner.shared && !ty.shared {
1661                bail!(
1662                    self.offset,
1663                    "shared functions cannot access unshared globals",
1664                );
1665            }
1666            Ok(ty)
1667        } else {
1668            bail!(self.offset, "unknown global: global index out of bounds");
1669        }
1670    }
1671
1672    /// Validates that the `table` is valid and returns the type it points to.
1673    fn table_type_at(&self, table: u32) -> Result<TableType> {
1674        match self.resources.table_at(table) {
1675            Some(ty) => {
1676                if self.inner.shared && !ty.shared {
1677                    bail!(
1678                        self.offset,
1679                        "shared functions cannot access unshared tables",
1680                    );
1681                }
1682                Ok(ty)
1683            }
1684            None => bail!(
1685                self.offset,
1686                "unknown table {table}: table index out of bounds"
1687            ),
1688        }
1689    }
1690
1691    fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1692        Ok(match ty {
1693            BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
1694            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.params().iter().copied()),
1695        })
1696    }
1697
1698    fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1699        Ok(match ty {
1700            BlockType::Empty => Either::B(None.into_iter()),
1701            BlockType::Type(t) => Either::B(Some(t).into_iter()),
1702            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.results().iter().copied()),
1703        })
1704    }
1705
1706    fn label_types(
1707        &self,
1708        ty: BlockType,
1709        kind: FrameKind,
1710    ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1711        Ok(match kind {
1712            FrameKind::Loop => Either::A(self.params(ty)?),
1713            _ => Either::B(self.results(ty)?),
1714        })
1715    }
1716
1717    fn check_data_segment(&self, data_index: u32) -> Result<()> {
1718        match self.resources.data_count() {
1719            None => bail!(self.offset, "data count section required"),
1720            Some(count) if data_index < count => Ok(()),
1721            Some(_) => bail!(self.offset, "unknown data segment {data_index}"),
1722        }
1723    }
1724
    /// Validates the handler table of a `resume`-style instruction annotated
    /// with continuation type `type_index`, returning that continuation's
    /// underlying function type (`ts1 -> ts2` in the comments below).
    fn check_resume_table(
        &mut self,
        table: ResumeTable,
        type_index: u32, // The type index annotation on the `resume` instruction, which `table` appears on.
    ) -> Result<&'resources FuncType> {
        let cont_ty = self.cont_type_at(type_index)?;
        // ts1 -> ts2
        let old_func_ty = self.func_type_of_cont_type(cont_ty);
        for handle in table.handlers {
            match handle {
                Handle::OnLabel { tag, label } => {
                    // ts1' -> ts2'
                    let tag_ty = self.tag_at(tag)?;
                    // ts1'' (ref (cont $ft))
                    let block = self.jump(label)?;
                    // Pop the continuation reference.
                    // The label's last type must be a concrete reference to a
                    // continuation type.
                    match self.label_types(block.0, block.1)?.last() {
                        Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                            let sub_ty = self.resources.sub_type_at_id(
                                rt.type_index()
                                    .unwrap()
                                    .as_core_type_id()
                                    .expect("canonicalized index"),
                            );
                            let new_cont = if let CompositeInnerType::Cont(cont) =
                                &sub_ty.composite_type.inner
                            {
                                cont
                            } else {
                                bail!(self.offset, "non-continuation type");
                            };
                            let new_func_ty = self.func_type_of_cont_type(&new_cont);
                            // Check that (ts2' -> ts2) <: $ft
                            if new_func_ty.params().len() != tag_ty.results().len()
                                || !self.is_subtype_many(new_func_ty.params(), tag_ty.results())
                                || old_func_ty.results().len() != new_func_ty.results().len()
                                || !self
                                    .is_subtype_many(old_func_ty.results(), new_func_ty.results())
                            {
                                bail!(self.offset, "type mismatch in continuation type")
                            }
                            // The label carries the tag's parameters plus the
                            // trailing continuation reference checked above.
                            let expected_nargs = tag_ty.params().len() + 1;
                            let actual_nargs = self.label_types(block.0, block.1)?.len();
                            if actual_nargs != expected_nargs {
                                bail!(
                                    self.offset,
                                    "type mismatch: expected {expected_nargs} label result(s), but label is annotated with {actual_nargs} results"
                                )
                            }

                            let labeltys =
                                self.label_types(block.0, block.1)?.take(expected_nargs - 1);

                            // Check that ts1'' <: ts1'.
                            // NOTE(review): the destructured names look swapped
                            // relative to the zip order — `tagty` is drawn from
                            // the label types and `lblty` from the tag
                            // parameters, so the call checks
                            // `is_subtype(tag param, label type)`. Confirm the
                            // intended direction against the stack-switching
                            // proposal before renaming.
                            for (tagty, &lblty) in labeltys.zip(tag_ty.params()) {
                                if !self.resources.is_subtype(lblty, tagty) {
                                    bail!(
                                        self.offset,
                                        "type mismatch between tag type and label type"
                                    )
                                }
                            }
                        }
                        Some(ty) => {
                            bail!(self.offset, "type mismatch: {}", ty_to_str(ty))
                        }
                        _ => bail!(
                            self.offset,
                            "type mismatch: instruction requires continuation reference type but label has none"
                        ),
                    }
                }
                Handle::OnSwitch { tag } => {
                    // `switch` handlers require a tag with no parameters.
                    let tag_ty = self.tag_at(tag)?;
                    if tag_ty.params().len() != 0 {
                        bail!(self.offset, "type mismatch: non-empty tag parameter type")
                    }
                }
            }
        }
        Ok(old_func_ty)
    }
1807
1808    /// Applies `is_subtype` pointwise two equally sized collections
1809    /// (i.e. equally sized after skipped elements).
1810    fn is_subtype_many(&mut self, ts1: &[ValType], ts2: &[ValType]) -> bool {
1811        debug_assert!(ts1.len() == ts2.len());
1812        ts1.iter()
1813            .zip(ts2.iter())
1814            .all(|(ty1, ty2)| self.resources.is_subtype(*ty1, *ty2))
1815    }
1816
1817    fn check_binop128(&mut self) -> Result<()> {
1818        self.pop_operand(Some(ValType::I64))?;
1819        self.pop_operand(Some(ValType::I64))?;
1820        self.pop_operand(Some(ValType::I64))?;
1821        self.pop_operand(Some(ValType::I64))?;
1822        self.push_operand(ValType::I64)?;
1823        self.push_operand(ValType::I64)?;
1824        Ok(())
1825    }
1826
1827    fn check_i64_mul_wide(&mut self) -> Result<()> {
1828        self.pop_operand(Some(ValType::I64))?;
1829        self.pop_operand(Some(ValType::I64))?;
1830        self.push_operand(ValType::I64)?;
1831        self.push_operand(ValType::I64)?;
1832        Ok(())
1833    }
1834
1835    fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> {
1836        if flag {
1837            return Ok(());
1838        }
1839        bail!(self.offset, "{desc} support is not enabled");
1840    }
1841}
1842
1843pub fn ty_to_str(ty: ValType) -> &'static str {
1844    match ty {
1845        ValType::I32 => "i32",
1846        ValType::I64 => "i64",
1847        ValType::F32 => "f32",
1848        ValType::F64 => "f64",
1849        ValType::V128 => "v128",
1850        ValType::Ref(r) => r.wat(),
1851    }
1852}
1853
/// A wrapper "visitor" around the real operator validator internally which
/// exists to check that the required wasm feature is enabled to proceed with
/// validation.
///
/// This validator is macro-generated to ensure that the proposal listed in this
/// crate's macro matches the one that's validated here. Each instruction's
/// visit method validates the specified proposal is enabled and then delegates
/// to `OperatorValidatorTemp` to perform the actual opcode validation.
///
/// The single tuple field is the underlying validator that performs the real
/// per-opcode type checking once the feature gate passes.
struct WasmProposalValidator<'validator, 'resources, T>(
    OperatorValidatorTemp<'validator, 'resources, T>,
);
1865
// Generates one `visit_*` method per operator: each method first checks that
// the operator's gating proposal is enabled (via `check_enabled`) and then
// delegates to the wrapped `OperatorValidatorTemp` visitor.
#[cfg_attr(not(feature = "simd"), allow(unused_macro_rules))]
macro_rules! validate_proposal {
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
                validate_proposal!(validate self $proposal / $op);
                self.0.$visit($( $($arg),* )?)
            }
        )*
    };

    // MVP opcodes are unconditionally available, so no feature check is
    // emitted for them.
    (validate self mvp / $op:ident) => {};

    // These opcodes are handled specially below as they were introduced in the
    // bulk memory proposal but are gated by the `bulk_memory_opt`
    // "sub-proposal".
    (validate self $proposal:ident / MemoryFill) => {};
    (validate self $proposal:ident / MemoryCopy) => {};

    // Everything else: require the proposal's feature flag to be enabled.
    (validate $self:ident $proposal:ident / $op:ident) => {
        $self.0.check_enabled($self.0.features.$proposal(), validate_proposal!(desc $proposal))?
    };

    // Human-readable proposal names used in "… support is not enabled" errors.
    (desc simd) => ("SIMD");
    (desc relaxed_simd) => ("relaxed SIMD");
    (desc threads) => ("threads");
    (desc shared_everything_threads) => ("shared-everything-threads");
    (desc saturating_float_to_int) => ("saturating float to int conversions");
    (desc reference_types) => ("reference types");
    (desc bulk_memory) => ("bulk memory");
    (desc sign_extension) => ("sign extension operations");
    (desc exceptions) => ("exceptions");
    (desc tail_call) => ("tail calls");
    (desc function_references) => ("function references");
    (desc memory_control) => ("memory control");
    (desc gc) => ("gc");
    (desc legacy_exceptions) => ("legacy exceptions");
    (desc stack_switching) => ("stack switching");
    (desc wide_arithmetic) => ("wide arithmetic");
    (desc custom_descriptors) => ("custom descriptors operations");
}
1907
// The feature-gating visitor: every `visit_*` method is generated by
// `validate_proposal!` from the crate's operator list.
impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    type Output = Result<()>;

    /// Routes SIMD opcodes through this same gating visitor when the `simd`
    /// feature is compiled in.
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }

    crate::for_each_visit_operator!(validate_proposal);
}
1921
// SIMD counterpart of the gating visitor, generated from the crate's SIMD
// operator list.
#[cfg(feature = "simd")]
impl<'a, T> VisitSimdOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    crate::for_each_visit_simd_operator!(validate_proposal);
}
1929
1930#[track_caller]
1931#[inline]
1932fn debug_assert_type_indices_are_ids(ty: ValType) {
1933    if cfg!(debug_assertions) {
1934        if let ValType::Ref(r) = ty {
1935            if let HeapType::Concrete(idx) = r.heap_type() {
1936                debug_assert!(
1937                    matches!(idx, UnpackedIndex::Id(_)),
1938                    "type reference should be a `CoreTypeId`, found {idx:?}"
1939                );
1940            }
1941        }
1942    }
1943}
1944
1945impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T>
1946where
1947    T: WasmModuleResources,
1948{
1949    type Output = Result<()>;
1950
    /// Exposes this validator as the SIMD visitor so SIMD opcodes are
    /// validated by the same instance when the `simd` feature is enabled.
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }
1955
    /// `nop`: no effect on the operand stack.
    fn visit_nop(&mut self) -> Self::Output {
        Ok(())
    }
1959    fn visit_unreachable(&mut self) -> Self::Output {
1960        self.unreachable()?;
1961        Ok(())
1962    }
1963    fn visit_block(&mut self, mut ty: BlockType) -> Self::Output {
1964        self.check_block_type(&mut ty)?;
1965        for ty in self.params(ty)?.rev() {
1966            self.pop_operand(Some(ty))?;
1967        }
1968        self.push_ctrl(FrameKind::Block, ty)?;
1969        Ok(())
1970    }
1971    fn visit_loop(&mut self, mut ty: BlockType) -> Self::Output {
1972        self.check_block_type(&mut ty)?;
1973        for ty in self.params(ty)?.rev() {
1974            self.pop_operand(Some(ty))?;
1975        }
1976        self.push_ctrl(FrameKind::Loop, ty)?;
1977        Ok(())
1978    }
1979    fn visit_if(&mut self, mut ty: BlockType) -> Self::Output {
1980        self.check_block_type(&mut ty)?;
1981        self.pop_operand(Some(ValType::I32))?;
1982        for ty in self.params(ty)?.rev() {
1983            self.pop_operand(Some(ty))?;
1984        }
1985        self.push_ctrl(FrameKind::If, ty)?;
1986        Ok(())
1987    }
1988    fn visit_else(&mut self) -> Self::Output {
1989        let frame = self.pop_ctrl()?;
1990        if frame.kind != FrameKind::If {
1991            bail!(self.offset, "else found outside of an `if` block");
1992        }
1993        self.push_ctrl(FrameKind::Else, frame.block_type)?;
1994        Ok(())
1995    }
1996    fn visit_try_table(&mut self, mut ty: TryTable) -> Self::Output {
1997        self.check_block_type(&mut ty.ty)?;
1998        for ty in self.params(ty.ty)?.rev() {
1999            self.pop_operand(Some(ty))?;
2000        }
2001        let exn_type = ValType::from(RefType::EXN);
2002        for catch in ty.catches {
2003            match catch {
2004                Catch::One { tag, label } => {
2005                    let tag = self.exception_tag_at(tag)?;
2006                    let (ty, kind) = self.jump(label)?;
2007                    let params = tag.params();
2008                    let types = self.label_types(ty, kind)?;
2009                    if params.len() != types.len() {
2010                        bail!(
2011                            self.offset,
2012                            "type mismatch: catch label must have same number of types as tag"
2013                        );
2014                    }
2015                    for (expected, actual) in types.zip(params) {
2016                        self.match_operand(*actual, expected)?;
2017                    }
2018                }
2019                Catch::OneRef { tag, label } => {
2020                    let tag = self.exception_tag_at(tag)?;
2021                    let (ty, kind) = self.jump(label)?;
2022                    let tag_params = tag.params().iter().copied();
2023                    let label_types = self.label_types(ty, kind)?;
2024                    if tag_params.len() + 1 != label_types.len() {
2025                        bail!(
2026                            self.offset,
2027                            "type mismatch: catch_ref label must have one \
2028                             more type than tag types",
2029                        );
2030                    }
2031                    for (expected_label_type, actual_tag_param) in
2032                        label_types.zip(tag_params.chain([exn_type]))
2033                    {
2034                        self.match_operand(actual_tag_param, expected_label_type)?;
2035                    }
2036                }
2037
2038                Catch::All { label } => {
2039                    let (ty, kind) = self.jump(label)?;
2040                    if self.label_types(ty, kind)?.len() != 0 {
2041                        bail!(
2042                            self.offset,
2043                            "type mismatch: catch_all label must have no result types"
2044                        );
2045                    }
2046                }
2047
2048                Catch::AllRef { label } => {
2049                    let (ty, kind) = self.jump(label)?;
2050                    let mut types = self.label_types(ty, kind)?;
2051                    let ty = match (types.next(), types.next()) {
2052                        (Some(ty), None) => ty,
2053                        _ => {
2054                            bail!(
2055                                self.offset,
2056                                "type mismatch: catch_all_ref label must have \
2057                                 exactly one result type"
2058                            );
2059                        }
2060                    };
2061                    if !self.resources.is_subtype(exn_type, ty) {
2062                        bail!(
2063                            self.offset,
2064                            "type mismatch: catch_all_ref label must a \
2065                             subtype of (ref exn)"
2066                        );
2067                    }
2068                }
2069            }
2070        }
2071        self.push_ctrl(FrameKind::TryTable, ty.ty)?;
2072        Ok(())
2073    }
2074    fn visit_throw(&mut self, index: u32) -> Self::Output {
2075        // Check values associated with the exception.
2076        let ty = self.exception_tag_at(index)?;
2077        for ty in ty.clone().params().iter().rev() {
2078            self.pop_operand(Some(*ty))?;
2079        }
2080        // this should be validated when the tag was defined in the module
2081        debug_assert!(ty.results().is_empty());
2082        self.unreachable()?;
2083        Ok(())
2084    }
2085    fn visit_throw_ref(&mut self) -> Self::Output {
2086        self.pop_operand(Some(ValType::EXNREF))?;
2087        self.unreachable()?;
2088        Ok(())
2089    }
    /// `end`: closes the current control frame and pushes its result types
    /// onto the operand stack for the enclosing frame.
    fn visit_end(&mut self) -> Self::Output {
        let mut frame = self.pop_ctrl()?;

        // Note that this `if` isn't included in the appendix right
        // now, but it's used to allow for `if` statements that are
        // missing an `else` block which have the same parameter/return
        // types on the block (since that's valid).
        if frame.kind == FrameKind::If {
            self.push_ctrl(FrameKind::Else, frame.block_type)?;
            frame = self.pop_ctrl()?;
        }
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }
        if self.control.is_empty() {
            // NOTE(review): this appears to sanity-check that a real byte
            // offset was recorded by the time the function's final `end` is
            // reached — confirm the intent before relying on it.
            assert_ne!(self.offset, 0);
        }
        Ok(())
    }
2109    fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
2110        let (ty, kind) = self.jump(relative_depth)?;
2111        for ty in self.label_types(ty, kind)?.rev() {
2112            self.pop_operand(Some(ty))?;
2113        }
2114        self.unreachable()?;
2115        Ok(())
2116    }
2117    fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
2118        self.pop_operand(Some(ValType::I32))?;
2119        let (ty, kind) = self.jump(relative_depth)?;
2120        let label_types = self.label_types(ty, kind)?;
2121        self.pop_push_label_types(label_types)?;
2122        Ok(())
2123    }
    /// `br_table`: pops the `i32` selector, checks that every target label is
    /// compatible with the stack (and has the same arity as the default
    /// label), then pops the default label's types and marks the remainder
    /// unreachable.
    fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let default = self.jump(table.default())?;
        let default_types = self.label_types(default.0, default.1)?;
        for element in table.targets() {
            let relative_depth = element?;
            let block = self.jump(relative_depth)?;
            let label_tys = self.label_types(block.0, block.1)?;
            if label_tys.len() != default_types.len() {
                bail!(
                    self.offset,
                    "type mismatch: br_table target labels have different number of types"
                );
            }
            // Each target's types are matched against the stack without
            // popping, since only one target is actually taken.
            self.match_stack_operands(label_tys)?;
        }
        for ty in default_types.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
2146    fn visit_return(&mut self) -> Self::Output {
2147        self.check_return()?;
2148        Ok(())
2149    }
2150    fn visit_call(&mut self, function_index: u32) -> Self::Output {
2151        let ty = self.type_of_function(function_index)?;
2152        self.check_call_ty(ty)?;
2153        Ok(())
2154    }
2155    fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
2156        let ty = self.type_of_function(function_index)?;
2157        self.check_return_call_ty(ty)?;
2158        Ok(())
2159    }
2160    fn visit_call_ref(&mut self, type_index: u32) -> Self::Output {
2161        let ty = self.check_call_ref_ty(type_index)?;
2162        self.check_call_ty(ty)?;
2163        Ok(())
2164    }
2165    fn visit_return_call_ref(&mut self, type_index: u32) -> Self::Output {
2166        let ty = self.check_call_ref_ty(type_index)?;
2167        self.check_return_call_ty(ty)?;
2168        Ok(())
2169    }
2170    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
2171        let ty = self.check_call_indirect_ty(type_index, table_index)?;
2172        self.check_call_ty(ty)?;
2173        Ok(())
2174    }
2175    fn visit_return_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
2176        let ty = self.check_call_indirect_ty(type_index, table_index)?;
2177        self.check_return_call_ty(ty)?;
2178        Ok(())
2179    }
2180    fn visit_drop(&mut self) -> Self::Output {
2181        self.pop_operand(None)?;
2182        Ok(())
2183    }
    /// Untyped `select`: pops the `i32` condition and two operands, which must
    /// not be reference types, and pushes their common type.
    fn visit_select(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let ty1 = self.pop_operand(None)?;
        let ty2 = self.pop_operand(None)?;

        let ty = match (ty1, ty2) {
            // All heap-related types aren't allowed with the `select`
            // instruction
            (MaybeType::UnknownRef(..), _)
            | (_, MaybeType::UnknownRef(..))
            | (MaybeType::Known(ValType::Ref(_)), _)
            | (_, MaybeType::Known(ValType::Ref(_))) => {
                bail!(
                    self.offset,
                    "type mismatch: select only takes integral types"
                )
            }

            // If one operand is the "bottom" type then whatever the other
            // operand is is the result of the `select`
            (MaybeType::Bottom, t) | (t, MaybeType::Bottom) => t,

            // Otherwise these are two integral types and they must match for
            // `select` to typecheck.
            (t @ MaybeType::Known(t1), MaybeType::Known(t2)) => {
                if t1 != t2 {
                    bail!(
                        self.offset,
                        "type mismatch: select operands have different types"
                    );
                }
                t
            }
        };
        self.push_operand(ty)?;
        Ok(())
    }
2221    fn visit_typed_select(&mut self, mut ty: ValType) -> Self::Output {
2222        self.resources
2223            .check_value_type(&mut ty, &self.features, self.offset)?;
2224        self.pop_operand(Some(ValType::I32))?;
2225        self.pop_operand(Some(ty))?;
2226        self.pop_operand(Some(ty))?;
2227        self.push_operand(ty)?;
2228        Ok(())
2229    }
    /// `select` with a multi-type annotation is always invalid: only a
    /// single-type annotation is allowed, and that case is expected to be
    /// dispatched elsewhere (hence the `debug_assert` on the arity).
    fn visit_typed_select_multi(&mut self, tys: Vec<ValType>) -> Self::Output {
        debug_assert!(tys.len() != 1);
        bail!(self.offset, "invalid result arity");
    }
2234    fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
2235        let ty = self.local(local_index)?;
2236        debug_assert_type_indices_are_ids(ty);
2237        if self.local_inits.is_uninit(local_index) {
2238            bail!(self.offset, "uninitialized local: {}", local_index);
2239        }
2240        self.push_operand(ty)?;
2241        Ok(())
2242    }
2243    fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
2244        let ty = self.local(local_index)?;
2245        self.pop_operand(Some(ty))?;
2246        self.local_inits.set_init(local_index);
2247        Ok(())
2248    }
2249    fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
2250        let expected_ty = self.local(local_index)?;
2251        self.pop_operand(Some(expected_ty))?;
2252        self.local_inits.set_init(local_index);
2253        self.push_operand(expected_ty)?;
2254        Ok(())
2255    }
2256    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
2257        let ty = self.global_type_at(global_index)?.content_type;
2258        debug_assert_type_indices_are_ids(ty);
2259        self.push_operand(ty)?;
2260        Ok(())
2261    }
2262    fn visit_global_atomic_get(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
2263        self.visit_global_get(global_index)?;
2264        // No validation of `ordering` is needed because `global.atomic.get` can
2265        // be used on both shared and unshared globals. But we do need to limit
2266        // which types can be used with this instruction.
2267        let ty = self.global_type_at(global_index)?.content_type;
2268        let supertype = RefType::ANYREF.into();
2269        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
2270            bail!(
2271                self.offset,
2272                "invalid type: `global.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
2273            );
2274        }
2275        Ok(())
2276    }
2277    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
2278        let ty = self.global_type_at(global_index)?;
2279        if !ty.mutable {
2280            bail!(
2281                self.offset,
2282                "global is immutable: cannot modify it with `global.set`"
2283            );
2284        }
2285        self.pop_operand(Some(ty.content_type))?;
2286        Ok(())
2287    }
    /// `global.atomic.set`: behaves like `global.set` but is restricted to
    /// `i32`, `i64`, and subtypes of `anyref`.
    fn visit_global_atomic_set(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_set(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.set` can
        // be used on both shared and unshared globals. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(
                self.offset,
                "invalid type: `global.atomic.set` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
2302    fn visit_global_atomic_rmw_add(
2303        &mut self,
2304        _ordering: crate::Ordering,
2305        global_index: u32,
2306    ) -> Self::Output {
2307        let ty = self.check_atomic_global_rmw_ty(global_index)?;
2308        self.check_unary_op(ty)
2309    }
2310    fn visit_global_atomic_rmw_sub(
2311        &mut self,
2312        _ordering: crate::Ordering,
2313        global_index: u32,
2314    ) -> Self::Output {
2315        let ty = self.check_atomic_global_rmw_ty(global_index)?;
2316        self.check_unary_op(ty)
2317    }
2318    fn visit_global_atomic_rmw_and(
2319        &mut self,
2320        _ordering: crate::Ordering,
2321        global_index: u32,
2322    ) -> Self::Output {
2323        let ty = self.check_atomic_global_rmw_ty(global_index)?;
2324        self.check_unary_op(ty)
2325    }
2326    fn visit_global_atomic_rmw_or(
2327        &mut self,
2328        _ordering: crate::Ordering,
2329        global_index: u32,
2330    ) -> Self::Output {
2331        let ty = self.check_atomic_global_rmw_ty(global_index)?;
2332        self.check_unary_op(ty)
2333    }
2334    fn visit_global_atomic_rmw_xor(
2335        &mut self,
2336        _ordering: crate::Ordering,
2337        global_index: u32,
2338    ) -> Self::Output {
2339        let ty = self.check_atomic_global_rmw_ty(global_index)?;
2340        self.check_unary_op(ty)
2341    }
2342    fn visit_global_atomic_rmw_xchg(
2343        &mut self,
2344        _ordering: crate::Ordering,
2345        global_index: u32,
2346    ) -> Self::Output {
2347        let ty = self.global_type_at(global_index)?.content_type;
2348        if !(ty == ValType::I32
2349            || ty == ValType::I64
2350            || self.resources.is_subtype(ty, RefType::ANYREF.into()))
2351        {
2352            bail!(
2353                self.offset,
2354                "invalid type: `global.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
2355            );
2356        }
2357        self.check_unary_op(ty)
2358    }
2359    fn visit_global_atomic_rmw_cmpxchg(
2360        &mut self,
2361        _ordering: crate::Ordering,
2362        global_index: u32,
2363    ) -> Self::Output {
2364        let ty = self.global_type_at(global_index)?.content_type;
2365        if !(ty == ValType::I32
2366            || ty == ValType::I64
2367            || self.resources.is_subtype(ty, RefType::EQREF.into()))
2368        {
2369            bail!(
2370                self.offset,
2371                "invalid type: `global.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
2372            );
2373        }
2374        self.check_binary_op(ty)
2375    }
2376
2377    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
2378        let ty = self.check_memarg(memarg)?;
2379        self.pop_operand(Some(ty))?;
2380        self.push_operand(ValType::I32)?;
2381        Ok(())
2382    }
2383    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
2384        let ty = self.check_memarg(memarg)?;
2385        self.pop_operand(Some(ty))?;
2386        self.push_operand(ValType::I64)?;
2387        Ok(())
2388    }
2389    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
2390        self.check_floats_enabled()?;
2391        let ty = self.check_memarg(memarg)?;
2392        self.pop_operand(Some(ty))?;
2393        self.push_operand(ValType::F32)?;
2394        Ok(())
2395    }
2396    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
2397        self.check_floats_enabled()?;
2398        let ty = self.check_memarg(memarg)?;
2399        self.pop_operand(Some(ty))?;
2400        self.push_operand(ValType::F64)?;
2401        Ok(())
2402    }
2403    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
2404        let ty = self.check_memarg(memarg)?;
2405        self.pop_operand(Some(ty))?;
2406        self.push_operand(ValType::I32)?;
2407        Ok(())
2408    }
2409    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
2410        self.visit_i32_load8_s(memarg)
2411    }
2412    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
2413        let ty = self.check_memarg(memarg)?;
2414        self.pop_operand(Some(ty))?;
2415        self.push_operand(ValType::I32)?;
2416        Ok(())
2417    }
2418    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
2419        self.visit_i32_load16_s(memarg)
2420    }
2421    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
2422        let ty = self.check_memarg(memarg)?;
2423        self.pop_operand(Some(ty))?;
2424        self.push_operand(ValType::I64)?;
2425        Ok(())
2426    }
2427    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
2428        self.visit_i64_load8_s(memarg)
2429    }
2430    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
2431        let ty = self.check_memarg(memarg)?;
2432        self.pop_operand(Some(ty))?;
2433        self.push_operand(ValType::I64)?;
2434        Ok(())
2435    }
2436    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
2437        self.visit_i64_load16_s(memarg)
2438    }
2439    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
2440        let ty = self.check_memarg(memarg)?;
2441        self.pop_operand(Some(ty))?;
2442        self.push_operand(ValType::I64)?;
2443        Ok(())
2444    }
2445    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
2446        self.visit_i64_load32_s(memarg)
2447    }
2448    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
2449        let ty = self.check_memarg(memarg)?;
2450        self.pop_operand(Some(ValType::I32))?;
2451        self.pop_operand(Some(ty))?;
2452        Ok(())
2453    }
2454    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
2455        let ty = self.check_memarg(memarg)?;
2456        self.pop_operand(Some(ValType::I64))?;
2457        self.pop_operand(Some(ty))?;
2458        Ok(())
2459    }
2460    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
2461        self.check_floats_enabled()?;
2462        let ty = self.check_memarg(memarg)?;
2463        self.pop_operand(Some(ValType::F32))?;
2464        self.pop_operand(Some(ty))?;
2465        Ok(())
2466    }
2467    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
2468        self.check_floats_enabled()?;
2469        let ty = self.check_memarg(memarg)?;
2470        self.pop_operand(Some(ValType::F64))?;
2471        self.pop_operand(Some(ty))?;
2472        Ok(())
2473    }
2474    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
2475        let ty = self.check_memarg(memarg)?;
2476        self.pop_operand(Some(ValType::I32))?;
2477        self.pop_operand(Some(ty))?;
2478        Ok(())
2479    }
2480    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
2481        let ty = self.check_memarg(memarg)?;
2482        self.pop_operand(Some(ValType::I32))?;
2483        self.pop_operand(Some(ty))?;
2484        Ok(())
2485    }
2486    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
2487        let ty = self.check_memarg(memarg)?;
2488        self.pop_operand(Some(ValType::I64))?;
2489        self.pop_operand(Some(ty))?;
2490        Ok(())
2491    }
2492    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
2493        let ty = self.check_memarg(memarg)?;
2494        self.pop_operand(Some(ValType::I64))?;
2495        self.pop_operand(Some(ty))?;
2496        Ok(())
2497    }
2498    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
2499        let ty = self.check_memarg(memarg)?;
2500        self.pop_operand(Some(ValType::I64))?;
2501        self.pop_operand(Some(ty))?;
2502        Ok(())
2503    }
    // `memory.size` pushes the memory's index type as reported by
    // `check_memory_index`.
    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // `memory.grow` consumes a delta and produces the previous size, both in
    // the memory's index type.
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(index_ty))?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // Constants simply push their value type; the actual immediate value is
    // irrelevant to validation. Float constants additionally require float
    // support to be enabled.
    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // `i32.eqz` is the one comparison with a single operand: pop an i32 and
    // push the boolean result as i32.
    fn visit_i32_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    // Integer comparisons all delegate to the shared comparison checker with
    // the operand type; signed/unsigned variants type-check identically.
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    // `i64.eqz` pops an i64 but, like all comparisons, produces an i32.
    fn visit_i64_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I64))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    // Floating-point comparisons delegate to the float-specific comparison
    // checker with the operand type.
    fn visit_f32_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f64_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    // Integer unary numeric ops (clz/ctz/popcnt) delegate to the shared
    // unary-op checker with the operand type.
    fn visit_i32_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    // Integer binary numeric ops delegate to the shared binary-op checker;
    // signed/unsigned variants type-check identically.
    fn visit_i32_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i64_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    // Floating-point unary/binary ops delegate to the float-specific checkers
    // with the operand type.
    fn visit_f32_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f64_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    // Conversion operators: as the call sites below show, the first argument
    // to `check_conversion_op`/`check_fconversion_op` is the result type and
    // the second is the operand type. The `f`-prefixed checker is used
    // whenever the result is a float.
    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::I64)
    }
    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_demote_f64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::F64)
    }
    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_promote_f32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::F32)
    }
    // Reinterpretations validate like any other conversion between the two
    // types involved.
    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    // Saturating truncations type-check identically to the trapping variants.
    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    // Sign-extension operators keep the operand type, so they validate as
    // unary ops rather than conversions.
    fn visit_i32_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i64_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend32_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
2945    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
2946        self.check_atomic_load(memarg, ValType::I32)
2947    }
2948    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
2949        self.check_atomic_load(memarg, ValType::I32)
2950    }
2951    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
2952        self.check_atomic_load(memarg, ValType::I32)
2953    }
2954    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
2955        self.check_atomic_load(memarg, ValType::I64)
2956    }
2957    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
2958        self.check_atomic_load(memarg, ValType::I64)
2959    }
2960    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
2961        self.check_atomic_load(memarg, ValType::I64)
2962    }
2963    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
2964        self.check_atomic_load(memarg, ValType::I64)
2965    }
2966    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
2967        self.check_atomic_store(memarg, ValType::I32)
2968    }
2969    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
2970        self.check_atomic_store(memarg, ValType::I32)
2971    }
2972    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
2973        self.check_atomic_store(memarg, ValType::I32)
2974    }
2975    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
2976        self.check_atomic_store(memarg, ValType::I64)
2977    }
2978    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
2979        self.check_atomic_store(memarg, ValType::I64)
2980    }
2981    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
2982        self.check_atomic_store(memarg, ValType::I64)
2983    }
2984    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
2985        self.check_atomic_store(memarg, ValType::I64)
2986    }
2987    fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
2988        self.check_atomic_binary_memory_op(memarg, ValType::I32)
2989    }
2990    fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
2991        self.check_atomic_binary_memory_op(memarg, ValType::I32)
2992    }
2993    fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
2994        self.check_atomic_binary_memory_op(memarg, ValType::I32)
2995    }
2996    fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
2997        self.check_atomic_binary_memory_op(memarg, ValType::I32)
2998    }
2999    fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
3000        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3001    }
3002    fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
3003        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3004    }
3005    fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
3006        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3007    }
3008    fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
3009        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3010    }
3011    fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
3012        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3013    }
3014    fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
3015        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3016    }
3017    fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
3018        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3019    }
3020    fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
3021        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3022    }
3023    fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
3024        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3025    }
3026    fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
3027        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3028    }
3029    fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
3030        self.check_atomic_binary_memory_op(memarg, ValType::I32)
3031    }
3032    fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
3033        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3034    }
3035    fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
3036        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3037    }
3038    fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
3039        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3040    }
3041    fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
3042        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3043    }
3044    fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
3045        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3046    }
3047    fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
3048        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3049    }
3050    fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
3051        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3052    }
3053    fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
3054        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3055    }
3056    fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
3057        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3058    }
3059    fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
3060        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3061    }
3062    fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
3063        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3064    }
3065    fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
3066        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3067    }
3068    fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
3069        self.check_atomic_binary_memory_op(memarg, ValType::I64)
3070    }
    // --- Atomic read-modify-write instructions (threads proposal) ---
    //
    // Every variant delegates to a shared helper; the `ValType` argument is
    // the type of the value operand(s) and of the result. The narrow
    // `*8_u`/`*16_u`/`*32_u` variants still take a full i32/i64 operand —
    // only the access width encoded in the opcode differs, which is not
    // something validation needs to distinguish beyond the memarg check
    // performed inside the helper.
    fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    // `xchg` validates with the same shape as the arithmetic RMW ops above,
    // so it shares the same helper.
    fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    // `cmpxchg` uses a dedicated helper — presumably because it consumes an
    // additional `expected` operand compared to the binary RMW shape.
    fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    // `memory.atomic.notify` has the same stack shape as a binary RMW op
    // ([addr, i32] -> [i32]), so it reuses that helper.
    fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    // `memory.atomic.wait32`: [addr, expected:i32, timeout:i64] -> [i32].
    fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?; // timeout
        self.pop_operand(Some(ValType::I32))?; // expected value
        self.pop_operand(Some(ty))?; // address, typed by the memory's index type
        self.push_operand(ValType::I32)?; // wait outcome code (per the threads spec)
        Ok(())
    }
    // `memory.atomic.wait64`: identical to `wait32` except the expected
    // value is an i64.
    fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    // `atomic.fence` has no operands and no immediates to validate.
    fn visit_atomic_fence(&mut self) -> Self::Output {
        Ok(())
    }
    // `ref.null ht` pushes a nullable reference to the given heap type.
    fn visit_ref_null(&mut self, mut heap_type: HeapType) -> Self::Output {
        // First gate the reference type on the enabled feature set (only
        // possible when it fits in a `RefType` at all)...
        if let Some(ty) = RefType::new(true, heap_type) {
            self.features
                .check_ref_type(ty)
                .map_err(|e| BinaryReaderError::new(e, self.offset))?;
        }
        // ...then have the module resources validate the heap type. Note the
        // `&mut`: this call may rewrite (canonicalize) `heap_type` in place.
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;
        let ty = ValType::Ref(
            RefType::new(true, heap_type).expect("existing heap types should be within our limits"),
        );
        self.push_operand(ty)?;
        Ok(())
    }
3170
3171    fn visit_ref_as_non_null(&mut self) -> Self::Output {
3172        let ty = self.pop_ref(None)?.as_non_null();
3173        self.push_operand(ty)?;
3174        Ok(())
3175    }
    // `br_on_null`: pop a reference; branching (when null) must satisfy the
    // target label's types, and on fall-through the reference is pushed back
    // with nullability stripped.
    fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output {
        let ref_ty = self.pop_ref(None)?.as_non_null();
        let (ft, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ft, kind)?;
        // Check the label types against the stack without consuming them.
        self.pop_push_label_types(label_types)?;
        self.push_operand(ref_ty)?;
        Ok(())
    }
    // `br_on_non_null`: the target label must end with a reference type; the
    // popped operand must be a (possibly null) version of that type, and it
    // is consumed here (it travels with the branch, not the fall-through).
    fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
        let (ft, kind) = self.jump(relative_depth)?;

        let mut label_types = self.label_types(ft, kind)?;
        // The last label type is the reference carried by the branch.
        let expected = match label_types.next_back() {
            None => bail!(
                self.offset,
                "type mismatch: br_on_non_null target has no label types",
            ),
            Some(ValType::Ref(ty)) => ty,
            Some(_) => bail!(
                self.offset,
                "type mismatch: br_on_non_null target does not end with heap type",
            ),
        };
        self.pop_ref(Some(expected.nullable()))?;

        // Remaining (non-reference) label types must also be satisfied.
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
3204    fn visit_ref_is_null(&mut self) -> Self::Output {
3205        self.pop_ref(None)?;
3206        self.push_operand(ValType::I32)?;
3207        Ok(())
3208    }
    // `ref.func f` pushes a non-null reference to function `f`'s type.
    fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
        let type_id = match self.resources.type_id_of_function(function_index) {
            Some(id) => id,
            None => bail!(
                self.offset,
                "unknown function {}: function index out of bounds",
                function_index,
            ),
        };
        // Functions must be forward-declared (e.g. in an element or export
        // section) before they can be referenced from code.
        if !self.resources.is_function_referenced(function_index) {
            bail!(self.offset, "undeclared function reference");
        }

        // With the custom-descriptors proposal a function with an exact type
        // produces an exact reference; otherwise a plain concrete reference.
        let index = UnpackedIndex::Id(type_id);
        let hty = if self.features.custom_descriptors()
            && self.resources.has_function_exact_type(function_index)
        {
            HeapType::Exact(index)
        } else {
            HeapType::Concrete(index)
        };
        let ty = ValType::Ref(RefType::new(false, hty).ok_or_else(|| {
            BinaryReaderError::new("implementation limit: type index too large", self.offset)
        })?);
        self.push_operand(ty)?;
        Ok(())
    }
    // `ref.eq` compares two `eq`-typed references, which must agree on
    // `shared`-ness, and pushes an i32 boolean.
    fn visit_ref_eq(&mut self) -> Self::Output {
        let a = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
        let b = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
        let a_is_shared = a.is_maybe_shared(&self.resources);
        let b_is_shared = b.is_maybe_shared(&self.resources);
        match (a_is_shared, b_is_shared) {
            // One or both of the types are from unreachable code; assume
            // the shared-ness matches.
            (None, Some(_)) | (Some(_), None) | (None, None) => {}

            (Some(is_a_shared), Some(is_b_shared)) => {
                if is_a_shared != is_b_shared {
                    bail!(
                        self.offset,
                        "type mismatch: expected `ref.eq` types to match `shared`-ness"
                    );
                }
            }
        }
        self.push_operand(ValType::I32)
    }
    // `memory.init`: [dst-addr, src-offset:i32, len:i32] -> []; the data
    // segment index and memory index must both be in bounds.
    fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.check_data_segment(segment)?;
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(ValType::I32))?; // offset into the data segment
        self.pop_operand(Some(ty))?; // destination address (memory's index type)
        Ok(())
    }
    // `data.drop` only needs the segment index to be valid; no stack effect.
    fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
        self.check_data_segment(segment)?;
        Ok(())
    }
    // `memory.copy` between two (possibly distinct) memories whose index
    // types may differ under memory64.
    fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
        self.check_enabled(self.features.bulk_memory_opt(), "bulk memory")?;
        let dst_ty = self.check_memory_index(dst)?;
        let src_ty = self.check_memory_index(src)?;

        // The length operand here is the smaller of src/dst, which is
        // i32 if one is i32
        self.pop_operand(Some(match src_ty {
            ValType::I32 => ValType::I32,
            _ => dst_ty,
        }))?;

        // ... and the offset into each memory is required to be
        // whatever the indexing type is for that memory
        self.pop_operand(Some(src_ty))?;
        self.pop_operand(Some(dst_ty))?;
        Ok(())
    }
    // `memory.fill`: [dst-addr, value:i32, len] -> [], where addresses and
    // length use the memory's index type.
    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
        self.check_enabled(self.features.bulk_memory_opt(), "bulk memory")?;
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?; // length
        self.pop_operand(Some(ValType::I32))?; // fill byte value
        self.pop_operand(Some(ty))?; // destination address
        Ok(())
    }
    // `memory.discard` takes two operands of the memory's index type
    // (address and length, per the memory-control proposal).
    fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `table.init`: the element segment's type must be a subtype of the
    // table's element type; [dst-index, src-offset:i32, len:i32] -> [].
    fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let segment_ty = self.element_type_at(segment)?;
        if !self
            .resources
            .is_subtype(ValType::Ref(segment_ty), ValType::Ref(table.element_type))
        {
            bail!(self.offset, "type mismatch");
        }
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(ValType::I32))?; // offset into the segment
        self.pop_operand(Some(table.index_type()))?; // destination index
        Ok(())
    }
    // `elem.drop` only needs the segment index to be valid; no stack effect.
    fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
        self.element_type_at(segment)?;
        Ok(())
    }
    // `table.copy`: the source element type must be a subtype of the
    // destination's; index types may differ under table64.
    fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
        let src = self.table_type_at(src_table)?;
        let dst = self.table_type_at(dst_table)?;
        if !self.resources.is_subtype(
            ValType::Ref(src.element_type),
            ValType::Ref(dst.element_type),
        ) {
            bail!(self.offset, "type mismatch");
        }

        // The length operand here is the smaller of src/dst, which is
        // i32 if one is i32
        self.pop_operand(Some(match src.index_type() {
            ValType::I32 => ValType::I32,
            _ => dst.index_type(),
        }))?;

        // ... and the offset into each table is required to be
        // whatever the indexing type is for that table
        self.pop_operand(Some(src.index_type()))?;
        self.pop_operand(Some(dst.index_type()))?;
        Ok(())
    }
    // `table.get`: [index] -> [element-type].
    fn visit_table_get(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(table.element_type)?;
        Ok(())
    }
    // Atomic variant: validates like `table.get` plus an element-type
    // restriction (shared-everything-threads proposal).
    fn visit_table_atomic_get(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_get(table)?;
        // No validation of `ordering` is needed because `table.atomic.get` can
        // be used on both shared and unshared tables. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.get` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `table.set`: [index, value] -> [].
    fn visit_table_set(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.element_type.into()))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
    // Atomic variant: validates like `table.set` plus an element-type
    // restriction (shared-everything-threads proposal).
    fn visit_table_atomic_set(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_set(table)?;
        // No validation of `ordering` is needed because `table.atomic.set` can
        // be used on both shared and unshared tables. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.set` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `table.grow`: [init-value, delta] -> [previous-size], sizes typed by
    // the table's index type.
    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.pop_operand(Some(table.element_type.into()))?;
        self.push_operand(table.index_type())?;
        Ok(())
    }
    // `table.size`: [] -> [size].
    fn visit_table_size(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        self.push_operand(table.index_type())?;
        Ok(())
    }
    // `table.fill`: [start-index, value, len] -> [].
    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.pop_operand(Some(table.element_type.into()))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
    // `table.atomic.rmw.xchg`: [index, new-value] -> [old-value]; the
    // element type must be a subtype of `(shared anyref)`.
    fn visit_table_atomic_rmw_xchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.xchg` only allows subtypes of `anyref`"
            );
        }
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // `table.atomic.rmw.cmpxchg`: [index, expected, new-value] ->
    // [old-value]; comparison requires a subtype of `(shared eqref)`.
    fn visit_table_atomic_rmw_cmpxchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::EQREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.cmpxchg` only allows subtypes of `eqref`"
            );
        }
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // `struct.new`: pop field initializers (last field on top of the stack)
    // and push an (exact, where available) reference to the struct type.
    // Types that declare a descriptor must use the `_desc` variants instead.
    fn visit_struct_new(&mut self, struct_type_index: u32) -> Self::Output {
        if let Some(_) = self
            .sub_type_at(struct_type_index)?
            .composite_type
            .descriptor_idx
        {
            bail!(
                self.offset,
                "type with descriptor requires descriptor allocation: `struct.new` with type {struct_type_index}"
            );
        }

        let struct_ty = self.struct_type_at(struct_type_index)?;
        // Fields are popped in reverse declaration order.
        for ty in struct_ty.fields.iter().rev() {
            self.pop_operand(Some(ty.element_type.unpack()))?;
        }
        self.push_exact_ref_if_available(false, struct_type_index)?;
        Ok(())
    }
    // `struct.new_default`: like `struct.new` but takes no operands; every
    // field type must therefore be defaultable.
    fn visit_struct_new_default(&mut self, type_index: u32) -> Self::Output {
        if let Some(_) = self.sub_type_at(type_index)?.composite_type.descriptor_idx {
            bail!(
                self.offset,
                "type with descriptor requires descriptor allocation: `struct.new_default` with type {type_index}"
            );
        }

        let ty = self.struct_type_at(type_index)?;
        for field in ty.fields.iter() {
            let val_ty = field.element_type.unpack();
            if !val_ty.is_defaultable() {
                bail!(
                    self.offset,
                    "invalid `struct.new_default`: {val_ty} field is not defaultable"
                );
            }
        }
        self.push_exact_ref_if_available(false, type_index)?;
        Ok(())
    }
    // `struct.new_desc`: only valid for described types; additionally pops
    // an exact (nullable) reference to the descriptor type before the field
    // initializers.
    fn visit_struct_new_desc(&mut self, struct_type_index: u32) -> Self::Output {
        if let Some(descriptor_idx) = self
            .sub_type_at(struct_type_index)?
            .composite_type
            .descriptor_idx
        {
            let ty = ValType::Ref(RefType::exact(true, descriptor_idx));
            self.pop_operand(Some(ty))?;
        } else {
            bail!(
                self.offset,
                "invalid `struct.new_desc`: type {struct_type_index} is not described"
            );
        }
        let struct_ty = self.struct_type_at(struct_type_index)?;
        // Fields are popped in reverse declaration order.
        for ty in struct_ty.fields.iter().rev() {
            self.pop_operand(Some(ty.element_type.unpack()))?;
        }
        self.push_exact_ref_if_available(false, struct_type_index)?;
        Ok(())
    }
    // `struct.new_default_desc`: descriptor operand plus the defaultable-
    // fields requirement of `struct.new_default`.
    fn visit_struct_new_default_desc(&mut self, type_index: u32) -> Self::Output {
        if let Some(descriptor_idx) = self.sub_type_at(type_index)?.composite_type.descriptor_idx {
            let ty = ValType::Ref(RefType::exact(true, descriptor_idx));
            self.pop_operand(Some(ty))?;
        } else {
            bail!(
                self.offset,
                "invalid `struct.new_default_desc`: type {type_index} is not described"
            );
        }
        let ty = self.struct_type_at(type_index)?;
        for field in ty.fields.iter() {
            let val_ty = field.element_type.unpack();
            if !val_ty.is_defaultable() {
                // NOTE(review): this message says `struct.new_default` even
                // though the instruction is `struct.new_default_desc` —
                // possibly intentional (shared wording), worth confirming.
                bail!(
                    self.offset,
                    "invalid `struct.new_default`: {val_ty} field is not defaultable"
                );
            }
        }
        self.push_exact_ref_if_available(false, type_index)?;
        Ok(())
    }
    // `struct.get`: [ref] -> [field-type]; packed (i8/i16) fields must use
    // the `_s`/`_u` variants instead.
    fn visit_struct_get(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "can only use struct `get` with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
    fn visit_struct_atomic_get(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_get(struct_type_index, field_index)?;
        // The `atomic` version has some additional type restrictions.
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        let is_valid_type = match ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `struct.get_s`: sign-extending read of a packed (i8/i16) field;
    // rejects non-packed storage types.
    fn visit_struct_get_s(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if !field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "cannot use struct.get_s with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
    fn visit_struct_atomic_get_s(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_get_s(struct_type_index, field_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        debug_assert!(matches!(
            self.struct_field_at(struct_type_index, field_index)?
                .element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    // `struct.get_u`: zero-extending read of a packed (i8/i16) field;
    // rejects non-packed storage types.
    fn visit_struct_get_u(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if !field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "cannot use struct.get_u with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
3598    fn visit_struct_atomic_get_u(
3599        &mut self,
3600        _ordering: Ordering,
3601        struct_type_index: u32,
3602        field_index: u32,
3603    ) -> Self::Output {
3604        self.visit_struct_get_s(struct_type_index, field_index)?;
3605        // This instruction has the same type restrictions as the non-`atomic` version.
3606        debug_assert!(matches!(
3607            self.struct_field_at(struct_type_index, field_index)?
3608                .element_type,
3609            StorageType::I8 | StorageType::I16
3610        ));
3611        Ok(())
3612    }
    // `struct.set`: [ref, value] -> []; the field must be mutable.
    fn visit_struct_set(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.mutable_struct_field_at(struct_type_index, field_index)?;
        self.pop_operand(Some(field_ty.element_type.unpack()))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        Ok(())
    }
    fn visit_struct_atomic_set(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_set(struct_type_index, field_index)?;
        // The `atomic` version has some additional type restrictions.
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        // Unlike the atomic getters, packed fields are allowed here, so the
        // match is exhaustive without a catch-all arm.
        let is_valid_type = match ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // Arithmetic/bitwise struct-field atomic RMW ops all share one helper;
    // the string argument is only used to name the instruction in errors.
    fn visit_struct_atomic_rmw_add(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("add", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_sub(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("sub", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_and(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("and", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_or(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("or", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_xor(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("xor", struct_type_index, field_index)
    }
    // `struct.atomic.rmw.xchg`: [ref, new-value] -> [old-value]; the mutable
    // field must be `i32`, `i64`, or a subtype of `(shared anyref)`.
    fn visit_struct_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            _ => false, // packed (i8/i16) fields are not allowed
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    // `struct.atomic.rmw.cmpxchg`: [ref, expected, new-value] ->
    // [old-value]; references must support equality, hence `(shared eqref)`.
    fn visit_struct_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            _ => false, // packed (i8/i16) fields are not allowed
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    // `array.new`: [init-value, length:i32] -> [ref].
    fn visit_array_new(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.push_exact_ref_if_available(false, type_index)
    }
    // `array.new_default`: [length:i32] -> [ref]; the element type must be
    // defaultable since no init value is supplied.
    fn visit_array_new_default(&mut self, type_index: u32) -> Self::Output {
        let ty = self.array_type_at(type_index)?;
        let val_ty = ty.element_type.unpack();
        if !val_ty.is_defaultable() {
            bail!(
                self.offset,
                "invalid `array.new_default`: {val_ty} field is not defaultable"
            );
        }
        self.pop_operand(Some(ValType::I32))?;
        self.push_exact_ref_if_available(false, type_index)
    }
    // `array.new_fixed`: pops `n` element initializers.
    fn visit_array_new_fixed(&mut self, type_index: u32, n: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type.unpack();
        for _ in 0..n {
            self.pop_operand(Some(elem_ty))?;
        }
        self.push_exact_ref_if_available(false, type_index)
    }
    // `array.new_data`: build an array from a data segment; only numeric and
    // vector element types make sense (bytes can't encode references).
    fn visit_array_new_data(&mut self, type_index: u32, data_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type.unpack();
        match elem_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "type mismatch: array.new_data can only create arrays with numeric and vector elements"
            ),
        }
        self.check_data_segment(data_index)?;
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(ValType::I32))?; // offset into the segment
        self.push_exact_ref_if_available(false, type_index)
    }
    // `array.new_elem`: build an array from an element segment; the dual of
    // `array.new_data` — only reference element types, and the segment's
    // type must be a subtype of the array's element type.
    fn visit_array_new_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.new_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.new_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(ValType::I32))?; // offset into the segment
        self.push_exact_ref_if_available(false, type_index)
    }
    // `array.get`: [ref, index:i32] -> [element-type]; packed (i8/i16)
    // element types must use the `_s`/`_u` variants instead.
    fn visit_array_get(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get with packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    fn visit_array_atomic_get(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get(type_index)?;
        // The `atomic` version has some additional type restrictions.
        let elem_ty = self.array_type_at(type_index)?.element_type;
        let is_valid_type = match elem_ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    fn visit_array_get_s(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        // The sign-extending variant is only meaningful for packed (i8/i16)
        // storage; full-width element types must use plain `array.get`.
        if !elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get_s with non-packed storage types"
            )
        }
        // Stack: [.. arrayref i32-index] -> [.. unpacked element (i32)].
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    fn visit_array_atomic_get_s(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get_s(type_index)?;
        // This instruction has the same type restrictions as the non-`atomic`
        // version, which already guaranteed packed (i8/i16) storage above, so
        // only a debug assertion is needed here.
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    fn visit_array_get_u(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        // The zero-extending variant is only meaningful for packed (i8/i16)
        // storage; full-width element types must use plain `array.get`.
        if !elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get_u with non-packed storage types"
            )
        }
        // Stack: [.. arrayref i32-index] -> [.. unpacked element (i32)].
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    fn visit_array_atomic_get_u(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get_u(type_index)?;
        // This instruction has the same type restrictions as the non-`atomic`
        // version, which already guaranteed packed (i8/i16) storage above, so
        // only a debug assertion is needed here.
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    fn visit_array_set(&mut self, type_index: u32) -> Self::Output {
        // Storing requires the array's field to be mutable.
        let array_ty = self.mutable_array_type_at(type_index)?;
        // Stack: [.. arrayref i32-index value] -> [..]; pops proceed from the
        // top of the stack downwards.
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    fn visit_array_atomic_set(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_set(type_index)?;
        // The `atomic` version has some additional type restrictions.
        let elem_ty = self.array_type_at(type_index)?.element_type;
        let is_valid_type = match elem_ty {
            // Unlike `array.atomic.get`, packed storage is permitted here.
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // Other value types are allowed only when they are subtypes of
            // shared `anyref`.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    fn visit_array_len(&mut self) -> Self::Output {
        // Stack: [.. arrayref] -> [.. i32]; any (possibly shared) reference
        // to the abstract `array` heap type is accepted.
        self.pop_maybe_shared_ref(AbstractHeapType::Array)?;
        self.push_operand(ValType::I32)
    }
    fn visit_array_fill(&mut self, array_type_index: u32) -> Self::Output {
        // Filling requires the array's field to be mutable.
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        // Stack: [.. arrayref i32-offset value i32-length] -> [..].
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    fn visit_array_copy(&mut self, type_index_dst: u32, type_index_src: u32) -> Self::Output {
        // The destination array must be mutable; the source only needs to be
        // readable.
        let array_ty_dst = self.mutable_array_type_at(type_index_dst)?;
        let array_ty_src = self.array_type_at(type_index_src)?;
        // Packed storage must match exactly; value types only need the source
        // element type to be a subtype of the destination element type.
        match (array_ty_dst.element_type, array_ty_src.element_type) {
            (StorageType::I8, StorageType::I8) => {}
            (StorageType::I8, ty) => bail!(
                self.offset,
                "array types do not match: expected i8, found {ty}"
            ),
            (StorageType::I16, StorageType::I16) => {}
            (StorageType::I16, ty) => bail!(
                self.offset,
                "array types do not match: expected i16, found {ty}"
            ),
            (StorageType::Val(dst), StorageType::Val(src)) => {
                if !self.resources.is_subtype(src, dst) {
                    bail!(
                        self.offset,
                        "array types do not match: expected {dst}, found {src}"
                    )
                }
            }
            (StorageType::Val(dst), src) => {
                bail!(
                    self.offset,
                    "array types do not match: expected {dst}, found {src}"
                )
            }
        }
        // Stack: [.. dstref i32-dst-off srcref i32-src-off i32-len] -> [..].
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_src)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_dst)?;
        Ok(())
    }
    fn visit_array_init_data(
        &mut self,
        array_type_index: u32,
        array_data_index: u32,
    ) -> Self::Output {
        // Initialization requires a mutable array field.
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        let val_ty = array_ty.element_type.unpack();
        // Data segments hold raw bytes, so only numeric/vector element types
        // can be initialized from one; reference elements cannot.
        match val_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "invalid array.init_data: array type is not numeric or vector"
            ),
        }
        self.check_data_segment(array_data_index)?;
        // Stack: [.. arrayref i32-dst-off i32-src-off i32-len] -> [..].
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    fn visit_array_init_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        // Initialization requires a mutable array field.
        let array_ty = self.mutable_array_type_at(type_index)?;
        // Element segments hold references, so the array's element type must
        // itself be a reference type.
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.init_elem can only create arrays with reference elements"
            ),
        };
        // The segment's element type must be usable wherever the array's
        // element type is expected.
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.init_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        // Stack: [.. arrayref i32-dst-off i32-src-off i32-len] -> [..].
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    // The arithmetic/bitwise `array.atomic.rmw.*` instructions all share the
    // same validation logic; only the operation name (used in error messages)
    // differs, so each visitor delegates to `check_array_atomic_rmw`.
    fn visit_array_atomic_rmw_add(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("add", type_index)
    }
    fn visit_array_atomic_rmw_sub(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("sub", type_index)
    }
    fn visit_array_atomic_rmw_and(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("and", type_index)
    }
    fn visit_array_atomic_rmw_or(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("or", type_index)
    }
    fn visit_array_atomic_rmw_xor(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("xor", type_index)
    }
    fn visit_array_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        // Exchanging requires a mutable array field.
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // Non-integer value types are allowed only when they are subtypes
            // of shared `anyref` (no arithmetic is performed by `xchg`).
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed (i8/i16) storage is rejected.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        // Stack: [.. arrayref i32-index new-value] -> [.. old-value].
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    fn visit_array_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        // Compare-exchange requires a mutable array field.
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // Note `eqref` here rather than `anyref`: cmpxchg must compare
            // the stored value against the expected operand.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            // Packed (i8/i16) storage is rejected.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        // Stack: [.. arrayref i32-index expected replacement] -> [.. old-value].
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    fn visit_any_convert_extern(&mut self) -> Self::Output {
        // Converts an `extern` reference to an `any` reference, preserving
        // the operand's nullability and sharedness.
        let any_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Extern)? {
            // No precise input type is known; record only that the result is
            // some reference in the `any` hierarchy.
            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
                MaybeType::UnknownRef(Some(AbstractHeapType::Any))
            }
            MaybeType::Known(ty) => {
                let shared = self.resources.is_shared(ty);
                let heap_type = HeapType::Abstract {
                    shared,
                    ty: AbstractHeapType::Any,
                };
                let any_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
                MaybeType::Known(any_ref)
            }
        };
        self.push_operand(any_ref)
    }
    fn visit_extern_convert_any(&mut self) -> Self::Output {
        // Mirror of `any.convert_extern`: converts an `any` reference to an
        // `extern` reference, preserving nullability and sharedness.
        let extern_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Any)? {
            // No precise input type is known; record only that the result is
            // some reference in the `extern` hierarchy.
            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
                MaybeType::UnknownRef(Some(AbstractHeapType::Extern))
            }
            MaybeType::Known(ty) => {
                let shared = self.resources.is_shared(ty);
                let heap_type = HeapType::Abstract {
                    shared,
                    ty: AbstractHeapType::Extern,
                };
                let extern_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
                MaybeType::Known(extern_ref)
            }
        };
        self.push_operand(extern_ref)
    }
    // The `ref.test`/`ref.cast` family shares common validation helpers; the
    // boolean argument selects whether the target reference type is nullable
    // (i.e. whether a null operand passes the test/cast).
    fn visit_ref_test_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_test(false, heap_type)
    }
    fn visit_ref_test_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_test(true, heap_type)
    }
    fn visit_ref_cast_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast(false, heap_type)
    }
    fn visit_ref_cast_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast(true, heap_type)
    }
    fn visit_br_on_cast(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // Validate (and canonicalize, in place) both type annotations.
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        // Validate the relationship between the source and target types (see
        // `check_br_on_cast_type_hierarchy`).
        self.check_br_on_cast_type_hierarchy(from_ref_type, to_ref_type)?;

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_types = self.label_types(block_ty, frame_kind)?;

        // On a successful cast the branch is taken, so the cast target must
        // be a subtype of the label's last result type.
        match label_types.next_back() {
            Some(label_ty) if self.resources.is_subtype(to_ref_type.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: casting to type {to_ref_type}, but it does not match \
                 label result type {label_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: br_on_cast to label with empty types, must have a reference type"
            ),
        };

        // The remaining label types must be present beneath the operand.
        self.pop_push_label_types(label_types)?;
        // On fall-through the operand is known *not* to be of the target
        // type, so the remaining type is the set difference of the two.
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        self.push_operand(diff_ty)?;
        Ok(())
    }
    fn visit_br_on_cast_fail(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // Validate (and canonicalize, in place) both type annotations.
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        // Validate the relationship between the source and target types (see
        // `check_br_on_cast_type_hierarchy`).
        self.check_br_on_cast_type_hierarchy(from_ref_type, to_ref_type)?;

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_tys = self.label_types(block_ty, frame_kind)?;

        // Inverse of `br_on_cast`: the branch is taken when the cast *fails*,
        // so the label receives the difference type while the fall-through
        // path receives the cast target type.
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        match label_tys.next_back() {
            Some(label_ty) if self.resources.is_subtype(diff_ty.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: expected label result type {label_ty}, found {diff_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: expected a reference type, found nothing"
            ),
        }

        // The remaining label types must be present beneath the operand.
        self.pop_push_label_types(label_tys)?;
        self.push_operand(to_ref_type)?;
        Ok(())
    }
    fn visit_ref_i31(&mut self) -> Self::Output {
        // [i32] -> [(ref i31)]: wrap an i32 into an (unshared) i31 reference.
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::Ref(RefType::I31))
    }
    fn visit_ref_i31_shared(&mut self) -> Self::Output {
        // Same as `ref.i31` but the result is a *shared* i31 reference.
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::Ref(
            RefType::I31.shared().expect("i31 is abstract"),
        ))
    }
    fn visit_i31_get_s(&mut self) -> Self::Output {
        // [(ref null i31)] -> [i32]; the `_s` suffix selects sign-extension
        // of the 31-bit payload at runtime.
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
    fn visit_i31_get_u(&mut self) -> Self::Output {
        // [(ref null i31)] -> [i32]; the `_u` suffix selects zero-extension
        // of the 31-bit payload at runtime.
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
    fn visit_try(&mut self, mut ty: BlockType) -> Self::Output {
        // Legacy exception-handling `try`: typed like a plain block, but a
        // `LegacyTry` frame is recorded so that `catch`/`catch_all`/`delegate`
        // can verify they appear in a valid position.
        self.check_block_type(&mut ty)?;
        // The block's parameters are consumed from the operand stack...
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        // ...and a new control frame of kind `LegacyTry` is begun.
        self.push_ctrl(FrameKind::LegacyTry, ty)?;
        Ok(())
    }
4211    fn visit_catch(&mut self, index: u32) -> Self::Output {
4212        let frame = self.pop_ctrl()?;
4213        if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
4214            bail!(self.offset, "catch found outside of an `try` block");
4215        }
4216        // Start a new frame and push `exnref` value.
4217        let height = self.operands.len();
4218        let init_height = self.local_inits.push_ctrl();
4219        self.control.push(Frame {
4220            kind: FrameKind::LegacyCatch,
4221            block_type: frame.block_type,
4222            height,
4223            unreachable: false,
4224            init_height,
4225        });
4226        // Push exception argument types.
4227        let ty = self.exception_tag_at(index)?;
4228        for ty in ty.params() {
4229            self.push_operand(*ty)?;
4230        }
4231        Ok(())
4232    }
    fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
        // This is not a jump, but we need to check that the `rethrow`
        // targets an actual `catch` to get the exception.
        let (_, kind) = self.jump(relative_depth)?;
        if kind != FrameKind::LegacyCatch && kind != FrameKind::LegacyCatchAll {
            bail!(
                self.offset,
                "invalid rethrow label: target was not a `catch` block"
            );
        }
        // `rethrow` never falls through, so the remainder of this block is
        // marked unreachable.
        self.unreachable()?;
        Ok(())
    }
4246    fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output {
4247        let frame = self.pop_ctrl()?;
4248        if frame.kind != FrameKind::LegacyTry {
4249            bail!(self.offset, "delegate found outside of an `try` block");
4250        }
4251        // This operation is not a jump, but we need to check the
4252        // depth for validity
4253        let _ = self.jump(relative_depth)?;
4254        for ty in self.results(frame.block_type)? {
4255            self.push_operand(ty)?;
4256        }
4257        Ok(())
4258    }
    fn visit_catch_all(&mut self) -> Self::Output {
        // `catch_all` terminates the current `try`/`catch` frame. It must be
        // the final handler, so a second `catch_all` is rejected.
        let frame = self.pop_ctrl()?;
        if frame.kind == FrameKind::LegacyCatchAll {
            bail!(self.offset, "only one catch_all allowed per `try` block");
        } else if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
            bail!(self.offset, "catch_all found outside of a `try` block");
        }
        // Open a new `LegacyCatchAll` frame with the same block type; unlike
        // `catch`, no exception payload types are pushed.
        let height = self.operands.len();
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind: FrameKind::LegacyCatchAll,
            block_type: frame.block_type,
            height,
            unreachable: false,
            init_height,
        });
        Ok(())
    }
    fn visit_cont_new(&mut self, type_index: u32) -> Self::Output {
        // [(ref null $ft)] -> [(ref $ct)]: wrap a function reference into a
        // new continuation of type `type_index`.
        let cont_ty = self.cont_type_at(type_index)?;
        // `cont_ty.0` is the continuation's underlying function type index;
        // the operand must be a (nullable) reference to that function type.
        let rt = RefType::concrete(true, cont_ty.0);
        self.pop_ref(Some(rt))?;
        // The resulting continuation reference is non-nullable.
        self.push_concrete_ref(false, type_index)?;
        Ok(())
    }
    fn visit_cont_bind(&mut self, argument_index: u32, result_index: u32) -> Self::Output {
        // Partially applies a continuation: binds a prefix of its expected
        // arguments now, yielding a continuation expecting only the suffix.
        // [ts1 ts1'] -> [ts2]
        let arg_cont = self.cont_type_at(argument_index)?;
        let arg_func = self.func_type_of_cont_type(arg_cont);
        // [ts1''] -> [ts2']
        let res_cont = self.cont_type_at(result_index)?;
        let res_func = self.func_type_of_cont_type(res_cont);

        // Verify that the argument's domain is at least as large as the
        // result's domain.
        if arg_func.params().len() < res_func.params().len() {
            bail!(self.offset, "type mismatch in continuation arguments");
        }

        // Number of parameters bound now (the length of the ts1 prefix).
        let argcnt = arg_func.params().len() - res_func.params().len();

        // Check that [ts1'] -> [ts2] <: [ts1''] -> [ts2']
        // (contravariant in parameters, covariant in results).
        if !self.is_subtype_many(res_func.params(), &arg_func.params()[argcnt..])
            || arg_func.results().len() != res_func.results().len()
            || !self.is_subtype_many(arg_func.results(), res_func.results())
        {
            bail!(self.offset, "type mismatch in continuation types");
        }

        // Check that the continuation is available on the stack.
        self.pop_concrete_ref(true, argument_index)?;

        // Check that the argument prefix is available on the stack.
        for &ty in arg_func.params().iter().take(argcnt).rev() {
            self.pop_operand(Some(ty))?;
        }

        // Construct the result type.
        self.push_concrete_ref(false, result_index)?;

        Ok(())
    }
    fn visit_suspend(&mut self, tag_index: u32) -> Self::Output {
        // For a tag of type [ts1] -> [ts2]: `suspend` consumes ts1 and, when
        // the continuation is later resumed, produces ts2.
        let ft = &self.tag_at(tag_index)?;
        for &ty in ft.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }
        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    fn visit_resume(&mut self, type_index: u32, table: ResumeTable) -> Self::Output {
        // [ts1] -> [ts2]
        // The resume table's handler labels are validated against the
        // continuation type by `check_resume_table`.
        let ft = self.check_resume_table(table, type_index)?;
        // The continuation reference itself is on top of the stack.
        self.pop_concrete_ref(true, type_index)?;
        // Check that ts1 are available on the stack.
        for &ty in ft.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make ts2 available on the stack.
        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    fn visit_resume_throw(
        &mut self,
        type_index: u32,
        tag_index: u32,
        table: ResumeTable,
    ) -> Self::Output {
        // Resumes a continuation by throwing an exception into it rather
        // than supplying its expected arguments.
        // [ts1] -> [ts2]
        let ft = self.check_resume_table(table, type_index)?;
        // [ts1'] -> []
        // The thrown tag must be a genuine exception tag (no results).
        let tag_ty = self.exception_tag_at(tag_index)?;
        if tag_ty.results().len() != 0 {
            bail!(self.offset, "type mismatch: non-empty tag result type")
        }
        // The continuation reference itself is on top of the stack.
        self.pop_concrete_ref(true, type_index)?;
        // Check that ts1' are available on the stack.
        for &ty in tag_ty.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make ts2 available on the stack.
        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    fn visit_switch(&mut self, type_index: u32, tag_index: u32) -> Self::Output {
        // Direct switch to another continuation, mediated by a `switch` tag.
        // [t1* (ref null $ct2)] -> [te1*]
        let cont_ty = self.cont_type_at(type_index)?;
        let func_ty = self.func_type_of_cont_type(cont_ty);
        // [] -> [t*]
        // A `switch` tag must not take parameters.
        let tag_ty = self.tag_at(tag_index)?;
        if tag_ty.params().len() != 0 {
            bail!(self.offset, "type mismatch: non-empty tag parameter type")
        }
        // Extract the other continuation reference
        // (the last parameter of the switched-to continuation's signature).
        match func_ty.params().last() {
            Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                // Resolve the concrete type index to a canonicalized core
                // type id, which must name a continuation type.
                let other_cont_id = rt
                    .type_index()
                    .unwrap()
                    .unpack()
                    .as_core_type_id()
                    .expect("expected canonicalized index");
                let sub_ty = self.resources.sub_type_at_id(other_cont_id);
                let other_cont_ty =
                    if let CompositeInnerType::Cont(cont) = &sub_ty.composite_type.inner {
                        cont
                    } else {
                        bail!(self.offset, "non-continuation type");
                    };
                let other_func_ty = self.func_type_of_cont_type(&other_cont_ty);
                // Both continuations' result types must agree with the tag's
                // result types (in the appropriate subtyping directions).
                if func_ty.results().len() != tag_ty.results().len()
                    || !self.is_subtype_many(func_ty.results(), tag_ty.results())
                    || other_func_ty.results().len() != tag_ty.results().len()
                    || !self.is_subtype_many(tag_ty.results(), other_func_ty.results())
                {
                    bail!(self.offset, "type mismatch in continuation types")
                }

                // Pop the continuation reference.
                self.pop_concrete_ref(true, type_index)?;

                // Check that the arguments t1* are available on the
                // stack.
                for &ty in func_ty.params().iter().rev().skip(1) {
                    self.pop_operand(Some(ty))?;
                }

                // Make the results t2* available on the stack.
                for &ty in other_func_ty.params() {
                    self.push_operand(ty)?;
                }
            }
            Some(ty) => bail!(
                self.offset,
                "type mismatch: expected a continuation reference, found {}",
                ty_to_str(*ty)
            ),
            None => bail!(
                self.offset,
                "type mismatch: instruction requires a continuation reference"
            ),
        }
        Ok(())
    }
    // Wide-arithmetic instructions; the operand/result stack shapes are
    // validated by the shared `check_binop128` and `check_i64_mul_wide`
    // helpers.
    fn visit_i64_add128(&mut self) -> Result<()> {
        self.check_binop128()
    }
    fn visit_i64_sub128(&mut self) -> Result<()> {
        self.check_binop128()
    }
    fn visit_i64_mul_wide_s(&mut self) -> Result<()> {
        self.check_i64_mul_wide()
    }
    fn visit_i64_mul_wide_u(&mut self) -> Result<()> {
        self.check_i64_mul_wide()
    }
4443
    fn visit_ref_get_desc(&mut self, type_index: u32) -> Self::Output {
        // Fetches the descriptor of a reference whose type declares one.
        // Stack: [.. (ref null $t)] -> [.. (ref $desc)].
        let (_, is_exact) = self.pop_concrete_or_exact_ref(true, type_index)?;
        match self.sub_type_at(type_index)?.composite_type.descriptor_idx {
            Some(descriptor_idx) => {
                // Exactness of the operand carries over to the descriptor
                // reference; the result is always non-nullable.
                let ref_ty = if is_exact {
                    RefType::exact(false, descriptor_idx)
                } else {
                    RefType::concrete(false, descriptor_idx)
                };
                self.push_operand(ref_ty)
            }
            None => bail!(self.offset, "expected type with descriptor"),
        }
    }
4458
    // Descriptor-based casts share `check_ref_cast_desc`; the boolean selects
    // whether the target reference type is nullable.
    fn visit_ref_cast_desc_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast_desc(false, heap_type)
    }
    fn visit_ref_cast_desc_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast_desc(true, heap_type)
    }
    fn visit_br_on_cast_desc(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // Capture the target heap type *before* `check_ref_type`
        // canonicalizes `to_ref_type` below; the descriptor check needs it.
        let described_ty = to_ref_type.heap_type();

        // Validate (and canonicalize, in place) both type annotations.
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        self.check_br_on_cast_type_hierarchy(from_ref_type, to_ref_type)?;

        // Additionally pops/validates the descriptor operand for the target
        // type (see `check_maybe_exact_descriptor_ref`).
        self.check_maybe_exact_descriptor_ref(described_ty)?;

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_types = self.label_types(block_ty, frame_kind)?;

        // On a successful cast the branch is taken, so the cast target must
        // be a subtype of the label's last result type.
        match label_types.next_back() {
            Some(label_ty) if self.resources.is_subtype(to_ref_type.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: casting to type {to_ref_type}, but it does not match \
                 label result type {label_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: br_on_cast to label with empty types, must have a reference type"
            ),
        };

        self.pop_push_label_types(label_types)?;
        // On fall-through the operand is known not to be of the target type.
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        self.push_operand(diff_ty)?;
        Ok(())
    }
    fn visit_br_on_cast_desc_fail(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // Capture the target heap type *before* `check_ref_type`
        // canonicalizes `to_ref_type` below; the descriptor check needs it.
        let described_ty = to_ref_type.heap_type();

        // Validate (and canonicalize, in place) both type annotations.
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        self.check_br_on_cast_type_hierarchy(from_ref_type, to_ref_type)?;

        // Additionally pops/validates the descriptor operand for the target
        // type (see `check_maybe_exact_descriptor_ref`).
        self.check_maybe_exact_descriptor_ref(described_ty)?;

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_tys = self.label_types(block_ty, frame_kind)?;

        // The branch is taken when the cast *fails*, so the label receives
        // the difference type and fall-through receives the target type.
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        match label_tys.next_back() {
            Some(label_ty) if self.resources.is_subtype(diff_ty.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: expected label result type {label_ty}, found {diff_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: expected a reference type, found nothing"
            ),
        }

        self.pop_push_label_types(label_tys)?;
        self.push_operand(to_ref_type)?;
        Ok(())
    }
4544}
4545
/// A minimal either-or sum type over two iterator implementations.
///
/// The trait impls below forward `Iterator`, `DoubleEndedIterator`, and
/// `ExactSizeIterator` to whichever variant is present, allowing two
/// different concrete iterator types to be used behind one type.
#[derive(Clone, Debug)]
enum Either<A, B> {
    A(A),
    B(B),
}
4551
4552impl<A, B> Iterator for Either<A, B>
4553where
4554    A: Iterator,
4555    B: Iterator<Item = A::Item>,
4556{
4557    type Item = A::Item;
4558    fn next(&mut self) -> Option<A::Item> {
4559        match self {
4560            Either::A(a) => a.next(),
4561            Either::B(b) => b.next(),
4562        }
4563    }
4564}
4565
4566impl<A, B> DoubleEndedIterator for Either<A, B>
4567where
4568    A: DoubleEndedIterator,
4569    B: DoubleEndedIterator<Item = A::Item>,
4570{
4571    fn next_back(&mut self) -> Option<A::Item> {
4572        match self {
4573            Either::A(a) => a.next_back(),
4574            Either::B(b) => b.next_back(),
4575        }
4576    }
4577}
4578
4579impl<A, B> ExactSizeIterator for Either<A, B>
4580where
4581    A: ExactSizeIterator,
4582    B: ExactSizeIterator<Item = A::Item>,
4583{
4584    fn len(&self) -> usize {
4585        match self {
4586            Either::A(a) => a.len(),
4587            Either::B(b) => b.len(),
4588        }
4589    }
4590}
4591
/// Alias for an iterator whose remaining length is known exactly, that can be
/// consumed from either end, and that is cheap to clone and debug-print.
///
/// Blanket-implemented for every iterator satisfying those bounds, so it is
/// purely a shorthand used in signatures rather than something to implement.
trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug {}
impl<T: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug> PreciseIterator for T {}
4594
4595impl Locals {
4596    /// Defines another group of `count` local variables of type `ty`.
4597    ///
4598    /// Returns `true` if the definition was successful. Local variable
4599    /// definition is unsuccessful in case the amount of total variables
4600    /// after definition exceeds the allowed maximum number.
4601    fn define(&mut self, count: u32, ty: ValType) -> bool {
4602        if count == 0 {
4603            return true;
4604        }
4605        let vacant_first = MAX_LOCALS_TO_TRACK.saturating_sub(self.num_locals);
4606        match self.num_locals.checked_add(count) {
4607            Some(num_locals) if num_locals > MAX_WASM_FUNCTION_LOCALS => return false,
4608            None => return false,
4609            Some(num_locals) => self.num_locals = num_locals,
4610        };
4611        let push_to_first = cmp::min(vacant_first, count);
4612        self.first
4613            .extend(iter::repeat(ty).take(push_to_first as usize));
4614        let num_uncached = count - push_to_first;
4615        if num_uncached > 0 {
4616            let max_uncached_idx = self.num_locals - 1;
4617            self.uncached.push((max_uncached_idx, ty));
4618        }
4619        true
4620    }
4621
4622    /// Returns the number of defined local variables.
4623    pub(super) fn len_locals(&self) -> u32 {
4624        self.num_locals
4625    }
4626
4627    /// Returns the type of the local variable at the given index if any.
4628    #[inline]
4629    pub(super) fn get(&self, idx: u32) -> Option<ValType> {
4630        match self.first.get(idx as usize) {
4631            Some(ty) => Some(*ty),
4632            None => self.get_bsearch(idx),
4633        }
4634    }
4635
4636    fn get_bsearch(&self, idx: u32) -> Option<ValType> {
4637        match self.uncached.binary_search_by_key(&idx, |(idx, _)| *idx) {
4638            // If this index would be inserted at the end of the list, then the
4639            // index is out of bounds and we return an error.
4640            Err(i) if i == self.uncached.len() => None,
4641
4642            // If `Ok` is returned we found the index exactly, or if `Err` is
4643            // returned the position is the one which is the least index
4644            // greater that `idx`, which is still the type of `idx` according
4645            // to our "compressed" representation. In both cases we access the
4646            // list at index `i`.
4647            Ok(i) | Err(i) => Some(self.uncached[i].1),
4648        }
4649    }
4650}
4651
4652impl<R> ModuleArity for WasmProposalValidator<'_, '_, R>
4653where
4654    R: WasmModuleResources,
4655{
4656    fn tag_type_arity(&self, at: u32) -> Option<(u32, u32)> {
4657        self.0
4658            .resources
4659            .tag_at(at)
4660            .map(|x| (x.params().len() as u32, x.results().len() as u32))
4661    }
4662
4663    fn type_index_of_function(&self, function_idx: u32) -> Option<u32> {
4664        self.0.resources.type_index_of_function(function_idx)
4665    }
4666
4667    fn sub_type_at(&self, type_idx: u32) -> Option<&SubType> {
4668        Some(self.0.sub_type_at(type_idx).ok()?)
4669    }
4670
4671    fn func_type_of_cont_type(&self, c: &ContType) -> Option<&FuncType> {
4672        Some(self.0.func_type_of_cont_type(c))
4673    }
4674
4675    fn sub_type_of_ref_type(&self, rt: &RefType) -> Option<&SubType> {
4676        let id = rt.type_index()?.as_core_type_id()?;
4677        Some(self.0.resources.sub_type_at_id(id))
4678    }
4679
4680    fn control_stack_height(&self) -> u32 {
4681        self.0.control.len() as u32
4682    }
4683
4684    fn label_block(&self, depth: u32) -> Option<(BlockType, FrameKind)> {
4685        self.0.jump(depth).ok()
4686    }
4687}
4688
4689impl<R> FrameStack for WasmProposalValidator<'_, '_, R>
4690where
4691    R: WasmModuleResources,
4692{
4693    fn current_frame(&self) -> Option<FrameKind> {
4694        Some(self.0.control.last()?.kind)
4695    }
4696}