wasm_smith/core.rs

//! Generating arbitrary core Wasm modules.

mod code_builder;
pub(crate) mod encode;
mod terminate;

use crate::{Config, arbitrary_loop, limited_string, unique_string};
use arbitrary::{Arbitrary, Result, Unstructured};
use code_builder::CodeBuilderAllocations;
use flagset::{FlagSet, flags};
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::mem;
use std::ops::Range;
use std::rc::Rc;
use std::str::{self, FromStr};
use wasm_encoder::{
    AbstractHeapType, ArrayType, BlockType, ConstExpr, ExportKind, FieldType, HeapType, RefType,
    StorageType, StructType, ValType,
};
pub(crate) use wasm_encoder::{GlobalType, MemoryType, TableType};

// NB: these constants are used to control the rate at which various events
// occur. For more information see where these constants are used. Their values
// are somewhat arbitrary in the sense that they're not scientifically
// determined or anything like that; I just threw a bunch of random data at
// wasm-smith, measured the rates of OOMs/traps/etc., and adjusted these so
// that abnormal events happened roughly 1% of the time.
const CHANCE_OFFSET_INBOUNDS: usize = 10; // bigger = fewer traps
const CHANCE_SEGMENT_ON_EMPTY: usize = 10; // bigger = fewer traps
const PCT_INBOUNDS: f64 = 0.995; // bigger = fewer traps

type Instruction = wasm_encoder::Instruction<'static>;

/// A pseudo-random WebAssembly module.
///
/// Construct instances of this type (with default configuration) with [the
/// `Arbitrary`
/// trait](https://docs.rs/arbitrary/*/arbitrary/trait.Arbitrary.html).
///
/// ## Configuring Generated Modules
///
/// To configure the shape of the generated module, create a
/// [`Config`][crate::Config] and then call [`Module::new`][crate::Module::new]
/// with it.
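///
/// A minimal sketch of typical usage follows; the zero-filled input bytes and
/// the final `to_bytes` encoding step are illustrative assumptions here (in a
/// real fuzzer the bytes come from the fuzzing engine):
///
/// ```no_run
/// use arbitrary::Unstructured;
/// use wasm_smith::{Config, Module};
///
/// # fn main() -> arbitrary::Result<()> {
/// // Bytes that drive generation, e.g. provided by a fuzzer.
/// let data = vec![0_u8; 1024];
/// let mut u = Unstructured::new(&data);
///
/// // Describe the shape of modules we want, then generate one.
/// let config = Config::default();
/// let module = Module::new(config, &mut u)?;
///
/// // Encode the generated module to raw Wasm bytes.
/// let wasm = module.to_bytes();
/// # let _ = wasm;
/// # Ok(())
/// # }
/// ```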
pub struct Module {
    config: Config,
    duplicate_imports_behavior: DuplicateImportsBehavior,
    valtypes: Vec<ValType>,

    /// All types locally defined in this module (available in the type index
    /// space).
    types: Vec<SubType>,

    /// Non-overlapping ranges within `types` that belong to the same rec
    /// group. All of `types` is covered by these ranges. When GC is not
    /// enabled, these are all single-element ranges.
    rec_groups: Vec<Range<usize>>,

    /// A map from a super type to all of its sub types.
    super_to_sub_types: HashMap<u32, Vec<u32>>,

    /// Indices within `types` that are not final types.
    can_subtype: Vec<u32>,

    /// Whether we should encode a types section, even if `self.types` is empty.
    should_encode_types: bool,

    /// Whether we should propagate sharedness to types generated inside
    /// `propagate_shared`.
    must_share: bool,

    /// All of this module's imports. These don't have their own index space,
    /// but instead introduce entries to each imported entity's associated index
    /// space.
    imports: Vec<Import>,

    /// Whether we should encode an imports section, even if `self.imports` is
    /// empty.
    should_encode_imports: bool,

    /// Indices within `types` that are array types.
    array_types: Vec<u32>,

    /// Indices within `types` that are function types.
    func_types: Vec<u32>,

    /// Indices within `types` that are struct types.
    struct_types: Vec<u32>,

    /// Number of imported items into this module.
    num_imports: usize,

    /// The number of tags defined in this module (not imported or
    /// aliased).
    num_defined_tags: usize,

    /// The number of functions defined in this module (not imported or
    /// aliased).
    num_defined_funcs: usize,

    /// Initialization expressions for all defined tables in this module.
    defined_tables: Vec<Option<ConstExpr>>,

    /// The number of memories defined in this module (not imported or
    /// aliased).
    num_defined_memories: usize,

    /// The indexes and initialization expressions of globals defined in this
    /// module.
    defined_globals: Vec<(u32, ConstExpr)>,

    /// All tags available to this module, sorted by their index. The list
    /// entry is the type of each tag.
    tags: Vec<TagType>,

    /// All functions available to this module, sorted by their index. The list
    /// entry points to the index in this module where the function type is
    /// defined (if available) and provides the type of the function.
    funcs: Vec<(u32, Rc<FuncType>)>,

    /// All tables available to this module, sorted by their index. The list
    /// entry is the type of each table.
    tables: Vec<TableType>,

    /// All globals available to this module, sorted by their index. The list
    /// entry is the type of each global.
    globals: Vec<GlobalType>,

    /// All memories available to this module, sorted by their index. The list
    /// entry is the type of each memory.
    memories: Vec<MemoryType>,

    exports: Vec<(String, ExportKind, u32)>,
    start: Option<u32>,
    elems: Vec<ElementSegment>,
    code: Vec<Code>,
    data: Vec<DataSegment>,

    /// The predicted size of the effective type of this module, based on the
    /// sizes of the types of this module's imports and exports.
    type_size: u32,

    /// Names currently exported from this module.
    export_names: HashSet<String>,

    /// Reusable buffer in `self.arbitrary_const_expr` to amortize the cost of
    /// allocation.
    const_expr_choices: Vec<Box<dyn Fn(&mut Unstructured, ValType) -> Result<ConstExpr>>>,

    /// The maximum type index that is allowed to be referenced.
    max_type_limit: MaxTypeLimit,

    /// Some known-interesting values, such as powers of two, values just before
    /// or just after a memory size, etc...
    interesting_values32: Vec<u32>,
    interesting_values64: Vec<u64>,
}

impl<'a> Arbitrary<'a> for Module {
    fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
        Module::new(Config::default(), u)
    }
}

impl fmt::Debug for Module {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Module")
            .field("config", &self.config)
            .field(&"...", &"...")
            .finish()
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum DuplicateImportsBehavior {
    Allowed,
    Disallowed,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AllowEmptyRecGroup {
    Yes,
    No,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MaxTypeLimit {
    ModuleTypes,
    Num(u32),
}

impl Module {
    /// Returns a reference to the internal configuration.
    pub fn config(&self) -> &Config {
        &self.config
    }

    /// Creates a new `Module` with the specified `config` for configuration
    /// and the given `Unstructured` data as the DNA of this module.
    pub fn new(config: Config, u: &mut Unstructured<'_>) -> Result<Self> {
        Self::new_internal(config, u, DuplicateImportsBehavior::Allowed)
    }

    pub(crate) fn new_internal(
        config: Config,
        u: &mut Unstructured<'_>,
        duplicate_imports_behavior: DuplicateImportsBehavior,
    ) -> Result<Self> {
        let mut module = Module::empty(config, duplicate_imports_behavior);
        module.build(u)?;
        Ok(module)
    }

    fn empty(mut config: Config, duplicate_imports_behavior: DuplicateImportsBehavior) -> Self {
        config.sanitize();
        Module {
            config,
            duplicate_imports_behavior,
            valtypes: Vec::new(),
            types: Vec::new(),
            rec_groups: Vec::new(),
            can_subtype: Vec::new(),
            super_to_sub_types: HashMap::new(),
            should_encode_types: false,
            imports: Vec::new(),
            should_encode_imports: false,
            array_types: Vec::new(),
            func_types: Vec::new(),
            struct_types: Vec::new(),
            num_imports: 0,
            num_defined_tags: 0,
            num_defined_funcs: 0,
            defined_tables: Vec::new(),
            num_defined_memories: 0,
            defined_globals: Vec::new(),
            tags: Vec::new(),
            funcs: Vec::new(),
            tables: Vec::new(),
            globals: Vec::new(),
            memories: Vec::new(),
            exports: Vec::new(),
            start: None,
            elems: Vec::new(),
            code: Vec::new(),
            data: Vec::new(),
            type_size: 0,
            export_names: HashSet::new(),
            const_expr_choices: Vec::new(),
            max_type_limit: MaxTypeLimit::ModuleTypes,
            interesting_values32: Vec::new(),
            interesting_values64: Vec::new(),
            must_share: false,
        }
    }
}

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct SubType {
    pub(crate) is_final: bool,
    pub(crate) supertype: Option<u32>,
    pub(crate) composite_type: CompositeType,
    /// How "deep" this subtype's supertype hierarchy is. The base case is 1 and
    /// if `supertype` is present it's `1 + supertype.depth`.
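    /// For example, if `C`'s supertype is `B`, `B`'s supertype is `A`, and `A`
    /// has no supertype, then `A`, `B`, and `C` have depths 1, 2, and 3.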
    depth: u32,
}

impl SubType {
    fn unwrap_struct(&self) -> &StructType {
        self.composite_type.unwrap_struct()
    }

    fn unwrap_func(&self) -> &Rc<FuncType> {
        self.composite_type.unwrap_func()
    }

    fn unwrap_array(&self) -> &ArrayType {
        self.composite_type.unwrap_array()
    }
}

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct CompositeType {
    pub inner: CompositeInnerType,
    pub shared: bool,
}

impl CompositeType {
    #[cfg(any(feature = "component-model", feature = "wasmparser"))]
    pub(crate) fn new_func(func: Rc<FuncType>, shared: bool) -> Self {
        Self {
            inner: CompositeInnerType::Func(func),
            shared,
        }
    }

    fn unwrap_func(&self) -> &Rc<FuncType> {
        match &self.inner {
            CompositeInnerType::Func(f) => f,
            _ => panic!("not a func"),
        }
    }

    fn unwrap_array(&self) -> &ArrayType {
        match &self.inner {
            CompositeInnerType::Array(a) => a,
            _ => panic!("not an array"),
        }
    }

    fn unwrap_struct(&self) -> &StructType {
        match &self.inner {
            CompositeInnerType::Struct(s) => s,
            _ => panic!("not a struct"),
        }
    }
}

impl From<&CompositeType> for wasm_encoder::CompositeType {
    fn from(ty: &CompositeType) -> Self {
        let inner = match &ty.inner {
            CompositeInnerType::Array(a) => wasm_encoder::CompositeInnerType::Array(*a),
            CompositeInnerType::Func(f) => wasm_encoder::CompositeInnerType::Func(
                wasm_encoder::FuncType::new(f.params.iter().cloned(), f.results.iter().cloned()),
            ),
            CompositeInnerType::Struct(s) => wasm_encoder::CompositeInnerType::Struct(s.clone()),
        };
        wasm_encoder::CompositeType {
            shared: ty.shared,
            inner,
        }
    }
}

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum CompositeInnerType {
    Array(ArrayType),
    Func(Rc<FuncType>),
    Struct(StructType),
}

/// A function signature.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub(crate) struct FuncType {
    /// Types of the parameter values.
    pub(crate) params: Vec<ValType>,
    /// Types of the result values.
    pub(crate) results: Vec<ValType>,
}

/// An import of an entity provided externally or by a component.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct Import {
    /// The name of the module providing this entity.
    pub(crate) module: String,
    /// The name of the entity.
    pub(crate) field: String,
    /// The type of this entity.
    pub(crate) entity_type: EntityType,
}

/// Type of an entity.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum EntityType {
    /// A global entity.
    Global(GlobalType),
    /// A table entity.
    Table(TableType),
    /// A memory entity.
    Memory(MemoryType),
    /// A tag entity.
    Tag(TagType),
    /// A function entity.
    Func(u32, Rc<FuncType>),
}

/// Type of a tag.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct TagType {
    /// Index of the function type.
    func_type_idx: u32,
    /// Type of the function.
    func_type: Rc<FuncType>,
}

#[derive(Debug)]
struct ElementSegment {
    kind: ElementKind,
    ty: RefType,
    items: Elements,
}

#[derive(Debug)]
enum ElementKind {
    Passive,
    Declared,
    Active {
        table: Option<u32>, // None == table 0 implicitly
        offset: Offset,
    },
}

#[derive(Debug)]
enum Elements {
    Functions(Vec<u32>),
    Expressions(Vec<ConstExpr>),
}

#[derive(Debug)]
struct Code {
    locals: Vec<ValType>,
    instructions: Instructions,
}

#[derive(Debug)]
enum Instructions {
    Generated(Vec<Instruction>),
    Arbitrary(Vec<u8>),
}

#[derive(Debug)]
struct DataSegment {
    kind: DataSegmentKind,
    init: Vec<u8>,
}

#[derive(Debug)]
enum DataSegmentKind {
    Passive,
    Active { memory_index: u32, offset: Offset },
}

#[derive(Debug)]
pub(crate) enum Offset {
    Const32(i32),
    Const64(i64),
    Global(u32),
}

impl Module {
    fn build(&mut self, u: &mut Unstructured) -> Result<()> {
        self.valtypes = configured_valtypes(&self.config);

        // We attempt to figure out our available imports *before* creating the types section here,
        // because the types for the imports are already well-known (specified by the user) and we
        // must have those populated for all function/etc. imports, no matter what.
        //
        // This can affect the available capacity for types and such.
        if self.arbitrary_imports_from_available(u)? {
            self.arbitrary_types(u)?;
        } else {
            self.arbitrary_types(u)?;
            self.arbitrary_imports(u)?;
        }

        self.should_encode_imports = !self.imports.is_empty() || u.arbitrary()?;

        self.arbitrary_tags(u)?;
        self.arbitrary_funcs(u)?;
        self.arbitrary_tables(u)?;
        self.arbitrary_memories(u)?;
        self.arbitrary_globals(u)?;
        if !self.required_exports(u)? {
            self.arbitrary_exports(u)?;
        };
        self.should_encode_types = !self.types.is_empty() || u.arbitrary()?;
        self.arbitrary_start(u)?;
        self.arbitrary_elems(u)?;
        self.arbitrary_data(u)?;
        self.arbitrary_code(u)?;
        Ok(())
    }

    #[inline]
    fn val_type_is_sub_type(&self, a: ValType, b: ValType) -> bool {
        match (a, b) {
            (a, b) if a == b => true,
            (ValType::Ref(a), ValType::Ref(b)) => self.ref_type_is_sub_type(a, b),
            _ => false,
        }
    }

    /// Is `a` a subtype of `b`?
    fn ref_type_is_sub_type(&self, a: RefType, b: RefType) -> bool {
        if a == b {
            return true;
        }

        if a.nullable && !b.nullable {
            return false;
        }

        self.heap_type_is_sub_type(a.heap_type, b.heap_type)
    }

    fn heap_type_is_sub_type(&self, a: HeapType, b: HeapType) -> bool {
        use AbstractHeapType::*;
        use CompositeInnerType as CT;
        use HeapType as HT;
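
        // A rough sketch of the abstract heap-type ordering checked below:
        //
        //   none     <: i31, struct, array <: eq <: any
        //   nofunc   <: func
        //   noextern <: extern
        //   noexn    <: exn
        //
        // Concrete (indexed) array and struct types sit between `none` and
        // their abstract supertypes (`array`/`struct`, `eq`, `any`), while
        // concrete function types sit between `nofunc` and `func`.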
        match (a, b) {
            (a, b) if a == b => true,

            (
                HT::Abstract {
                    shared: a_shared,
                    ty: a_ty,
                },
                HT::Abstract {
                    shared: b_shared,
                    ty: b_ty,
                },
            ) => {
                a_shared == b_shared
                    && match (a_ty, b_ty) {
                        (Eq | I31 | Struct | Array | None, Any) => true,
                        (I31 | Struct | Array | None, Eq) => true,
                        (NoExtern, Extern) => true,
                        (NoFunc, Func) => true,
                        (None, I31 | Array | Struct) => true,
                        (NoExn, Exn) => true,
                        _ => false,
                    }
            }

            (HT::Concrete(a), HT::Abstract { shared, ty }) => {
                let a_ty = &self.ty(a).composite_type;
                if a_ty.shared != shared {
                    return false;
                }
                match ty {
                    Eq | Any => matches!(a_ty.inner, CT::Array(_) | CT::Struct(_)),
                    Struct => matches!(a_ty.inner, CT::Struct(_)),
                    Array => matches!(a_ty.inner, CT::Array(_)),
                    Func => matches!(a_ty.inner, CT::Func(_)),
                    _ => false,
                }
            }

            (HT::Abstract { shared, ty }, HT::Concrete(b)) => {
                let b_ty = &self.ty(b).composite_type;
                if shared != b_ty.shared {
                    return false;
                }
                match ty {
                    None => matches!(b_ty.inner, CT::Array(_) | CT::Struct(_)),
                    NoFunc => matches!(b_ty.inner, CT::Func(_)),
                    _ => false,
                }
            }

            (HT::Concrete(mut a), HT::Concrete(b)) => loop {
                if a == b {
                    return true;
                }
                if let Some(supertype) = self.ty(a).supertype {
                    a = supertype;
                } else {
                    return false;
                }
            },
        }
    }

    fn arbitrary_types(&mut self, u: &mut Unstructured) -> Result<()> {
        assert!(self.config.min_types <= self.config.max_types);
        while self.types.len() < self.config.min_types {
            self.arbitrary_rec_group(u, AllowEmptyRecGroup::No)?;
        }
        while self.types.len() < self.config.max_types {
            let keep_going = u.arbitrary().unwrap_or(false);
            if !keep_going {
                break;
            }
            self.arbitrary_rec_group(u, AllowEmptyRecGroup::Yes)?;
        }
        Ok(())
    }

    fn add_type(&mut self, ty: SubType) -> u32 {
        let index = u32::try_from(self.types.len()).unwrap();

        if let Some(supertype) = ty.supertype {
            assert_eq!(self.is_shared_type(supertype), ty.composite_type.shared);
            self.super_to_sub_types
                .entry(supertype)
                .or_default()
                .push(index);
        }

        let list = match &ty.composite_type.inner {
            CompositeInnerType::Array(_) => &mut self.array_types,
            CompositeInnerType::Func(_) => &mut self.func_types,
            CompositeInnerType::Struct(_) => &mut self.struct_types,
        };
        list.push(index);

        // Calculate the recursive depth of this type, and if it's beneath a
        // threshold then allow future types to subtype this one. Otherwise this
        // type can no longer be subtyped, so, despite it not being final, don't
        // add it to the `can_subtype` list.
        //
        // Note that this limit is intentionally a bit less than the
        // wasm-defined maximum of 63.
        const MAX_SUBTYPING_DEPTH: u32 = 60;
        if !ty.is_final && ty.depth < MAX_SUBTYPING_DEPTH {
            self.can_subtype.push(index);
        }

        self.types.push(ty);
        index
    }

    fn arbitrary_rec_group(
        &mut self,
        u: &mut Unstructured,
        kind: AllowEmptyRecGroup,
    ) -> Result<()> {
        let rec_group_start = self.types.len();

        assert!(matches!(self.max_type_limit, MaxTypeLimit::ModuleTypes));

        if self.config.gc_enabled {
            // With small probability, clone an existing rec group.
            if self.rec_groups.len() > 0 && u.ratio(1, u8::MAX)? {
                return self.clone_rec_group(u, kind);
            }

            // Otherwise, create a new rec group with multiple types inside.
            let max_rec_group_size = self.config.max_types - self.types.len();
            let min_rec_group_size = match kind {
                AllowEmptyRecGroup::Yes => 0,
                AllowEmptyRecGroup::No => 1,
            };
            let rec_group_size = u.int_in_range(min_rec_group_size..=max_rec_group_size)?;
            let type_ref_limit = u32::try_from(self.types.len() + rec_group_size).unwrap();
            self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
            for _ in 0..rec_group_size {
                let ty = self.arbitrary_sub_type(u)?;
                self.add_type(ty);
            }
        } else {
            let type_ref_limit = u32::try_from(self.types.len()).unwrap();
            self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
            let ty = self.arbitrary_sub_type(u)?;
            self.add_type(ty);
        }

        self.max_type_limit = MaxTypeLimit::ModuleTypes;

        self.rec_groups.push(rec_group_start..self.types.len());
        Ok(())
    }

    fn clone_rec_group(&mut self, u: &mut Unstructured, kind: AllowEmptyRecGroup) -> Result<()> {
        // Choose an arbitrary rec group to clone, but bail out if the selected
        // rec group isn't valid to clone. For example if empty groups aren't
        // allowed and the selected group is empty, or if cloning the rec group
        // would cause the maximum number of types to be exceeded.
        let group = u.choose(&self.rec_groups)?.clone();
        if group.is_empty() && kind == AllowEmptyRecGroup::No {
            return Ok(());
        }
        if group.len() > self.config.max_types.saturating_sub(self.types.len()) {
            return Ok(());
        }

        // NB: this does *not* guarantee that the cloned rec group will
        // canonicalize the same as the original rec group and be deduplicated.
        // That would require a second pass over the cloned types to rewrite
        // references within the original rec group to be references into the
        // new rec group. That might make sense to do one day, but for now we
        // don't do it. That also means that we can't mark the new types as
        // "subtypes" of the old types and vice versa.
        let new_rec_group_start = self.types.len();
        for index in group {
            let orig_ty_index = u32::try_from(index).unwrap();
            let ty = self.ty(orig_ty_index).clone();
            self.add_type(ty);
        }
        self.rec_groups.push(new_rec_group_start..self.types.len());
        Ok(())
    }

    fn arbitrary_sub_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
        if !self.config.gc_enabled {
            let shared = self.arbitrary_shared(u)?;
            let func_type = self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?;
            let composite_type = CompositeType {
                inner: CompositeInnerType::Func(func_type),
                shared,
            };
            return Ok(SubType {
                is_final: true,
                supertype: None,
                composite_type,
                depth: 1,
            });
        }

        if !self.can_subtype.is_empty() && u.ratio(1, 32_u8)? {
            self.arbitrary_sub_type_of_super_type(u)
        } else {
            Ok(SubType {
                is_final: u.arbitrary()?,
                supertype: None,
                composite_type: self.arbitrary_composite_type(u)?,
                depth: 1,
            })
        }
    }

    fn arbitrary_sub_type_of_super_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
        let supertype = *u.choose(&self.can_subtype)?;
        let mut composite_type = self.types[usize::try_from(supertype).unwrap()]
            .composite_type
            .clone();
        match &mut composite_type.inner {
            CompositeInnerType::Array(a) => {
                a.0 = self.arbitrary_matching_field_type(u, a.0)?;
            }
            CompositeInnerType::Func(f) => {
                *f = self.arbitrary_matching_func_type(u, f)?;
            }
            CompositeInnerType::Struct(s) => {
                *s = self.propagate_shared(composite_type.shared, |m| {
                    m.arbitrary_matching_struct_type(u, s)
                })?;
            }
        }
        Ok(SubType {
            is_final: u.arbitrary()?,
            supertype: Some(supertype),
            composite_type,
            depth: 1 + self.types[supertype as usize].depth,
        })
    }

    fn arbitrary_matching_struct_type(
        &mut self,
        u: &mut Unstructured,
        ty: &StructType,
    ) -> Result<StructType> {
        let len_extra_fields = u.int_in_range(0..=5)?;
        let mut fields = Vec::with_capacity(ty.fields.len() + len_extra_fields);
        for field in ty.fields.iter() {
            fields.push(self.arbitrary_matching_field_type(u, *field)?);
        }
        for _ in 0..len_extra_fields {
            fields.push(self.arbitrary_field_type(u)?);
        }
        Ok(StructType {
            fields: fields.into_boxed_slice(),
        })
    }

    fn arbitrary_matching_field_type(
        &mut self,
        u: &mut Unstructured,
        ty: FieldType,
    ) -> Result<FieldType> {
        if ty.mutable {
            Ok(ty)
        } else {
            Ok(FieldType {
                element_type: self.arbitrary_matching_storage_type(u, ty.element_type)?,
                mutable: false,
            })
        }
    }

    fn arbitrary_matching_storage_type(
        &mut self,
        u: &mut Unstructured,
        ty: StorageType,
    ) -> Result<StorageType> {
        match ty {
            StorageType::I8 => Ok(StorageType::I8),
            StorageType::I16 => Ok(StorageType::I16),
            StorageType::Val(ty) => Ok(StorageType::Val(self.arbitrary_matching_val_type(u, ty)?)),
        }
    }

    fn arbitrary_matching_val_type(
        &mut self,
        u: &mut Unstructured,
        ty: ValType,
    ) -> Result<ValType> {
        match ty {
            ValType::I32 => Ok(ValType::I32),
            ValType::I64 => Ok(ValType::I64),
            ValType::F32 => Ok(ValType::F32),
            ValType::F64 => Ok(ValType::F64),
            ValType::V128 => Ok(ValType::V128),
            ValType::Ref(ty) => Ok(ValType::Ref(self.arbitrary_matching_ref_type(u, ty)?)),
        }
    }

    fn arbitrary_matching_ref_type(&self, u: &mut Unstructured, ty: RefType) -> Result<RefType> {
        Ok(RefType {
            nullable: ty.nullable,
            heap_type: self.arbitrary_matching_heap_type(u, ty.heap_type)?,
        })
    }

    fn arbitrary_matching_heap_type(&self, u: &mut Unstructured, ty: HeapType) -> Result<HeapType> {
        use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};

        if !self.config.gc_enabled {
            return Ok(ty);
        }

        let mut choices = vec![ty];
        match ty {
            HT::Abstract { shared, ty } => {
                use AbstractHeapType::*;
                let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
                    choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
                };
                let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
                    choices.extend(
                        tys.iter()
                            .filter(|&&idx| shared == self.is_shared_type(idx))
                            .copied()
                            .map(HT::Concrete),
                    );
                };
                match ty {
                    Any => {
                        add_abstract(&mut choices, &[Eq, Struct, Array, I31, None]);
                        add_concrete(&mut choices, &self.array_types);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    Eq => {
                        add_abstract(&mut choices, &[Struct, Array, I31, None]);
                        add_concrete(&mut choices, &self.array_types);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    Struct => {
                        add_abstract(&mut choices, &[Struct, None]);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    Array => {
                        add_abstract(&mut choices, &[Array, None]);
                        add_concrete(&mut choices, &self.array_types);
                    }
                    I31 => {
                        add_abstract(&mut choices, &[None]);
                    }
                    Func => {
                        add_abstract(&mut choices, &[NoFunc]);
                        add_concrete(&mut choices, &self.func_types);
                    }
                    Extern => {
                        add_abstract(&mut choices, &[NoExtern]);
                    }
                    Exn | NoExn | None | NoExtern | NoFunc | Cont | NoCont => {}
                }
            }
            HT::Concrete(idx) => {
                if let Some(subs) = self.super_to_sub_types.get(&idx) {
                    choices.extend(subs.iter().copied().map(HT::Concrete));
                }
                match self
                    .types
                    .get(usize::try_from(idx).unwrap())
                    .map(|ty| (ty.composite_type.shared, &ty.composite_type.inner))
                {
                    Some((shared, CT::Array(_) | CT::Struct(_))) => choices.push(HT::Abstract {
                        shared,
                        ty: AbstractHeapType::None,
                    }),
                    Some((shared, CT::Func(_))) => choices.push(HT::Abstract {
                        shared,
                        ty: AbstractHeapType::NoFunc,
                    }),
                    None => {
                        // The referenced type might be part of this same rec
                        // group we are currently generating, but not generated
                        // yet. In this case, leave `choices` as it is, and we
                        // will just end up choosing the original type again
                        // down below, which is fine.
                    }
                }
            }
        }
        Ok(*u.choose(&choices)?)
    }

    fn arbitrary_matching_func_type(
        &mut self,
        u: &mut Unstructured,
        ty: &FuncType,
    ) -> Result<Rc<FuncType>> {
        // Note: parameters are contravariant, results are covariant. See
        // https://github.com/bytecodealliance/wasm-tools/blob/0616ef196a183cf137ee06b4a5993b7d590088bf/crates/wasmparser/src/readers/core/types/matches.rs#L137-L174
        // for details.
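        //
        // In other words, to generate a subtype of `ty` we may widen its
        // parameters and narrow its results. For example, if `$sub <: $super`,
        // then `[$super] -> [$sub]` is a subtype of `[$sub] -> [$super]`.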
        let mut params = Vec::with_capacity(ty.params.len());
        for param in &ty.params {
            params.push(self.arbitrary_super_type_of_val_type(u, *param)?);
        }
        let mut results = Vec::with_capacity(ty.results.len());
        for result in &ty.results {
            results.push(self.arbitrary_matching_val_type(u, *result)?);
        }
        Ok(Rc::new(FuncType { params, results }))
    }

    fn arbitrary_super_type_of_val_type(
        &mut self,
        u: &mut Unstructured,
        ty: ValType,
    ) -> Result<ValType> {
        match ty {
            ValType::I32 => Ok(ValType::I32),
            ValType::I64 => Ok(ValType::I64),
            ValType::F32 => Ok(ValType::F32),
            ValType::F64 => Ok(ValType::F64),
            ValType::V128 => Ok(ValType::V128),
            ValType::Ref(ty) => Ok(ValType::Ref(self.arbitrary_super_type_of_ref_type(u, ty)?)),
        }
    }

    fn arbitrary_super_type_of_ref_type(
        &self,
        u: &mut Unstructured,
        ty: RefType,
    ) -> Result<RefType> {
        Ok(RefType {
            // TODO: For now, only generate nullable reference types.
            // Eventually we should support non-nullable reference types, but
            // this means that we will also need to recognize when it is
            // impossible to create an instance of the reference (e.g. `(ref
            // nofunc)` has no instances, and self-referential types that
            // contain a non-null self-reference are also impossible to create).
            nullable: true,
            heap_type: self.arbitrary_super_type_of_heap_type(u, ty.heap_type)?,
        })
    }

    fn arbitrary_super_type_of_heap_type(
        &self,
        u: &mut Unstructured,
        ty: HeapType,
    ) -> Result<HeapType> {
        use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};

        if !self.config.gc_enabled {
            return Ok(ty);
        }

        let mut choices = vec![ty];
        match ty {
            HT::Abstract { shared, ty } => {
                use AbstractHeapType::*;
                let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
                    choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
                };
                let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
                    choices.extend(
                        tys.iter()
                            .filter(|&&idx| shared == self.is_shared_type(idx))
                            .copied()
                            .map(HT::Concrete),
                    );
                };
                match ty {
                    None => {
                        add_abstract(&mut choices, &[Any, Eq, Struct, Array, I31]);
                        add_concrete(&mut choices, &self.array_types);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    NoExtern => {
                        add_abstract(&mut choices, &[Extern]);
                    }
                    NoFunc => {
                        add_abstract(&mut choices, &[Func]);
                        add_concrete(&mut choices, &self.func_types);
                    }
                    NoExn => {
                        add_abstract(&mut choices, &[Exn]);
                    }
                    Struct | Array | I31 => {
                        add_abstract(&mut choices, &[Any, Eq]);
                    }
                    Eq => {
                        add_abstract(&mut choices, &[Any]);
                    }
                    NoCont => {
                        add_abstract(&mut choices, &[Cont]);
                    }
                    Exn | Any | Func | Extern | Cont => {}
                }
            }
            HT::Concrete(mut idx) => {
                if let Some(sub_ty) = &self.types.get(usize::try_from(idx).unwrap()) {
                    use AbstractHeapType::*;
                    let ht = |ty| HT::Abstract {
                        shared: sub_ty.composite_type.shared,
                        ty,
                    };
                    match &sub_ty.composite_type.inner {
                        CT::Array(_) => {
                            choices.extend([ht(Any), ht(Eq), ht(Array)]);
                        }
                        CT::Func(_) => {
                            choices.push(ht(Func));
                        }
                        CT::Struct(_) => {
                            choices.extend([ht(Any), ht(Eq), ht(Struct)]);
                        }
                    }
                } else {
                    // Same as in `arbitrary_matching_heap_type`: this was a
                    // forward reference to a concrete type that is part of
                    // this same rec group we are generating right now, and
                    // therefore we haven't generated that type yet. Just
                    // leave `choices` as it is and we will choose the
                    // original type again down below.
                }
                while let Some(supertype) = self
                    .types
                    .get(usize::try_from(idx).unwrap())
                    .and_then(|ty| ty.supertype)
                {
                    choices.push(HT::Concrete(supertype));
                    idx = supertype;
                }
            }
        }
        Ok(*u.choose(&choices)?)
    }

    fn arbitrary_composite_type(&mut self, u: &mut Unstructured) -> Result<CompositeType> {
        use CompositeInnerType as CT;
        let shared = self.arbitrary_shared(u)?;

        if !self.config.gc_enabled {
            return Ok(CompositeType {
                shared,
                inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
            });
        }

        match u.int_in_range(0..=2)? {
            0 => Ok(CompositeType {
                shared,
                inner: CT::Array(ArrayType(
                    self.propagate_shared(shared, |m| m.arbitrary_field_type(u))?,
                )),
            }),
            1 => Ok(CompositeType {
                shared,
                inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
            }),
            2 => Ok(CompositeType {
                shared,
                inner: CT::Struct(self.propagate_shared(shared, |m| m.arbitrary_struct_type(u))?),
            }),
            _ => unreachable!(),
        }
    }

    fn arbitrary_struct_type(&mut self, u: &mut Unstructured) -> Result<StructType> {
        let len = u.int_in_range(0..=20)?;
        let mut fields = Vec::with_capacity(len);
        for _ in 0..len {
            fields.push(self.arbitrary_field_type(u)?);
        }
        Ok(StructType {
            fields: fields.into_boxed_slice(),
        })
    }

    fn arbitrary_field_type(&mut self, u: &mut Unstructured) -> Result<FieldType> {
        Ok(FieldType {
            element_type: self.arbitrary_storage_type(u)?,
            mutable: u.arbitrary()?,
        })
    }

    fn arbitrary_storage_type(&mut self, u: &mut Unstructured) -> Result<StorageType> {
        match u.int_in_range(0..=2)? {
            0 => Ok(StorageType::I8),
            1 => Ok(StorageType::I16),
            2 => Ok(StorageType::Val(self.arbitrary_valtype(u)?)),
            _ => unreachable!(),
        }
    }

    fn arbitrary_ref_type(&self, u: &mut Unstructured) -> Result<RefType> {
        if !self.config.reference_types_enabled {
            return Ok(RefType::FUNCREF);
        }
        Ok(RefType {
            nullable: true,
            heap_type: self.arbitrary_heap_type(u)?,
        })
    }

    fn arbitrary_heap_type(&self, u: &mut Unstructured) -> Result<HeapType> {
        assert!(self.config.reference_types_enabled);

        let concrete_type_limit = match self.max_type_limit {
            MaxTypeLimit::Num(n) => n,
            MaxTypeLimit::ModuleTypes => u32::try_from(self.types.len()).unwrap(),
        };

        if self.config.gc_enabled && concrete_type_limit > 0 && u.arbitrary()? {
            let idx = u.int_in_range(0..=concrete_type_limit - 1)?;
            // If the caller is demanding a shared heap type but the concrete
            // type we found is not in fact shared, we skip down below to use an
            // abstract heap type instead. If the caller is not demanding a
            // shared type, though, we can use either a shared or unshared
            // concrete type.
            if let Some(ty) = self.types.get(idx as usize) {
                // TODO: in the future, once we can easily query a list of
                // existing shared types, remove this extra check.
                if !(self.must_share && !ty.composite_type.shared) {
                    return Ok(HeapType::Concrete(idx));
                }
            }
        }

        use AbstractHeapType::*;
        let mut choices = vec![Func, Extern];
        if self.config.exceptions_enabled {
            choices.push(Exn);
        }
        if self.config.gc_enabled {
            choices.extend(
                [Any, None, NoExtern, NoFunc, Eq, Struct, Array, I31]
                    .iter()
                    .copied(),
            );
        }

        Ok(HeapType::Abstract {
            shared: self.arbitrary_shared(u)?,
            ty: *u.choose(&choices)?,
        })
    }

    fn arbitrary_func_type(&mut self, u: &mut Unstructured) -> Result<Rc<FuncType>> {
        let mut params = vec![];
        let mut results = vec![];
        let max_params = 20;
        arbitrary_loop(u, 0, max_params, |u| {
            params.push(self.arbitrary_valtype(u)?);
            Ok(true)
        })?;
        let max_results = if self.config.multi_value_enabled {
            max_params
        } else {
            1
        };
        arbitrary_loop(u, 0, max_results, |u| {
            results.push(self.arbitrary_valtype(u)?);
            Ok(true)
        })?;
        Ok(Rc::new(FuncType { params, results }))
    }

    fn can_add_local_or_import_tag(&self) -> bool {
        self.config.exceptions_enabled
            && self.has_tag_func_types()
            && self.tags.len() < self.config.max_tags
    }

    fn can_add_local_or_import_func(&self) -> bool {
        !self.func_types.is_empty() && self.funcs.len() < self.config.max_funcs
    }

    fn can_add_local_or_import_table(&self) -> bool {
        self.tables.len() < self.config.max_tables
    }

    fn can_add_local_or_import_global(&self) -> bool {
        self.globals.len() < self.config.max_globals
    }

    fn can_add_local_or_import_memory(&self) -> bool {
        self.memories.len() < self.config.max_memories
    }

    fn arbitrary_imports(&mut self, u: &mut Unstructured) -> Result<()> {
        if self.config.max_type_size < self.type_size {
            return Ok(());
        }

        let mut import_strings = HashSet::new();
        let mut choices: Vec<fn(&mut Unstructured, &mut Module) -> Result<EntityType>> =
            Vec::with_capacity(5);
        let min = self.config.min_imports.saturating_sub(self.num_imports);
        let max = self.config.max_imports.saturating_sub(self.num_imports);
        arbitrary_loop(u, min, max, |u| {
            choices.clear();
            if self.can_add_local_or_import_tag() {
                choices.push(|u, m| {
                    let ty = m.arbitrary_tag_type(u)?;
                    Ok(EntityType::Tag(ty))
                });
            }
            if self.can_add_local_or_import_func() {
                choices.push(|u, m| {
                    let idx = *u.choose(&m.func_types)?;
                    let ty = m.func_type(idx).clone();
                    Ok(EntityType::Func(idx, ty))
                });
            }
            if self.can_add_local_or_import_global() {
                choices.push(|u, m| {
                    let ty = m.arbitrary_global_type(u)?;
                    Ok(EntityType::Global(ty))
                });
            }
            if self.can_add_local_or_import_memory() {
                choices.push(|u, m| {
                    let ty = arbitrary_memtype(u, m.config())?;
                    Ok(EntityType::Memory(ty))
                });
            }
            if self.can_add_local_or_import_table() {
                choices.push(|u, m| {
                    let ty = arbitrary_table_type(u, m.config(), Some(m))?;
                    Ok(EntityType::Table(ty))
                });
            }

            if choices.is_empty() {
                // We are out of choices. If we have not yet reached the
                // minimum number of imports, then there is no way to satisfy
                // that constraint, but we prioritize the max-constraints over
                // the min-import constraint.
1234                return Ok(false);
1235            }
1236
1237            // Generate a type to import, but only actually add the item if the
1238            // type size budget allows us to.
1239            let f = u.choose(&choices)?;
1240            let entity_type = f(u, self)?;
1241            let budget = self.config.max_type_size - self.type_size;
1242            if entity_type.size() + 1 > budget {
1243                return Ok(false);
1244            }
1245            self.type_size += entity_type.size() + 1;
1246
1247            // Generate an arbitrary module/name pair to name this import.
1248            let mut import_pair = unique_import_strings(1_000, u)?;
1249            if self.duplicate_imports_behavior == DuplicateImportsBehavior::Disallowed {
1250                while import_strings.contains(&import_pair) {
1251                    use std::fmt::Write;
1252                    write!(&mut import_pair.1, "{}", import_strings.len()).unwrap();
1253                }
1254                import_strings.insert(import_pair.clone());
1255            }
1256            let (module, field) = import_pair;
1257
1258            // Once our name is determined, then we push the typed item into the
1259            // appropriate namespace.
1260            match &entity_type {
1261                EntityType::Tag(ty) => self.tags.push(ty.clone()),
1262                EntityType::Func(idx, ty) => self.funcs.push((*idx, ty.clone())),
1263                EntityType::Global(ty) => self.globals.push(*ty),
1264                EntityType::Table(ty) => self.tables.push(*ty),
1265                EntityType::Memory(ty) => self.memories.push(*ty),
1266            }
1267
1268            self.num_imports += 1;
1269            self.imports.push(Import {
1270                module,
1271                field,
1272                entity_type,
1273            });
1274            Ok(true)
1275        })?;
1276
1277        Ok(())
1278    }
1279
1280    /// Generate some arbitrary imports from the list of available imports.
1281    ///
1282    /// Returns `true` if there was a list of available imports
1283    /// configured. Otherwise `false` and the caller should generate arbitrary
1284    /// imports.
1285    fn arbitrary_imports_from_available(&mut self, u: &mut Unstructured) -> Result<bool> {
1286        let example_module = if let Some(wasm) = self.config.available_imports.take() {
1287            wasm
1288        } else {
1289            return Ok(false);
1290        };
1291
1292        #[cfg(feature = "wasmparser")]
1293        {
1294            self._arbitrary_imports_from_available(u, &example_module)?;
1295            Ok(true)
1296        }
1297        #[cfg(not(feature = "wasmparser"))]
1298        {
1299            let _ = (example_module, u);
1300            panic!("support for `available_imports` was disabled at compile time");
1301        }
1302    }
1303
1304    #[cfg(feature = "wasmparser")]
1305    fn _arbitrary_imports_from_available(
1306        &mut self,
1307        u: &mut Unstructured,
1308        example_module: &[u8],
1309    ) -> Result<()> {
1310        // First, parse the module-by-example to collect the types and imports.
1311        //
1312        // `available_types` will map from a signature index (which is the same as the index into
1313        // this vector) as it appears in the parsed code, to the type itself. We copy all the types
1314        // from module-by-example into the module being constructed for the sake of simplicity
1315        // and for this reason, [`Self::config::max_types`] may be surpassed.
1316        let mut new_recgrps = Vec::<usize>::new();
1317        let mut available_types = Vec::<SubType>::new();
1318        let mut available_imports = Vec::<wasmparser::Import>::new();
1319        for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
1320            match payload.expect("could not parse the available import payload") {
1321                wasmparser::Payload::TypeSection(type_reader) => {
1322                    for recgrp in type_reader {
1323                        let recgrp = recgrp.expect("could not read recursive group");
1324                        new_recgrps.push(recgrp.types().len());
1325                        for subtype in recgrp.into_types() {
1326                            let mut subtype: SubType = subtype.try_into().unwrap();
1327                            if let Some(supertype_idx) = subtype.supertype {
1328                                assert!(supertype_idx < (available_types.len() as u32));
1329                                subtype.depth = available_types[supertype_idx as usize].depth + 1;
1330                            }
1331                            available_types.push(subtype);
1332                        }
1333                    }
1334                }
1335                wasmparser::Payload::ImportSection(import_reader) => {
1336                    for im in import_reader {
1337                        let im = im.expect("could not read import");
1338                        // We can immediately filter whether this is an import we want to
1339                        // use.
1340                        let use_import = u.arbitrary().unwrap_or(false);
1341                        if !use_import {
1342                            continue;
1343                        }
1344                        available_imports.push(im);
1345                    }
1346                }
1347                _ => {}
1348            }
1349        }
1350
1351        // We then generate import entries which refer to the imported types. Since this function
1352        // is called at the very beginning of the module generation process and all types from the
1353        // module-by-example are copied into the current module, no further adjustments are needed
1354        // for type indices.
1355        let mut new_imports = Vec::with_capacity(available_imports.len());
1356        for import in available_imports {
1357            let type_size_budget = self.config.max_type_size - self.type_size;
1358            let entity_type = match &import.ty {
1359                wasmparser::TypeRef::Func(sig_idx) => {
1360                    if self.funcs.len() >= self.config.max_funcs {
1361                        continue;
1362                    } else {
1363                        match available_types.get(*sig_idx as usize) {
1364                            None => panic!("signature index refers to a type out of bounds"),
1365                            Some(ty) => match &ty.composite_type.inner {
1366                                CompositeInnerType::Func(func_type) => {
1367                                    let entity = EntityType::Func(*sig_idx, Rc::clone(func_type));
1368                                    if type_size_budget < entity.size() {
1369                                        continue;
1370                                    }
1371                                    self.funcs.push((*sig_idx, Rc::clone(func_type)));
1372                                    entity
1373                                }
1374                                _ => panic!("a function type is required for function import"),
1375                            },
1376                        }
1377                    }
1378                }
1379
1380                wasmparser::TypeRef::Tag(wasmparser::TagType { func_type_idx, .. }) => {
1381                    let can_add_tag = self.tags.len() < self.config.max_tags;
1382                    if !self.config.exceptions_enabled || !can_add_tag {
1383                        continue;
1384                    } else {
1385                        match available_types.get(*func_type_idx as usize) {
1386                            None => {
1387                                panic!("function type index for tag refers to a type out of bounds")
1388                            }
1389                            Some(ty) => match &ty.composite_type.inner {
1390                                CompositeInnerType::Func(func_type) => {
1391                                    let tag_type = TagType {
1392                                        func_type_idx: *func_type_idx,
1393                                        func_type: Rc::clone(func_type),
1394                                    };
1395                                    let entity = EntityType::Tag(tag_type.clone());
1396                                    if type_size_budget < entity.size() {
1397                                        continue;
1398                                    }
1399                                    self.tags.push(tag_type);
1400                                    entity
1401                                }
1402                                _ => panic!("a function type is required for tag import"),
1403                            },
1404                        }
1405                    }
1406                }
1407
1408                wasmparser::TypeRef::Table(table_ty) => {
1409                    let table_ty = TableType::try_from(*table_ty).unwrap();
1410                    let entity = EntityType::Table(table_ty);
1411                    let type_size = entity.size();
1412                    if type_size_budget < type_size || !self.can_add_local_or_import_table() {
1413                        continue;
1414                    }
1415                    self.type_size += type_size;
1416                    self.tables.push(table_ty);
1417                    entity
1418                }
1419
1420                wasmparser::TypeRef::Memory(memory_ty) => {
1421                    let memory_ty = MemoryType::from(*memory_ty);
1422                    let entity = EntityType::Memory(memory_ty);
1423                    let type_size = entity.size();
1424                    if type_size_budget < type_size || !self.can_add_local_or_import_memory() {
1425                        continue;
1426                    }
1427                    self.type_size += type_size;
1428                    self.memories.push(memory_ty);
1429                    entity
1430                }
1431
1432                wasmparser::TypeRef::Global(global_ty) => {
1433                    let global_ty = GlobalType::try_from(*global_ty).unwrap();
1434                    let entity = EntityType::Global(global_ty);
1435                    let type_size = entity.size();
1436                    if type_size_budget < type_size || !self.can_add_local_or_import_global() {
1437                        continue;
1438                    }
1439                    self.type_size += type_size;
1440                    self.globals.push(global_ty);
1441                    entity
1442                }
1443            };
1444            new_imports.push(Import {
1445                module: import.module.to_string(),
1446                field: import.name.to_string(),
1447                entity_type,
1448            });
1449            self.num_imports += 1;
1450        }
1451
1452        // Finally, add the entities we just generated.
1453        let mut recgrp_start_idx = self.types.len();
1454        for size in new_recgrps {
1455            self.rec_groups
1456                .push(recgrp_start_idx..recgrp_start_idx + size);
1457            recgrp_start_idx += size;
1458        }
1459        for ty in available_types {
1460            self.add_type(ty);
1461        }
1462        self.imports.extend(new_imports);
1463
1464        Ok(())
1465    }
1466
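    /// Returns the entity type of the item at `index` within the index space
    /// of the given export `kind`.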
1467    fn type_of(&self, kind: ExportKind, index: u32) -> EntityType {
1468        match kind {
1469            ExportKind::Global => EntityType::Global(self.globals[index as usize]),
1470            ExportKind::Memory => EntityType::Memory(self.memories[index as usize]),
1471            ExportKind::Table => EntityType::Table(self.tables[index as usize]),
1472            ExportKind::Func => {
1473                let (_idx, ty) = &self.funcs[index as usize];
                EntityType::Func(u32::MAX, ty.clone())
1475            }
1476            ExportKind::Tag => EntityType::Tag(self.tags[index as usize].clone()),
1477        }
1478    }
1479
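    /// Returns the type at `idx` in this module's type index space.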
1480    fn ty(&self, idx: u32) -> &SubType {
1481        &self.types[idx as usize]
1482    }
1483
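    /// Returns an iterator over the function types defined in this module,
    /// yielding each type index along with the function type itself.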
1484    fn func_types(&self) -> impl Iterator<Item = (u32, &FuncType)> + '_ {
1485        self.func_types
1486            .iter()
1487            .copied()
1488            .map(move |type_i| (type_i, &**self.func_type(type_i)))
1489    }
1490
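    /// Returns the function type at `idx`, panicking if that type index does
    /// not refer to a function type.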
1491    fn func_type(&self, idx: u32) -> &Rc<FuncType> {
1492        match &self.ty(idx).composite_type.inner {
1493            CompositeInnerType::Func(f) => f,
1494            _ => panic!("types[{idx}] is not a func type"),
1495        }
1496    }
1497
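    /// Returns an iterator over this module's tags along with their tag
    /// indices.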
1498    fn tags(&self) -> impl Iterator<Item = (u32, &TagType)> + '_ {
1499        self.tags
1500            .iter()
1501            .enumerate()
1502            .map(move |(i, ty)| (i as u32, ty))
1503    }
1504
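    /// Returns an iterator over this module's functions, yielding each
    /// function index along with its type.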
1505    fn funcs(&self) -> impl Iterator<Item = (u32, &Rc<FuncType>)> + '_ {
1506        self.funcs
1507            .iter()
1508            .enumerate()
1509            .map(move |(i, (_, ty))| (i as u32, ty))
1510    }
1511
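    /// Returns whether at least one function type is suitable for use as a
    /// tag type.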
1512    fn has_tag_func_types(&self) -> bool {
1513        self.tag_func_types().next().is_some()
1514    }
1515
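    /// Returns the indices of function types that may be used for tags,
    /// i.e. those with an empty result type.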
1516    fn tag_func_types(&self) -> impl Iterator<Item = u32> + '_ {
1517        self.func_types
1518            .iter()
1519            .copied()
1520            .filter(move |i| self.func_type(*i).results.is_empty())
1521    }
1522
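    /// Generates an arbitrary value type from this module's configured set of
    /// value types, first picking a class of type and then a concrete type
    /// within that class.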
1523    fn arbitrary_valtype(&self, u: &mut Unstructured) -> Result<ValType> {
1524        #[derive(PartialEq, Eq, PartialOrd, Ord)]
1525        enum ValTypeClass {
1526            I32,
1527            I64,
1528            F32,
1529            F64,
1530            V128,
1531            Ref,
1532        }
1533
1534        let mut val_classes: Vec<_> = self
1535            .valtypes
1536            .iter()
1537            .map(|vt| match vt {
1538                ValType::I32 => ValTypeClass::I32,
1539                ValType::I64 => ValTypeClass::I64,
1540                ValType::F32 => ValTypeClass::F32,
1541                ValType::F64 => ValTypeClass::F64,
1542                ValType::V128 => ValTypeClass::V128,
1543                ValType::Ref(_) => ValTypeClass::Ref,
1544            })
1545            .collect();
1546        val_classes.sort_unstable();
1547        val_classes.dedup();
1548
1549        match u.choose(&val_classes)? {
1550            ValTypeClass::I32 => Ok(ValType::I32),
1551            ValTypeClass::I64 => Ok(ValType::I64),
1552            ValTypeClass::F32 => Ok(ValType::F32),
1553            ValTypeClass::F64 => Ok(ValType::F64),
1554            ValTypeClass::V128 => Ok(ValType::V128),
1555            ValTypeClass::Ref => Ok(ValType::Ref(self.arbitrary_ref_type(u)?)),
1556        }
1557    }
1558
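    /// Generates an arbitrary global type whose sharedness is derived from
    /// the chosen value type.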
1559    fn arbitrary_global_type(&self, u: &mut Unstructured) -> Result<GlobalType> {
1560        let val_type = self.arbitrary_valtype(u)?;
1561        // Propagate the inner type's sharedness to the global type.
1562        let shared = match val_type {
1563            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {
1564                self.arbitrary_shared(u)?
1565            }
1566            ValType::Ref(r) => self.is_shared_ref_type(r),
1567        };
1568        Ok(GlobalType {
1569            val_type,
1570            mutable: u.arbitrary()?,
1571            shared,
1572        })
1573    }
1574
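    /// Generates an arbitrary tag type from the function types that are
    /// suitable for tags.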
1575    fn arbitrary_tag_type(&self, u: &mut Unstructured) -> Result<TagType> {
1576        let candidate_func_types: Vec<_> = self.tag_func_types().collect();
1577        arbitrary_tag_type(u, &candidate_func_types, |ty_idx| {
1578            self.func_type(ty_idx).clone()
1579        })
1580    }
1581
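    /// Defines an arbitrary number of tags in this module, if exceptions are
    /// enabled and suitable function types exist.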
1582    fn arbitrary_tags(&mut self, u: &mut Unstructured) -> Result<()> {
1583        if !self.config.exceptions_enabled || !self.has_tag_func_types() {
1584            return Ok(());
1585        }
1586
1587        arbitrary_loop(u, self.config.min_tags, self.config.max_tags, |u| {
1588            if !self.can_add_local_or_import_tag() {
1589                return Ok(false);
1590            }
1591            self.tags.push(self.arbitrary_tag_type(u)?);
1592            self.num_defined_tags += 1;
1593            Ok(true)
1594        })
1595    }
1596
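    /// Defines an arbitrary number of functions in this module, choosing
    /// their signatures from the available (non-shared) function types.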
1597    fn arbitrary_funcs(&mut self, u: &mut Unstructured) -> Result<()> {
1598        if self.func_types.is_empty() {
1599            return Ok(());
1600        }
1601
1602        // For now, only define non-shared functions. Until we can update
1603        // instruction generation to understand the additional sharedness
1604        // validation, we don't want to generate instructions that touch
1605        // unshared objects from a shared context (TODO: handle shared).
1606        let unshared_func_types: Vec<_> = self
1607            .func_types
1608            .iter()
1609            .copied()
1610            .filter(|&i| !self.is_shared_type(i))
1611            .collect();
1612        if unshared_func_types.is_empty() {
1613            return Ok(());
1614        }
1615
1616        arbitrary_loop(u, self.config.min_funcs, self.config.max_funcs, |u| {
1617            if !self.can_add_local_or_import_func() {
1618                return Ok(false);
1619            }
1620            let max = unshared_func_types.len() - 1;
1621            let ty = unshared_func_types[u.int_in_range(0..=max)?];
1622            self.funcs.push((ty, self.func_type(ty).clone()));
1623            self.num_defined_funcs += 1;
1624            Ok(true)
1625        })
1626    }
1627
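    /// Defines an arbitrary number of tables in this module.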
1628    fn arbitrary_tables(&mut self, u: &mut Unstructured) -> Result<()> {
1629        arbitrary_loop(
1630            u,
1631            self.config.min_tables as usize,
1632            self.config.max_tables,
1633            |u| {
1634                if !self.can_add_local_or_import_table() {
1635                    return Ok(false);
1636                }
1637                let ty = arbitrary_table_type(u, self.config(), Some(self))?;
1638                self.add_arbitrary_table_of_type(ty, u)?;
1639                Ok(true)
1640            },
1641        )
1642    }
1643
1644    /// Generates an arbitrary table initialization expression for a table whose
1645    /// element type is `ty`.
1646    ///
1647    /// Table initialization expressions were added by the GC proposal to
1648    /// initialize non-nullable tables.
1649    fn arbitrary_table_init(
1650        &mut self,
1651        u: &mut Unstructured,
1652        ty: RefType,
1653    ) -> Result<Option<ConstExpr>> {
1654        if !self.config.gc_enabled {
1655            assert!(ty.nullable);
1656            return Ok(None);
1657        }
1658        // Even with the GC proposal an initialization expression is not
1659        // required if the element type is nullable.
1660        if ty.nullable && u.arbitrary()? {
1661            return Ok(None);
1662        }
1663        // Only imported globals are allowed in the constant initialization
1664        // expressions for tables.
1665        let expr = self.arbitrary_const_expr(ValType::Ref(ty), u, false)?;
1666        Ok(Some(expr))
1667    }
1668
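    /// Defines an arbitrary number of memories in this module.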
1669    fn arbitrary_memories(&mut self, u: &mut Unstructured) -> Result<()> {
1670        arbitrary_loop(
1671            u,
1672            self.config.min_memories as usize,
1673            self.config.max_memories,
1674            |u| {
1675                if !self.can_add_local_or_import_memory() {
1676                    return Ok(false);
1677                }
1678                let ty = arbitrary_memtype(u, self.config())?;
1679                self.add_arbitrary_memory_of_type(ty)?;
1680                Ok(true)
1681            },
1682        )
1683    }
1684
1685    /// Add a new global of the given type and return its global index.
1686    fn add_arbitrary_global_of_type(
1687        &mut self,
1688        ty: GlobalType,
1689        u: &mut Unstructured,
1690    ) -> Result<u32> {
1691        let expr = self.arbitrary_const_expr(ty.val_type, u, true)?;
1692        let global_idx = self.globals.len() as u32;
1693        self.globals.push(ty);
1694        self.defined_globals.push((global_idx, expr));
1695        Ok(global_idx)
1696    }
1697
1698    /// Add a new memory of the given type and return its memory index.
1699    fn add_arbitrary_memory_of_type(&mut self, ty: MemoryType) -> Result<u32> {
1700        let memory_idx = self.memories.len() as u32;
1701        self.num_defined_memories += 1;
1702        self.memories.push(ty);
1703        Ok(memory_idx)
1704    }
1705
1706    /// Add a new table of the given type and return its table index.
1707    fn add_arbitrary_table_of_type(&mut self, ty: TableType, u: &mut Unstructured) -> Result<u32> {
1708        let expr = self.arbitrary_table_init(u, ty.element_type)?;
1709        let table_idx = self.tables.len() as u32;
1710        self.tables.push(ty);
1711        self.defined_tables.push(expr);
1712        Ok(table_idx)
1713    }
1714
1715    /// Generates an arbitrary constant expression of the type `ty`.
1716    fn arbitrary_const_expr(
1717        &mut self,
1718        ty: ValType,
1719        u: &mut Unstructured,
1720        allow_defined_globals: bool,
1721    ) -> Result<ConstExpr> {
1722        let mut choices = mem::take(&mut self.const_expr_choices);
1723        choices.clear();
1724
1725        // MVP wasm can `global.get` any immutable imported global in a
1726        // constant expression, and the GC proposal enables this for all
        // globals, so make all matching globals candidates.
1728        for i in self.globals_for_const_expr(ty, allow_defined_globals) {
1729            choices.push(Box::new(move |_, _| Ok(ConstExpr::global_get(i))));
1730        }
1731
        // Another option for all types is to generate an actual value of the
        // type. Change `ty` to any valid subtype of `ty` and then generate a
        // matching value of that subtype.
1735        let ty = self.arbitrary_matching_val_type(u, ty)?;
1736        match ty {
1737            ValType::I32 => {
1738                choices.push(Box::new(|u, _| Ok(ConstExpr::i32_const(u.arbitrary()?))));
1739                if self.config.extended_const_enabled {
1740                    choices.push(Box::new(arbitrary_extended_const));
1741                }
1742            }
1743            ValType::I64 => {
1744                choices.push(Box::new(|u, _| Ok(ConstExpr::i64_const(u.arbitrary()?))));
1745                if self.config.extended_const_enabled {
1746                    choices.push(Box::new(arbitrary_extended_const));
1747                }
1748            }
1749            ValType::F32 => choices.push(Box::new(|u, _| {
1750                Ok(ConstExpr::f32_const(u.arbitrary::<f32>()?.into()))
1751            })),
1752            ValType::F64 => choices.push(Box::new(|u, _| {
1753                Ok(ConstExpr::f64_const(u.arbitrary::<f64>()?.into()))
1754            })),
1755            ValType::V128 => {
1756                choices.push(Box::new(|u, _| Ok(ConstExpr::v128_const(u.arbitrary()?))))
1757            }
1758
1759            ValType::Ref(ty) => {
1760                if ty.nullable {
1761                    choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_null(ty.heap_type))));
1762                }
1763
1764                match ty.heap_type {
1765                    HeapType::Abstract {
1766                        ty: AbstractHeapType::Func,
1767                        shared,
1768                    } => {
1769                        let num_funcs = self
1770                            .funcs
1771                            .iter()
1772                            .filter(|(t, _)| shared == self.is_shared_type(*t))
1773                            .count();
1774                        if num_funcs > 0 {
1775                            let pick = u.int_in_range(0..=num_funcs - 1)?;
1776                            let (i, _) = self
1777                                .funcs
1778                                .iter()
1779                                .map(|(t, _)| *t)
1780                                .enumerate()
1781                                .filter(|(_, t)| shared == self.is_shared_type(*t))
1782                                .nth(pick)
1783                                .unwrap();
1784                            choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
1785                        }
1786                    }
1787
1788                    HeapType::Concrete(ty) => {
1789                        for (i, fty) in self.funcs.iter().map(|(t, _)| *t).enumerate() {
1790                            if ty != fty {
1791                                continue;
1792                            }
1793                            choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
1794                        }
1795                    }
1796
1797                    // TODO: fill out more GC types e.g `array.new` and
1798                    // `struct.new`
1799                    _ => {}
1800                }
1801            }
1802        }
1803
1804        let f = u.choose(&choices)?;
1805        let ret = f(u, ty);
1806        self.const_expr_choices = choices;
1807        return ret;
1808
        /// Generates a constant expression using the `extended-const` proposal
        /// for WebAssembly. That proposal additionally allows
        /// `i{32,64}.{add,sub,mul}` in constant expressions beyond the
        /// original `i{32,64}.const` instructions. Note that at this time this
        /// doesn't use the full expression generator in `code_builder.rs` but
        /// instead inlines just what's necessary for constant expressions
        /// here.
1816        fn arbitrary_extended_const(u: &mut Unstructured<'_>, ty: ValType) -> Result<ConstExpr> {
1817            use wasm_encoder::Instruction::*;
1818
1819            // This only works for i32/i64, would need refactoring for different
1820            // types.
1821            assert!(ty == ValType::I32 || ty == ValType::I64);
1822            let add = if ty == ValType::I32 { I32Add } else { I64Add };
1823            let sub = if ty == ValType::I32 { I32Sub } else { I64Sub };
1824            let mul = if ty == ValType::I32 { I32Mul } else { I64Mul };
1825            let const_: fn(&mut Unstructured<'_>) -> Result<wasm_encoder::Instruction<'static>> =
1826                if ty == ValType::I32 {
1827                    |u| u.arbitrary().map(I32Const)
1828                } else {
1829                    |u| u.arbitrary().map(I64Const)
1830                };
1831
1832            // Here `instrs` is the list of instructions, in reverse order, that
1833            // are going to be emitted. The `needed` value keeps track of how
            // many values are still needed to complete this expression; new
            // instructions are generated until that count reaches zero.
1836            let mut instrs = Vec::new();
1837            let mut needed = 1;
1838            while needed > 0 {
1839                // If fuzz data has been exhausted or if this is a "large
1840                // enough" constant expression then force generation of
1841                // constants to finish out the expression.
1842                let choice = if u.is_empty() || instrs.len() > 10 {
1843                    0
1844                } else {
1845                    u.int_in_range(0..=3)?
1846                };
1847                match choice {
1848                    0 => {
1849                        instrs.push(const_(u)?);
1850                        needed -= 1;
1851                    }
1852                    1 => {
1853                        instrs.push(add.clone());
1854                        needed += 1;
1855                    }
1856                    2 => {
1857                        instrs.push(sub.clone());
1858                        needed += 1;
1859                    }
1860                    3 => {
1861                        instrs.push(mul.clone());
1862                        needed += 1;
1863                    }
1864                    _ => unreachable!(),
1865                }
1866            }
1867            Ok(ConstExpr::extended(instrs.into_iter().rev()))
1868        }
1869    }
1870
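    /// Defines an arbitrary number of globals in this module, each with an
    /// arbitrary type and initialization expression.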
1871    fn arbitrary_globals(&mut self, u: &mut Unstructured) -> Result<()> {
1872        arbitrary_loop(u, self.config.min_globals, self.config.max_globals, |u| {
1873            if !self.can_add_local_or_import_global() {
1874                return Ok(false);
1875            }
1876
1877            let ty = self.arbitrary_global_type(u)?;
1878            self.add_arbitrary_global_of_type(ty, u)?;
1879
1880            Ok(true)
1881        })
1882    }
1883
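    /// If the configuration provides an example module for `exports`, mirror
    /// that module's exports; returns `true` if exports were generated this
    /// way.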
1884    fn required_exports(&mut self, u: &mut Unstructured) -> Result<bool> {
1885        let example_module = if let Some(wasm) = self.config.exports.clone() {
1886            wasm
1887        } else {
1888            return Ok(false);
1889        };
1890
1891        #[cfg(feature = "wasmparser")]
1892        {
1893            self._required_exports(u, &example_module)?;
1894            Ok(true)
1895        }
1896        #[cfg(not(feature = "wasmparser"))]
1897        {
1898            let _ = (example_module, u);
1899            panic!("support for `exports` was disabled at compile time");
1900        }
1901    }
1902
1903    #[cfg(feature = "wasmparser")]
1904    fn _required_exports(&mut self, u: &mut Unstructured, example_module: &[u8]) -> Result<()> {
1905        let mut required_exports: Vec<wasmparser::Export> = vec![];
1906        let mut validator = wasmparser::Validator::new();
1907        let exports_types = validator
1908            .validate_all(&example_module)
1909            .expect("Failed to validate `exports` Wasm");
1910        for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
1911            match payload.expect("Failed to read `exports` Wasm") {
1912                wasmparser::Payload::ExportSection(export_reader) => {
1913                    required_exports = export_reader
1914                        .into_iter()
1915                        .collect::<Result<_, _>>()
1916                        .expect("Failed to read `exports` export section");
1917                }
1918                _ => {}
1919            }
1920        }
1921
1922        // For each export, add necessary prerequisites to the module.
1923        let exports_types = exports_types.as_ref();
1924        let check_and_get_func_type =
1925            |id: wasmparser::types::CoreTypeId| -> (Rc<FuncType>, SubType) {
1926                let subtype = exports_types.get(id).unwrap_or_else(|| {
1927                    panic!("Unable to get subtype for {id:?} in `exports` Wasm")
1928                });
1929                match &subtype.composite_type.inner {
1930                    wasmparser::CompositeInnerType::Func(func_type) => {
1931                        assert!(
1932                            subtype.is_final,
1933                            "Subtype {subtype:?} from `exports` Wasm is not final"
1934                        );
1935                        assert!(
1936                            subtype.supertype_idx.is_none(),
1937                            "Subtype {subtype:?} from `exports` Wasm has non-empty supertype"
1938                        );
1939                        let func_type = Rc::new(FuncType {
1940                            params: func_type
1941                                .params()
1942                                .iter()
1943                                .copied()
1944                                .map(|t| t.try_into().unwrap())
1945                                .collect(),
1946                            results: func_type
1947                                .results()
1948                                .iter()
1949                                .copied()
1950                                .map(|t| t.try_into().unwrap())
1951                                .collect(),
1952                        });
1953                        let subtype = SubType {
1954                            is_final: true,
1955                            supertype: None,
1956                            depth: 1,
1957                            composite_type: CompositeType::new_func(
1958                                Rc::clone(&func_type),
1959                                subtype.composite_type.shared,
1960                            ),
1961                        };
1962                        (func_type, subtype)
1963                    }
1964                    _ => panic!(
1965                        "Unable to handle type {:?} from `exports` Wasm",
1966                        subtype.composite_type
1967                    ),
1968                }
1969            };
1970        for export in required_exports {
1971            let new_index = match exports_types
1972                .entity_type_from_export(&export)
1973                .unwrap_or_else(|| {
1974                    panic!("Unable to get type from export {export:?} in `exports` Wasm",)
1975                }) {
1976                // For functions, add the type and a function with that type.
1977                wasmparser::types::EntityType::Func(id) => {
1978                    let (func_type, subtype) = check_and_get_func_type(id);
1979                    self.rec_groups.push(self.types.len()..self.types.len() + 1);
1980                    let type_index = self.add_type(subtype);
1981                    let func_index = self.funcs.len() as u32;
1982                    self.funcs.push((type_index, func_type));
1983                    self.num_defined_funcs += 1;
1984                    func_index
1985                }
1986                // For globals, add a new global.
1987                wasmparser::types::EntityType::Global(global_type) => {
1988                    self.add_arbitrary_global_of_type(global_type.try_into().unwrap(), u)?
1989                }
1990                // For memories, add a new memory.
1991                wasmparser::types::EntityType::Memory(memory_type) => {
1992                    self.add_arbitrary_memory_of_type(memory_type.into())?
1993                }
1994                // For tables, add a new table.
1995                wasmparser::types::EntityType::Table(table_type) => {
1996                    self.add_arbitrary_table_of_type(table_type.try_into().unwrap(), u)?
1997                }
1998                // For tags, add the type.
1999                wasmparser::types::EntityType::Tag(id) => {
2000                    let (func_type, subtype) = check_and_get_func_type(id);
2001                    self.rec_groups.push(self.types.len()..self.types.len() + 1);
2002                    let type_index = self.add_type(subtype);
2003                    let tag_index = self.tags.len() as u32;
2004                    self.tags.push(TagType {
2005                        func_type_idx: type_index,
                        func_type,
2007                    });
2008                    self.num_defined_tags += 1;
2009                    tag_index
2010                }
2011            };
2012            self.exports
2013                .push((export.name.to_string(), export.kind.into(), new_index));
2014            self.export_names.insert(export.name.to_string());
2015        }
2016
2017        Ok(())
2018    }
2019
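    /// Generates arbitrary exports for the items defined in this module,
    /// respecting the configured type size budget.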
2020    fn arbitrary_exports(&mut self, u: &mut Unstructured) -> Result<()> {
2021        if self.config.max_type_size < self.type_size && !self.config.export_everything {
2022            return Ok(());
2023        }
2024
        // Build up a list of candidates for each class of export.
2026        let mut choices: Vec<Vec<(ExportKind, u32)>> = Vec::with_capacity(6);
2027        choices.push(
2028            (0..self.funcs.len())
2029                .map(|i| (ExportKind::Func, i as u32))
2030                .collect(),
2031        );
2032        choices.push(
2033            (0..self.tables.len())
2034                .map(|i| (ExportKind::Table, i as u32))
2035                .collect(),
2036        );
2037        choices.push(
2038            (0..self.memories.len())
2039                .map(|i| (ExportKind::Memory, i as u32))
2040                .collect(),
2041        );
2042        choices.push(
2043            (0..self.globals.len())
2044                .map(|i| (ExportKind::Global, i as u32))
2045                .collect(),
2046        );
2047
2048        // If the configuration demands exporting everything, we do so here and
2049        // early-return.
2050        if self.config.export_everything {
2051            for choices_by_kind in choices {
2052                for (kind, idx) in choices_by_kind {
2053                    let name = unique_string(1_000, &mut self.export_names, u)?;
2054                    self.add_arbitrary_export(name, kind, idx)?;
2055                }
2056            }
2057            return Ok(());
2058        }
2059
2060        arbitrary_loop(u, self.config.min_exports, self.config.max_exports, |u| {
2061            // Remove all candidates for export whose type size exceeds our
2062            // remaining budget for type size. Then also remove any classes
2063            // of exports which no longer have any candidates.
2064            //
2065            // If there's nothing remaining after this, then we're done.
2066            let max_size = self.config.max_type_size - self.type_size;
2067            for list in choices.iter_mut() {
2068                list.retain(|(kind, idx)| self.type_of(*kind, *idx).size() + 1 < max_size);
2069            }
2070            choices.retain(|list| !list.is_empty());
2071            if choices.is_empty() {
2072                return Ok(false);
2073            }
2074
2075            // Pick a name, then pick the export, and then we can record
2076            // information about the chosen export.
2077            let name = unique_string(1_000, &mut self.export_names, u)?;
2078            let list = u.choose(&choices)?;
2079            let (kind, idx) = *u.choose(list)?;
2080            self.add_arbitrary_export(name, kind, idx)?;
2081            Ok(true)
2082        })
2083    }
2084
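    /// Adds a single export with the given name, kind, and index, failing if
    /// the configured maximum type size would be exceeded.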
2085    fn add_arbitrary_export(&mut self, name: String, kind: ExportKind, idx: u32) -> Result<()> {
2086        let ty = self.type_of(kind, idx);
2087        self.type_size += 1 + ty.size();
2088        if self.type_size <= self.config.max_type_size {
2089            self.exports.push((name, kind, idx));
2090            Ok(())
2091        } else {
            // If adding this export takes us above the configured maximum type
            // size, we fail; this error code is not the most illustrative of
            // the cause but is the best available from `arbitrary`.
2095            Err(arbitrary::Error::IncorrectFormat)
2096        }
2097    }
2098
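    /// Optionally picks a start function from the functions that take no
    /// parameters and return no results.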
2099    fn arbitrary_start(&mut self, u: &mut Unstructured) -> Result<()> {
2100        if !self.config.allow_start_export {
2101            return Ok(());
2102        }
2103
2104        let mut choices = Vec::with_capacity(self.funcs.len());
2105
2106        for (func_idx, ty) in self.funcs() {
2107            if ty.params.is_empty() && ty.results.is_empty() {
2108                choices.push(func_idx);
2109            }
2110        }
2111
2112        if !choices.is_empty() && u.arbitrary().unwrap_or(false) {
2113            let f = *u.choose(&choices)?;
2114            self.start = Some(f);
2115        }
2116
2117        Ok(())
2118    }
2119
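    /// Generates arbitrary element segments (active, passive, or declared)
    /// for this module's tables.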
2120    fn arbitrary_elems(&mut self, u: &mut Unstructured) -> Result<()> {
2121        // Create a helper closure to choose an arbitrary offset.
2122        let mut global_i32 = vec![];
2123        let mut global_i64 = vec![];
2124        if !self.config.disallow_traps {
2125            for i in self.globals_for_const_expr(ValType::I32, true) {
2126                global_i32.push(i);
2127            }
2128            for i in self.globals_for_const_expr(ValType::I64, true) {
2129                global_i64.push(i);
2130            }
2131        }
2132        let disallow_traps = self.config.disallow_traps;
2133        let arbitrary_active_elem =
2134            |u: &mut Unstructured, min_mem_size: u64, table: Option<u32>, table_ty: &TableType| {
2135                let global_choices = if table_ty.table64 {
2136                    &global_i64
2137                } else {
2138                    &global_i32
2139                };
2140                let (offset, max_size_hint) = if !global_choices.is_empty() && u.arbitrary()? {
2141                    let g = u.choose(&global_choices)?;
2142                    (Offset::Global(*g), None)
2143                } else {
2144                    let max_mem_size = if disallow_traps {
2145                        table_ty.minimum
2146                    } else if table_ty.table64 {
2147                        u64::MAX
2148                    } else {
2149                        u64::from(u32::MAX)
2150                    };
2151                    let offset = arbitrary_offset(u, min_mem_size, max_mem_size, 0)?;
2152                    let max_size_hint = if disallow_traps
2153                        || (offset <= min_mem_size
2154                            && u.int_in_range(0..=CHANCE_OFFSET_INBOUNDS)? != 0)
2155                    {
2156                        Some(min_mem_size - offset)
2157                    } else {
2158                        None
2159                    };
2160
2161                    let offset = if table_ty.table64 {
2162                        Offset::Const64(offset as i64)
2163                    } else {
2164                        Offset::Const32(offset as i32)
2165                    };
2166                    (offset, max_size_hint)
2167                };
2168                Ok((ElementKind::Active { table, offset }, max_size_hint))
2169            };
2170
        // Generate a list of candidates for "kinds" of element segments. For
2172        // example we can have an active segment for any existing table or
2173        // passive/declared segments if the right wasm features are enabled.
2174        type GenElemSegment<'a> =
2175            dyn Fn(&mut Unstructured) -> Result<(ElementKind, Option<u64>)> + 'a;
2176        let mut choices: Vec<Box<GenElemSegment>> = Vec::new();
2177
2178        // Bulk memory enables passive/declared segments, and note that the
2179        // types used are selected later.
2180        if self.config.bulk_memory_enabled {
2181            choices.push(Box::new(|_| Ok((ElementKind::Passive, None))));
2182            choices.push(Box::new(|_| Ok((ElementKind::Declared, None))));
2183        }
2184
2185        for (i, ty) in self.tables.iter().enumerate() {
2186            // If this table starts with no capacity then any non-empty element
            // segment placed onto it will immediately trap, which isn't too
            // interesting. If that's the case, only give it a small chance of
            // proceeding.
2190            if ty.minimum == 0 && u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? != 0 {
2191                continue;
2192            }
2193
2194            let minimum = ty.minimum;
2195            // If the first table is a funcref table then it's a candidate for
2196            // the MVP encoding of element segments.
2197            let ty = *ty;
2198            if i == 0 && ty.element_type == RefType::FUNCREF {
2199                choices.push(Box::new(move |u| {
2200                    arbitrary_active_elem(u, minimum, None, &ty)
2201                }));
2202            }
2203            if self.config.bulk_memory_enabled {
2204                let idx = Some(i as u32);
2205                choices.push(Box::new(move |u| {
2206                    arbitrary_active_elem(u, minimum, idx, &ty)
2207                }));
2208            }
2209        }
2210
2211        if choices.is_empty() {
2212            return Ok(());
2213        }
2214
2215        arbitrary_loop(
2216            u,
2217            self.config.min_element_segments,
2218            self.config.max_element_segments,
2219            |u| {
2220                // Pick a kind of element segment to generate which will also
2221                // give us a hint of the maximum size, if any.
2222                let (kind, max_size_hint) = u.choose(&choices)?(u)?;
2223                let max = max_size_hint
2224                    .map(|i| usize::try_from(i).unwrap())
2225                    .unwrap_or_else(|| self.config.max_elements);
2226
2227                // Infer, from the kind of segment, the type of the element
2228                // segment. Passive/declared segments can be declared with any
2229                // reference type, but active segments must match their table.
2230                let ty = match kind {
2231                    ElementKind::Passive | ElementKind::Declared => self.arbitrary_ref_type(u)?,
2232                    ElementKind::Active { table, .. } => {
2233                        let idx = table.unwrap_or(0);
2234                        self.arbitrary_matching_ref_type(u, self.tables[idx as usize].element_type)?
2235                    }
2236                };
2237
2238                // The `Elements::Functions` encoding is only possible when the
2239                // element type is a `funcref` because the binary format can't
2240                // allow encoding any other type in that form.
2241                let can_use_function_list = ty == RefType::FUNCREF;
2242                if !self.config.reference_types_enabled {
2243                    assert!(can_use_function_list);
2244                }
2245
2246                // If a function list is possible then build up a list of
2247                // functions that can be selected from.
2248                let mut func_candidates = Vec::new();
2249                if can_use_function_list {
2250                    match ty.heap_type {
2251                        HeapType::Abstract {
2252                            ty: AbstractHeapType::Func,
2253                            ..
2254                        } => {
2255                            func_candidates.extend(0..self.funcs.len() as u32);
2256                        }
2257                        HeapType::Concrete(ty) => {
2258                            for (i, (fty, _)) in self.funcs.iter().enumerate() {
2259                                if *fty == ty {
2260                                    func_candidates.push(i as u32);
2261                                }
2262                            }
2263                        }
2264                        _ => {}
2265                    }
2266                }
2267
2268                // And finally actually generate the arbitrary elements of this
2269                // element segment. Function indices are used if they're either
2270                // forced or allowed, and otherwise expressions are used
2271                // instead.
2272                let items = if !self.config.reference_types_enabled
2273                    || (can_use_function_list && u.arbitrary()?)
2274                {
2275                    let mut init = vec![];
                    if !func_candidates.is_empty() {
2277                        arbitrary_loop(u, self.config.min_elements, max, |u| {
2278                            let func_idx = *u.choose(&func_candidates)?;
2279                            init.push(func_idx);
2280                            Ok(true)
2281                        })?;
2282                    }
2283                    Elements::Functions(init)
2284                } else {
2285                    let mut init = vec![];
2286                    arbitrary_loop(u, self.config.min_elements, max, |u| {
2287                        init.push(self.arbitrary_const_expr(ValType::Ref(ty), u, true)?);
2288                        Ok(true)
2289                    })?;
2290                    Elements::Expressions(init)
2291                };
2292
2293                self.elems.push(ElementSegment { kind, ty, items });
2294                Ok(true)
2295            },
2296        )
2297    }
2298
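    /// Generates the function bodies for all locally defined functions.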
2299    fn arbitrary_code(&mut self, u: &mut Unstructured) -> Result<()> {
2300        self.compute_interesting_values();
2301
2302        self.code.reserve(self.num_defined_funcs);
2303        let mut allocs = CodeBuilderAllocations::new(self, self.config.exports.is_some());
2304        for (idx, ty) in self.funcs[self.funcs.len() - self.num_defined_funcs..].iter() {
2305            let shared = self.is_shared_type(*idx);
2306            let body = self.arbitrary_func_body(u, ty, &mut allocs, shared)?;
2307            self.code.push(body);
2308        }
2309        allocs.finish(u, self)?;
2310        Ok(())
2311    }
2312
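    /// Generates a single function body of type `ty`, either as structured
    /// generated instructions or, when invalid functions are allowed, as
    /// arbitrary raw bytes.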
2313    fn arbitrary_func_body(
2314        &self,
2315        u: &mut Unstructured,
2316        ty: &FuncType,
2317        allocs: &mut CodeBuilderAllocations,
2318        shared: bool,
2319    ) -> Result<Code> {
2320        let mut locals = self.arbitrary_locals(u)?;
2321        let builder = allocs.builder(ty, &mut locals, shared);
2322        let instructions = if self.config.allow_invalid_funcs && u.arbitrary().unwrap_or(false) {
2323            Instructions::Arbitrary(arbitrary_vec_u8(u)?)
2324        } else {
2325            Instructions::Generated(builder.arbitrary(u, self)?)
2326        };
2327
2328        Ok(Code {
2329            locals,
2330            instructions,
2331        })
2332    }
2333
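    /// Generates an arbitrary list of local variable types for a function
    /// body.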
2334    fn arbitrary_locals(&self, u: &mut Unstructured) -> Result<Vec<ValType>> {
2335        let mut ret = Vec::new();
2336        arbitrary_loop(u, 0, 100, |u| {
2337            ret.push(self.arbitrary_valtype(u)?);
2338            Ok(true)
2339        })?;
2340        Ok(ret)
2341    }
2342
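    /// Generates arbitrary data segments (active or passive) for this
    /// module's memories.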
2343    fn arbitrary_data(&mut self, u: &mut Unstructured) -> Result<()> {
2344        // With bulk-memory we can generate passive data, otherwise if there are
2345        // no memories we can't generate any data.
2346        let memories = self.memories.len() as u32;
2347        if memories == 0 && !self.config.bulk_memory_enabled {
2348            return Ok(());
2349        }
2350        let disallow_traps = self.config.disallow_traps;
2351        let mut choices32: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
2352            vec![];
2353        choices32.push(Box::new(|u, min_size, data_len| {
2354            let min = u32::try_from(min_size.saturating_mul(64 * 1024))
2355                .unwrap_or(u32::MAX)
2356                .into();
2357            let max = if disallow_traps { min } else { u32::MAX.into() };
2358            Ok(Offset::Const32(
2359                arbitrary_offset(u, min, max, data_len)? as i32
2360            ))
2361        }));
2362        let mut choices64: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
2363            vec![];
2364        choices64.push(Box::new(|u, min_size, data_len| {
2365            let min = min_size.saturating_mul(64 * 1024);
2366            let max = if disallow_traps { min } else { u64::MAX };
2367            Ok(Offset::Const64(
2368                arbitrary_offset(u, min, max, data_len)? as i64
2369            ))
2370        }));
2371        if !self.config.disallow_traps {
2372            for i in self.globals_for_const_expr(ValType::I32, true) {
2373                choices32.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
2374            }
2375            for i in self.globals_for_const_expr(ValType::I64, true) {
2376                choices64.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
2377            }
2378        }
2379
2380        // Build a list of candidate memories that we'll add data initializers
2381        // for. If a memory doesn't have an initial size then any initializers
        // for that memory will cause instantiation to trap, which isn't too
2383        // interesting. Try to make this happen less often by making it less
2384        // likely that a memory with 0 size will have a data segment.
2385        let mut memories = Vec::new();
2386        for (i, mem) in self.memories.iter().enumerate() {
2387            if mem.minimum > 0 || u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? == 0 {
2388                memories.push(i as u32);
2389            }
2390        }
2391
2392        // With memories we can generate data segments, and with bulk memory we
2393        // can generate passive segments. Without these though we can't create
2394        // a valid module with data segments.
2395        if memories.is_empty() && !self.config.bulk_memory_enabled {
2396            return Ok(());
2397        }
2398
2399        arbitrary_loop(
2400            u,
2401            self.config.min_data_segments,
2402            self.config.max_data_segments,
2403            |u| {
2404                let mut init: Vec<u8> = u.arbitrary()?;
2405
2406                // Passive data can only be generated if bulk memory is enabled.
                // If there are no candidate memories then we *only* generate
                // passive data. Otherwise, when both kinds are possible, an
                // input byte determines whether the segment should be passive
                // or active.
2410                let kind =
2411                    if self.config.bulk_memory_enabled && (memories.is_empty() || u.arbitrary()?) {
2412                        DataSegmentKind::Passive
2413                    } else {
2414                        let memory_index = *u.choose(&memories)?;
2415                        let mem = &self.memories[memory_index as usize];
2416                        let f = if mem.memory64 {
2417                            u.choose(&choices64)?
2418                        } else {
2419                            u.choose(&choices32)?
2420                        };
2421                        let mut offset = f(u, mem.minimum, init.len())?;
2422
2423                        // If traps are disallowed then truncate the size of the
2424                        // data segment to the minimum size of memory to guarantee
                        // it will fit. Afterwards ensure that the offset of the
                        // data segment is in-bounds by clamping it to the
                        // largest offset at which the segment still fits.
2427                        if self.config.disallow_traps {
2428                            let max_size = (u64::MAX / 64 / 1024).min(mem.minimum) * 64 * 1024;
2429                            init.truncate(max_size as usize);
2430                            let max_offset = max_size - init.len() as u64;
2431                            match &mut offset {
2432                                Offset::Const32(x) => {
2433                                    *x = (*x as u64).min(max_offset) as i32;
2434                                }
2435                                Offset::Const64(x) => {
2436                                    *x = (*x as u64).min(max_offset) as i64;
2437                                }
2438                                Offset::Global(_) => unreachable!(),
2439                            }
2440                        }
2441                        DataSegmentKind::Active {
2442                            offset,
2443                            memory_index,
2444                        }
2445                    };
2446                self.data.push(DataSegment { kind, init });
2447                Ok(true)
2448            },
2449        )
2450    }
2451
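    /// Returns the parameter and result types of the given block type.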
2452    fn params_results(&self, ty: &BlockType) -> (Vec<ValType>, Vec<ValType>) {
2453        match ty {
2454            BlockType::Empty => (vec![], vec![]),
2455            BlockType::Result(t) => (vec![], vec![*t]),
2456            BlockType::FunctionType(ty) => {
2457                let ty = self.func_type(*ty);
2458                (ty.params.to_vec(), ty.results.to_vec())
2459            }
2460        }
2461    }
2462
2463    /// Returns an iterator of all globals which can be used in constant
2464    /// expressions for a value of type `ty` specified.
2465    fn globals_for_const_expr(
2466        &self,
2467        ty: ValType,
2468        allow_defined_globals: bool,
2469    ) -> impl Iterator<Item = u32> + '_ {
2470        // Before the GC proposal only imported globals could be referenced, but
        // the GC proposal relaxed this restriction to allow any global.
2472        let num_imported_globals = self.globals.len() - self.defined_globals.len();
2473        let max_global = if self.config.gc_enabled && allow_defined_globals {
2474            self.globals.len()
2475        } else {
2476            num_imported_globals
2477        };
2478
2479        self.globals[..max_global]
2480            .iter()
2481            .enumerate()
2482            .filter_map(move |(i, g)| {
2483                // Mutable globals cannot participate in constant expressions,
                // but otherwise, so long as the global's type is a subtype of
                // the desired type, it's a candidate.
2486                if !g.mutable && self.val_type_is_sub_type(g.val_type, ty) {
2487                    Some(i as u32)
2488                } else {
2489                    None
2490                }
2491            })
2492    }
2493
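    /// Populates `interesting_values32` and `interesting_values64` with values
    /// that are likely to exercise edge cases: zero, min/max values, powers of
    /// two, repeating bit patterns, float special values, and table/memory
    /// bounds.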
2494    fn compute_interesting_values(&mut self) {
2495        debug_assert!(self.interesting_values32.is_empty());
2496        debug_assert!(self.interesting_values64.is_empty());
2497
2498        let mut interesting_values32 = HashSet::new();
2499        let mut interesting_values64 = HashSet::new();
2500
2501        let mut interesting = |val: u64| {
2502            interesting_values32.insert(val as u32);
2503            interesting_values64.insert(val);
2504        };
2505
2506        // Zero is always interesting.
2507        interesting(0);
2508
2509        // Max values are always interesting.
2510        interesting(u8::MAX as _);
2511        interesting(u16::MAX as _);
2512        interesting(u32::MAX as _);
2513        interesting(u64::MAX);
2514
2515        // Min values are always interesting.
2516        interesting(i8::MIN as _);
2517        interesting(i16::MIN as _);
2518        interesting(i32::MIN as _);
2519        interesting(i64::MIN as _);
2520
2521        for i in 0..64 {
2522            // Powers of two.
2523            interesting(1 << i);
2524
2525            // Inverted powers of two.
2526            interesting(!(1 << i));
2527
2528            // Powers of two minus one, AKA high bits unset and low bits set.
2529            interesting((1 << i) - 1);
2530
2531            // Negative powers of two, AKA high bits set and low bits unset.
2532            interesting(((1_i64 << 63) >> i) as _);
2533        }
2534
2535        // Some repeating bit patterns.
2536        for pattern in [0b01010101, 0b00010001, 0b00010001, 0b00000001] {
2537            for b in [pattern, !pattern] {
2538                interesting(u64::from_ne_bytes([b, b, b, b, b, b, b, b]));
2539            }
2540        }
2541
2542        // Interesting float values.
2543        let mut interesting_f64 = |x: f64| interesting(x.to_bits());
2544        interesting_f64(0.0);
2545        interesting_f64(-0.0);
2546        interesting_f64(f64::INFINITY);
2547        interesting_f64(f64::NEG_INFINITY);
2548        interesting_f64(f64::EPSILON);
2549        interesting_f64(-f64::EPSILON);
2550        interesting_f64(f64::MIN);
2551        interesting_f64(f64::MIN_POSITIVE);
2552        interesting_f64(f64::MAX);
2553        interesting_f64(f64::NAN);
2554        let mut interesting_f32 = |x: f32| interesting(x.to_bits() as _);
2555        interesting_f32(0.0);
2556        interesting_f32(-0.0);
2557        interesting_f32(f32::INFINITY);
2558        interesting_f32(f32::NEG_INFINITY);
2559        interesting_f32(f32::EPSILON);
2560        interesting_f32(-f32::EPSILON);
2561        interesting_f32(f32::MIN);
2562        interesting_f32(f32::MIN_POSITIVE);
2563        interesting_f32(f32::MAX);
2564        interesting_f32(f32::NAN);
2565
2566        // Interesting values related to table bounds.
2567        for t in self.tables.iter() {
2568            interesting(t.minimum as _);
2569            if let Some(x) = t.minimum.checked_add(1) {
2570                interesting(x as _);
2571            }
2572
2573            if let Some(x) = t.maximum {
2574                interesting(x as _);
2575                if let Some(y) = x.checked_add(1) {
2576                    interesting(y as _);
2577                }
2578            }
2579        }
2580
2581        // Interesting values related to memory bounds.
2582        for m in self.memories.iter() {
2583            let min = m.minimum.saturating_mul(crate::page_size(m).into());
2584            interesting(min);
2585            for i in 0..5 {
2586                if let Some(x) = min.checked_add(1 << i) {
2587                    interesting(x);
2588                }
2589                if let Some(x) = min.checked_sub(1 << i) {
2590                    interesting(x);
2591                }
2592            }
2593
2594            if let Some(max) = m.maximum {
2595                let max = max.saturating_mul(crate::page_size(m).into());
2596                interesting(max);
2597                for i in 0..5 {
2598                    if let Some(x) = max.checked_add(1 << i) {
2599                        interesting(x);
2600                    }
2601                    if let Some(x) = max.checked_sub(1 << i) {
2602                        interesting(x);
2603                    }
2604                }
2605            }
2606        }
2607
2608        self.interesting_values32.extend(interesting_values32);
2609        self.interesting_values64.extend(interesting_values64);
2610
2611        // Sort for determinism.
2612        self.interesting_values32.sort();
2613        self.interesting_values64.sort();
2614    }
2615
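    /// Generates an arbitrary constant instruction of type `ty`, choosing
    /// either one of the precomputed "interesting" values or a fully arbitrary
    /// value.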
2616    fn arbitrary_const_instruction(
2617        &self,
2618        ty: ValType,
2619        u: &mut Unstructured<'_>,
2620    ) -> Result<Instruction> {
        debug_assert!(!self.interesting_values32.is_empty());
        debug_assert!(!self.interesting_values64.is_empty());
2623        match ty {
2624            ValType::I32 => Ok(Instruction::I32Const(if u.arbitrary()? {
2625                *u.choose(&self.interesting_values32)? as i32
2626            } else {
2627                u.arbitrary()?
2628            })),
2629            ValType::I64 => Ok(Instruction::I64Const(if u.arbitrary()? {
2630                *u.choose(&self.interesting_values64)? as i64
2631            } else {
2632                u.arbitrary()?
2633            })),
2634            ValType::F32 => Ok(Instruction::F32Const(if u.arbitrary()? {
2635                f32::from_bits(*u.choose(&self.interesting_values32)?).into()
2636            } else {
2637                u.arbitrary::<f32>()?.into()
2638            })),
2639            ValType::F64 => Ok(Instruction::F64Const(if u.arbitrary()? {
2640                f64::from_bits(*u.choose(&self.interesting_values64)?).into()
2641            } else {
2642                u.arbitrary::<f64>()?.into()
2643            })),
2644            ValType::V128 => Ok(Instruction::V128Const(if u.arbitrary()? {
2645                let upper = (*u.choose(&self.interesting_values64)? as i128) << 64;
2646                let lower = *u.choose(&self.interesting_values64)? as i128;
2647                upper | lower
2648            } else {
2649                u.arbitrary()?
2650            })),
2651            ValType::Ref(ty) => {
2652                assert!(ty.nullable);
2653                Ok(Instruction::RefNull(ty.heap_type))
2654            }
2655        }
2656    }
2657
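    /// Runs `f` with `self.must_share` temporarily set to `must_share`,
    /// restoring the previous value afterwards.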
2658    fn propagate_shared<T>(&mut self, must_share: bool, mut f: impl FnMut(&mut Self) -> T) -> T {
2659        let tmp = mem::replace(&mut self.must_share, must_share);
2660        let result = f(self);
2661        self.must_share = tmp;
2662        result
2663    }
2664
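    /// Decides whether a generated item should be shared: always when
    /// sharedness must be propagated, otherwise with a 1-in-4 chance when
    /// shared-everything-threads is enabled.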
2665    fn arbitrary_shared(&self, u: &mut Unstructured) -> Result<bool> {
2666        if self.must_share {
2667            Ok(true)
2668        } else {
2669            Ok(self.config.shared_everything_threads_enabled && u.ratio(1, 4)?)
2670        }
2671    }
2672
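    /// Returns whether the given reference type is shared.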
2673    fn is_shared_ref_type(&self, ty: RefType) -> bool {
2674        match ty.heap_type {
2675            HeapType::Abstract { shared, .. } => shared,
2676            HeapType::Concrete(i) => self.types[i as usize].composite_type.shared,
2677        }
2678    }
2679
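    /// Returns whether the type at `index` is shared.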
2680    fn is_shared_type(&self, index: u32) -> bool {
2681        let index = usize::try_from(index).unwrap();
2682        let ty = self.types.get(index).unwrap();
2683        ty.composite_type.shared
2684    }
2685}
2686
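/// Generates a `(minimum, maximum)` pair of limits where the minimum is biased
/// to stay within `max_inbounds` and the maximum, when present, never exceeds
/// `max_minimum`.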
2687pub(crate) fn arbitrary_limits64(
2688    u: &mut Unstructured,
2689    min_minimum: Option<u64>,
2690    max_minimum: u64,
2691    max_required: bool,
2692    max_inbounds: u64,
2693) -> Result<(u64, Option<u64>)> {
2694    assert!(
2695        min_minimum.unwrap_or(0) <= max_minimum,
2696        "{} <= {max_minimum}",
2697        min_minimum.unwrap_or(0),
2698    );
2699    assert!(
2700        min_minimum.unwrap_or(0) <= max_inbounds,
2701        "{} <= {max_inbounds}",
2702        min_minimum.unwrap_or(0),
2703    );
2704
2705    let min = gradually_grow(u, min_minimum.unwrap_or(0), max_inbounds, max_minimum)?;
2706    assert!(min <= max_minimum, "{min} <= {max_minimum}");
2707
2708    let max = if max_required || u.arbitrary().unwrap_or(false) {
2709        Some(u.int_in_range(min..=max_minimum)?)
2710    } else {
2711        None
2712    };
2713    assert!(min <= max.unwrap_or(min), "{min} <= {}", max.unwrap_or(min));
2714
2715    Ok((min, max))
2716}
2717
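/// Computes the set of value types that generated modules may use, based on
/// which proposals are enabled in `config`.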
2718pub(crate) fn configured_valtypes(config: &Config) -> Vec<ValType> {
2719    let mut valtypes = Vec::with_capacity(25);
2720    valtypes.push(ValType::I32);
2721    valtypes.push(ValType::I64);
2722    if config.allow_floats {
2723        valtypes.push(ValType::F32);
2724        valtypes.push(ValType::F64);
2725    }
2726    if config.simd_enabled {
2727        valtypes.push(ValType::V128);
2728    }
2729    if config.gc_enabled && config.reference_types_enabled {
2730        for nullable in [
            // TODO: For now, only allow nullable reference
            // types. Eventually we should support non-nullable reference types,
            // but this means that we will also need to recognize when it is
            // impossible to create an instance of the reference (e.g. `(ref
2735            // nofunc)` has no instances, and self-referential types that
2736            // contain a non-null self-reference are also impossible to create).
2737            true,
2738        ] {
2739            use AbstractHeapType::*;
2740            let abs_ref_types = [
2741                Any, Eq, I31, Array, Struct, None, Func, NoFunc, Extern, NoExtern,
2742            ];
2743            valtypes.extend(
2744                abs_ref_types
2745                    .iter()
2746                    .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, false))),
2747            );
2748            if config.shared_everything_threads_enabled {
2749                valtypes.extend(
2750                    abs_ref_types
2751                        .iter()
2752                        .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, true))),
2753                );
2754            }
2755        }
2756    } else if config.reference_types_enabled {
2757        valtypes.push(ValType::EXTERNREF);
2758        valtypes.push(ValType::FUNCREF);
2759    }
2760    valtypes
2761}
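// For example, with `allow_floats`, `simd_enabled`, and
// `reference_types_enabled` set but `gc_enabled` unset (a hypothetical
// configuration, not necessarily the default), the list above comes out as:
//
//     [I32, I64, F32, F64, V128, EXTERNREF, FUNCREF]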
2762
2763pub(crate) fn arbitrary_table_type(
2764    u: &mut Unstructured,
2765    config: &Config,
2766    module: Option<&Module>,
2767) -> Result<TableType> {
2768    let table64 = config.memory64_enabled && u.arbitrary()?;
2769    // We don't want to generate tables that are too large on average, so
2770    // keep the "inbounds" limit here a bit smaller.
2771    let max_inbounds = 10_000;
2772    let min_elements = if config.disallow_traps { Some(1) } else { None };
2773    let max_elements = min_elements.unwrap_or(0).max(config.max_table_elements);
2774    let (minimum, maximum) = arbitrary_limits64(
2775        u,
2776        min_elements,
2777        max_elements,
2778        config.table_max_size_required,
2779        max_inbounds.min(max_elements),
2780    )?;
2781    if config.disallow_traps {
2782        assert!(minimum > 0);
2783    }
2784    let element_type = match module {
2785        Some(module) => module.arbitrary_ref_type(u)?,
2786        None => RefType::FUNCREF,
2787    };
2788
2789    // Propagate the element type's sharedness to the table type.
2790    let shared = match module {
2791        Some(module) => module.is_shared_ref_type(element_type),
2792        None => false,
2793    };
2794
2795    Ok(TableType {
2796        element_type,
2797        minimum,
2798        maximum,
2799        table64,
2800        shared,
2801    })
2802}
2803
2804pub(crate) fn arbitrary_memtype(u: &mut Unstructured, config: &Config) -> Result<MemoryType> {
2805    // When threads are enabled, we only want to generate shared memories about
2806    // 25% of the time.
2807    let shared = config.threads_enabled && u.ratio(1, 4)?;
2808
2809    let memory64 = config.memory64_enabled && u.arbitrary()?;
2810    let page_size_log2 = if config.custom_page_sizes_enabled && u.arbitrary()? {
2811        Some(if u.arbitrary()? { 0 } else { 16 })
2812    } else {
2813        None
2814    };
2815
2816    let min_pages = if config.disallow_traps { Some(1) } else { None };
2817    let max_pages = min_pages.unwrap_or(0).max(if memory64 {
2818        u64::try_from(config.max_memory64_bytes >> page_size_log2.unwrap_or(16))
2819            // Can only fail when we have a custom page size of 1 byte and a
2820            // memory size of `2**64 == u64::MAX + 1`. In this case, just
2821            // saturate to `u64::MAX`.
2822            .unwrap_or(u64::MAX)
2823    } else {
2824        u32::try_from(config.max_memory32_bytes >> page_size_log2.unwrap_or(16))
2825            // Similar case as above, but while we could represent `2**32` in our
2826            // `u64` here, 32-bit memories' limits must fit in a `u32`.
2827            .unwrap_or(u32::MAX)
2828            .into()
2829    });
2830
2831    // We want to favor keeping the total memories <= 1gb in size.
2832    let max_all_mems_in_bytes = 1 << 30;
2833    let max_this_mem_in_bytes = max_all_mems_in_bytes / u64::try_from(config.max_memories).unwrap();
2834    let max_inbounds = max_this_mem_in_bytes >> page_size_log2.unwrap_or(16);
2835    let max_inbounds = max_inbounds.clamp(min_pages.unwrap_or(0), max_pages);
2836
2837    let (minimum, maximum) = arbitrary_limits64(
2838        u,
2839        min_pages,
2840        max_pages,
2841        config.memory_max_size_required || shared,
2842        max_inbounds,
2843    )?;
2844
2845    Ok(MemoryType {
2846        minimum,
2847        maximum,
2848        memory64,
2849        shared,
2850        page_size_log2,
2851    })
2852}
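// Worked example of the page arithmetic above (the config values here are
// hypothetical): with the default 64 KiB page size (`page_size_log2` of
// `None`, i.e. a shift of 16), `max_memory32_bytes = 1 << 32`, and
// `max_memories = 1`:
//
//     max_pages    = (1 << 32) >> 16       = 65_536 pages
//     max_inbounds = ((1 << 30) / 1) >> 16 = 16_384 pages (1 GiB)
//
// so generated minimums usually stay at or below ~16k pages even though up
// to 65_536 pages may be requested.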
2853
2854pub(crate) fn arbitrary_tag_type(
2855    u: &mut Unstructured,
2856    candidate_func_types: &[u32],
2857    get_func_type: impl FnOnce(u32) -> Rc<FuncType>,
2858) -> Result<TagType> {
2859    let max = candidate_func_types.len() - 1;
2860    let ty = candidate_func_types[u.int_in_range(0..=max)?];
2861    Ok(TagType {
2862        func_type_idx: ty,
2863        func_type: get_func_type(ty),
2864    })
2865}
2866
2867/// This function generates a number between `min` and `max`, favoring values
2868/// between `min` and `max_inbounds`.
2869///
2870/// This function is used for things like offsets and minimum sizes which, when
2871/// very large, can trivially make the wasm OOM or abort with a trap. Those are
2872/// not very interesting failure modes, so this function favors numbers in the
2873/// `min..max_inbounds` range to avoid immediate OOMs and traps.
2874fn gradually_grow(u: &mut Unstructured, min: u64, max_inbounds: u64, max: u64) -> Result<u64> {
2875    if min == max {
2876        return Ok(min);
2877    }
2878    let x = {
2879        let min = min as f64;
2880        let max = max as f64;
2881        let max_inbounds = max_inbounds as f64;
2882        let x = u.arbitrary::<u32>()?;
2883        let x = f64::from(x);
2884        let x = map_custom(
2885            x,
2886            f64::from(u32::MIN)..f64::from(u32::MAX),
2887            min..max_inbounds,
2888            min..max,
2889        );
2890        assert!(min <= x, "{min} <= {x}");
2891        assert!(x <= max, "{x} <= {max}");
2892        x.round() as u64
2893    };
2894
2895    // Conversion between `u64` and `f64` is lossy, especially for large
2896    // numbers, so just clamp the final result.
2897    return Ok(x.clamp(min, max));
2898
2899    /// Map a value from within the input range to the output range(s).
2900    ///
2901    /// This first maps the value from the input range into `0.0..1.0` and then,
2902    /// depending on where it lands, either maps it through a sixth-power curve
2903    /// (heavily favoring small values) into the `output_inbounds` range or maps
2904    /// it linearly into the full `output` range.
2905    fn map_custom(
2906        value: f64,
2907        input: Range<f64>,
2908        output_inbounds: Range<f64>,
2909        output: Range<f64>,
2910    ) -> f64 {
2911        assert!(!value.is_nan(), "{}", value);
2912        assert!(value.is_finite(), "{}", value);
2913        assert!(input.start < input.end, "{} < {}", input.start, input.end);
2914        assert!(
2915            output.start < output.end,
2916            "{} < {}",
2917            output.start,
2918            output.end
2919        );
2920        assert!(value >= input.start, "{} >= {}", value, input.start);
2921        assert!(value <= input.end, "{} <= {}", value, input.end);
2922        assert!(
2923            output.start <= output_inbounds.start,
2924            "{} <= {}",
2925            output.start,
2926            output_inbounds.start
2927        );
2928        assert!(
2929            output_inbounds.end <= output.end,
2930            "{} <= {}",
2931            output_inbounds.end,
2932            output.end
2933        );
2934
2935        let x = map_linear(value, input, 0.0..1.0);
2936        let result = if x < PCT_INBOUNDS {
2937            if output_inbounds.start == output_inbounds.end {
2938                output_inbounds.start
2939            } else {
2940                let unscaled = x * x * x * x * x * x;
2941                map_linear(unscaled, 0.0..1.0, output_inbounds)
2942            }
2943        } else {
2944            map_linear(x, 0.0..1.0, output.clone())
2945        };
2946
2947        assert!(result >= output.start, "{} >= {}", result, output.start);
2948        assert!(result <= output.end, "{} <= {}", result, output.end);
2949        result
2950    }
2951
2952    /// Map a value from within the input range linearly to the output range.
2953    ///
2954    /// For example, mapping `0.5` from the input range `0.0..1.0` to the output
2955    /// range `1.0..3.0` produces `2.0`.
2956    fn map_linear(
2957        value: f64,
2958        Range {
2959            start: in_low,
2960            end: in_high,
2961        }: Range<f64>,
2962        Range {
2963            start: out_low,
2964            end: out_high,
2965        }: Range<f64>,
2966    ) -> f64 {
2967        assert!(!value.is_nan(), "{}", value);
2968        assert!(value.is_finite(), "{}", value);
2969        assert!(in_low < in_high, "{in_low} < {in_high}");
2970        assert!(out_low < out_high, "{out_low} < {out_high}");
2971        assert!(value >= in_low, "{value} >= {in_low}");
2972        assert!(value <= in_high, "{value} <= {in_high}");
2973
2974        let dividend = out_high - out_low;
2975        let divisor = in_high - in_low;
2976        let slope = dividend / divisor;
2977        let result = out_low + (slope * (value - in_low));
2978
2979        assert!(result >= out_low, "{result} >= {out_low}");
2980        assert!(result <= out_high, "{result} <= {out_high}");
2981        result
2982    }
2983}
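// Worked example of the shaping above: a raw `u32` near `u32::MAX / 2` maps
// linearly to `x ~= 0.5`, which is below `PCT_INBOUNDS`, so it is raised to
// the sixth power (`0.5^6 = 0.015625`) and mapped into `output_inbounds`;
// with `min = 0` and `max_inbounds = 10_000` that yields roughly 156. Only
// the top ~0.5% of inputs (`x >= 0.995`) spill over into the full `min..max`
// range. Below is a minimal sketch exercising this with fixed input (the
// module name and input bytes are illustrative only):
#[cfg(test)]
mod gradually_grow_sketch {
    use super::*;
    use arbitrary::Unstructured;

    #[test]
    fn result_stays_within_min_and_max() {
        // Four bytes are enough to drive the single `u32` draw above.
        let mut u = Unstructured::new(&[0x80, 0x00, 0x00, 0x00]);
        let n = gradually_grow(&mut u, 1, 100, 10_000).unwrap();
        // Regardless of the raw input, the result is clamped to `1..=10_000`,
        // and ~99.5% of inputs land in the favored `1..=100` range.
        assert!(n >= 1 && n <= 10_000);
    }
}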
2984
2985/// Selects a reasonable offset for an element or data segment. This favors
2986/// keeping the segment in-bounds, but any offset may still be
2987/// generated.
2988fn arbitrary_offset(
2989    u: &mut Unstructured,
2990    limit_min: u64,
2991    limit_max: u64,
2992    segment_size: usize,
2993) -> Result<u64> {
2994    let size = u64::try_from(segment_size).unwrap();
2995
2996    // If the segment is too big for the whole memory, just give it any
2997    // offset.
2998    if size > limit_min {
2999        u.int_in_range(0..=limit_max)
3000    } else {
3001        gradually_grow(u, 0, limit_min - size, limit_max)
3002    }
3003}
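// For example, placing a 16-byte data segment into a memory whose minimum
// size is one 64 KiB page (`limit_min = 65_536`) favors offsets in
// `0..=65_520` so the segment usually stays in bounds, while still
// occasionally producing anything up to `limit_max`. Only when the segment
// is larger than the minimum memory size is the offset drawn uniformly from
// `0..=limit_max`.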
3004
3005fn unique_import_strings(max_size: usize, u: &mut Unstructured) -> Result<(String, String)> {
3006    let module = limited_string(max_size, u)?;
3007    let field = limited_string(max_size, u)?;
3008    Ok((module, field))
3009}
3010
3011fn arbitrary_vec_u8(u: &mut Unstructured) -> Result<Vec<u8>> {
3012    let size = u.arbitrary_len::<u8>()?;
3013    Ok(u.bytes(size)?.to_vec())
3014}
3015
3016impl EntityType {
3017    fn size(&self) -> u32 {
3018        match self {
3019            EntityType::Tag(_)
3020            | EntityType::Global(_)
3021            | EntityType::Table(_)
3022            | EntityType::Memory(_) => 1,
3023            EntityType::Func(_, ty) => 1 + (ty.params.len() + ty.results.len()) as u32,
3024        }
3025    }
3026}
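// For example, an imported function of type `(i32, i32) -> i64` has size
// 1 + 2 + 1 = 4, while imported tables, memories, globals, and tags each
// count as 1.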
3027
3028/// A container for the kinds of instructions that wasm-smith is allowed to
3029/// emit.
3030///
3031/// # Example
3032///
3033/// ```
3034/// # use wasm_smith::{InstructionKinds, InstructionKind};
3035/// let kinds = InstructionKinds::new(&[InstructionKind::Numeric, InstructionKind::Memory]);
3036/// assert!(kinds.contains(InstructionKind::Memory));
3037/// ```
3038#[derive(Clone, Copy, Debug, Default)]
3039#[cfg_attr(
3040    feature = "serde",
3041    derive(serde_derive::Deserialize, serde_derive::Serialize)
3042)]
3043pub struct InstructionKinds(pub(crate) FlagSet<InstructionKind>);
3044
3045impl InstructionKinds {
3046    /// Create a new container.
3047    pub fn new(kinds: &[InstructionKind]) -> Self {
3048        Self(kinds.iter().fold(FlagSet::default(), |ks, k| ks | *k))
3049    }
3050
3051    /// Include all [InstructionKind]s.
3052    pub fn all() -> Self {
3053        Self(FlagSet::full())
3054    }
3055
3056    /// Include no [InstructionKind]s.
3057    pub fn none() -> Self {
3058        Self(FlagSet::default())
3059    }
3060
3061    /// Check if the [InstructionKind] is contained in this set.
3062    #[inline]
3063    pub fn contains(&self, kind: InstructionKind) -> bool {
3064        self.0.contains(kind)
3065    }
3066
3067    /// Restrict each [InstructionKind] to its subset not involving floats.
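    ///
    /// For example, mirroring the type-level doc example above:
    ///
    /// ```
    /// # use wasm_smith::{InstructionKinds, InstructionKind};
    /// let kinds = InstructionKinds::new(&[InstructionKind::Numeric]).without_floats();
    /// assert!(kinds.contains(InstructionKind::NumericInt));
    /// assert!(!kinds.contains(InstructionKind::Numeric));
    /// ```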
3068    pub fn without_floats(&self) -> Self {
3069        let mut floatless = self.0;
3070        if floatless.contains(InstructionKind::Numeric) {
3071            floatless -= InstructionKind::Numeric;
3072            floatless |= InstructionKind::NumericInt;
3073        }
3074        if floatless.contains(InstructionKind::Vector) {
3075            floatless -= InstructionKind::Vector;
3076            floatless |= InstructionKind::VectorInt;
3077        }
3078        if floatless.contains(InstructionKind::Memory) {
3079            floatless -= InstructionKind::Memory;
3080            floatless |= InstructionKind::MemoryInt;
3081        }
3082        Self(floatless)
3083    }
3084}
3085
3086flags! {
3087    /// Enumerate the categories of instructions defined in the [WebAssembly
3088    /// specification](https://webassembly.github.io/spec/core/syntax/instructions.html).
3089    #[allow(missing_docs)]
3090    #[cfg_attr(feature = "_internal_cli", derive(serde_derive::Deserialize))]
3091    pub enum InstructionKind: u16 {
3092        NumericInt = 1 << 0,
3093        Numeric = (1 << 1) | (1 << 0),
3094        VectorInt = 1 << 2,
3095        Vector = (1 << 3) | (1 << 2),
3096        Reference = 1 << 4,
3097        Parametric = 1 << 5,
3098        Variable = 1 << 6,
3099        Table = 1 << 7,
3100        MemoryInt = 1 << 8,
3101        Memory = (1 << 9) | (1 << 8),
3102        Control = 1 << 10,
3103        Aggregate = 1 << 11,
3104    }
3105}
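// Note that the float-including kinds are supersets of their integer-only
// counterparts: `Numeric` sets both the `NumericInt` bit and an extra
// "floats allowed" bit, so a set built from `Numeric` also contains
// `NumericInt` (and likewise for `Vector`/`VectorInt` and
// `Memory`/`MemoryInt`). `InstructionKinds::without_floats` above relies on
// this layout when swapping each superset flag for its integer-only subset.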
3106
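/// Parse a comma-separated list of instruction-kind names (the lowercase
/// names accepted by [InstructionKind]'s `FromStr` implementation below).
///
/// ```
/// # use wasm_smith::{InstructionKind, InstructionKinds};
/// let kinds: InstructionKinds = "numeric,memory".parse().unwrap();
/// assert!(kinds.contains(InstructionKind::Numeric));
/// assert!(kinds.contains(InstructionKind::Memory));
/// ```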
3107impl FromStr for InstructionKinds {
3108    type Err = String;
3109    fn from_str(s: &str) -> std::prelude::v1::Result<Self, Self::Err> {
3110        let mut kinds = vec![];
3111        for part in s.split(",") {
3112            let kind = InstructionKind::from_str(part)?;
3113            kinds.push(kind);
3114        }
3115        Ok(InstructionKinds::new(&kinds))
3116    }
3117}
3118
3119impl FromStr for InstructionKind {
3120    type Err = String;
3121    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
3122        match s.to_lowercase().as_str() {
3123            "numeric_non_float" => Ok(InstructionKind::NumericInt),
3124            "numeric" => Ok(InstructionKind::Numeric),
3125            "vector_non_float" => Ok(InstructionKind::VectorInt),
3126            "vector" => Ok(InstructionKind::Vector),
3127            "reference" => Ok(InstructionKind::Reference),
3128            "parametric" => Ok(InstructionKind::Parametric),
3129            "variable" => Ok(InstructionKind::Variable),
3130            "table" => Ok(InstructionKind::Table),
3131            "memory_non_float" => Ok(InstructionKind::MemoryInt),
3132            "memory" => Ok(InstructionKind::Memory),
3133            "control" => Ok(InstructionKind::Control),
            "aggregate" => Ok(InstructionKind::Aggregate),
3134            _ => Err(format!("unknown instruction kind: {s}")),
3135        }
3136    }
3137}
3138
3139// Conversions from `wasmparser` to `wasm-smith`. Currently, only type conversions
3140// have been implemented.
3141#[cfg(feature = "wasmparser")]
3142impl TryFrom<wasmparser::FuncType> for FuncType {
3143    type Error = ();
3144
3145    fn try_from(value: wasmparser::FuncType) -> Result<Self, Self::Error> {
3146        Ok(FuncType {
3147            params: value
3148                .params()
3149                .iter()
3150                .copied()
3151                .map(|ty| ty.try_into().map_err(|_| ()))
3152                .collect::<Result<Vec<_>, _>>()?,
3153            results: value
3154                .results()
3155                .iter()
3156                .copied()
3157                .map(|ty| ty.try_into().map_err(|_| ()))
3158                .collect::<Result<Vec<_>, _>>()?,
3159        })
3160    }
3161}
3162
3163#[cfg(feature = "wasmparser")]
3164impl TryFrom<wasmparser::CompositeType> for CompositeType {
3165    type Error = ();
3166
3167    fn try_from(value: wasmparser::CompositeType) -> Result<Self, Self::Error> {
3168        let inner_type = match value.inner {
3169            wasmparser::CompositeInnerType::Func(func_type) => {
3170                CompositeInnerType::Func(Rc::new(func_type.try_into()?))
3171            }
3172            wasmparser::CompositeInnerType::Array(array_type) => {
3173                CompositeInnerType::Array(array_type.try_into().map_err(|_| ())?)
3174            }
3175            wasmparser::CompositeInnerType::Struct(struct_type) => {
3176                CompositeInnerType::Struct(struct_type.try_into().map_err(|_| ())?)
3177            }
3178            wasmparser::CompositeInnerType::Cont(_) => {
3179                panic!("continuation types are not currently supported by wasm-smith")
3180            }
3181        };
3182
3183        Ok(CompositeType {
3184            inner: inner_type,
3185            shared: value.shared,
3186        })
3187    }
3188}
3189
3190#[cfg(feature = "wasmparser")]
3191impl TryFrom<wasmparser::SubType> for SubType {
3192    type Error = ();
3193
3194    fn try_from(value: wasmparser::SubType) -> Result<Self, Self::Error> {
3195        Ok(SubType {
3196            is_final: value.is_final,
3197            supertype: value
3198                .supertype_idx
3199                .map(|idx| idx.as_module_index().ok_or(()))
3200                .transpose()?,
3201            composite_type: value.composite_type.try_into()?,
3202            // We cannot determine the depth of current subtype here, set it to 1
3203            // temporarily and fix it later.
3204            depth: 1,
3205        })
3206    }
3207}