1mod code_builder;
4pub(crate) mod encode;
5mod terminate;
6
7use crate::{Config, arbitrary_loop, limited_string, unique_string};
8use arbitrary::{Arbitrary, Result, Unstructured};
9use code_builder::CodeBuilderAllocations;
10use flagset::{FlagSet, flags};
11use std::collections::{HashMap, HashSet};
12use std::fmt;
13use std::mem;
14use std::ops::Range;
15use std::rc::Rc;
16use std::str::{self, FromStr};
17use wasm_encoder::{
18 AbstractHeapType, ArrayType, BlockType, ConstExpr, ExportKind, FieldType, HeapType, RefType,
19 StorageType, StructType, ValType,
20};
21pub(crate) use wasm_encoder::{GlobalType, MemoryType, TableType};
22
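// Tuning constants controlling how often generated offsets stay in bounds and
// how often segments target empty tables or memories.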
const CHANCE_OFFSET_INBOUNDS: usize = 10;
const CHANCE_SEGMENT_ON_EMPTY: usize = 10;
const PCT_INBOUNDS: f64 = 0.995;

type Instruction = wasm_encoder::Instruction<'static>;
34
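/// A pseudo-random WebAssembly module, generated from a `Config` and a stream
/// of arbitrary input bytes.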
35pub struct Module {
47 config: Config,
48 duplicate_imports_behavior: DuplicateImportsBehavior,
49 valtypes: Vec<ValType>,
50
51 types: Vec<SubType>,
54
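    /// Ranges into `types` marking the boundaries of each recursion group.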
55 rec_groups: Vec<Range<usize>>,
59
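    /// Maps a supertype index to the indices of the types declared as its subtypes.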
60 super_to_sub_types: HashMap<u32, Vec<u32>>,
62
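    /// Indices of non-final types that are still shallow enough to be used as supertypes.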
63 can_subtype: Vec<u32>,
65
66 should_encode_types: bool,
68
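    /// When true, only shared types may be referenced while generating types.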
69 must_share: bool,
72
73 imports: Vec<Import>,
77
78 should_encode_imports: bool,
81
82 array_types: Vec<u32>,
84
85 func_types: Vec<u32>,
87
88 struct_types: Vec<u32>,
90
91 num_imports: usize,
93
94 num_defined_tags: usize,
97
98 num_defined_funcs: usize,
101
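    /// Initializer expressions for locally-defined tables; `None` means no explicit initializer.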
102 defined_tables: Vec<Option<ConstExpr>>,
104
105 num_defined_memories: usize,
108
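    /// Locally-defined globals, as pairs of global index and initializer expression.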
109 defined_globals: Vec<(u32, ConstExpr)>,
112
113 tags: Vec<TagType>,
116
117 funcs: Vec<(u32, Rc<FuncType>)>,
121
122 tables: Vec<TableType>,
125
126 globals: Vec<GlobalType>,
129
130 memories: Vec<MemoryType>,
133
134 exports: Vec<(String, ExportKind, u32)>,
135 start: Option<u32>,
136 elems: Vec<ElementSegment>,
137 code: Vec<Code>,
138 data: Vec<DataSegment>,
139
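    /// Running measure of the combined size of the entity types generated so far,
    /// compared against `config.max_type_size`.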
140 type_size: u32,
143
144 export_names: HashSet<String>,
146
147 const_expr_choices: Vec<Box<dyn Fn(&mut Unstructured, ValType) -> Result<ConstExpr>>>,
150
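    /// Upper bound on which type indices may be referenced while a type is being generated.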
151 max_type_limit: MaxTypeLimit,
153
154 interesting_values32: Vec<u32>,
157 interesting_values64: Vec<u64>,
158}
159
160impl<'a> Arbitrary<'a> for Module {
161 fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
162 Module::new(Config::default(), u)
163 }
164}
165
166impl fmt::Debug for Module {
167 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
168 f.debug_struct("Module")
169 .field("config", &self.config)
170 .field(&"...", &"...")
171 .finish()
172 }
173}
174
175#[derive(Debug, Clone, Copy, PartialEq, Eq)]
176pub(crate) enum DuplicateImportsBehavior {
177 Allowed,
178 Disallowed,
179}
180
181#[derive(Debug, Clone, Copy, PartialEq, Eq)]
182enum AllowEmptyRecGroup {
183 Yes,
184 No,
185}
186
187#[derive(Debug, Clone, Copy, PartialEq, Eq)]
188enum MaxTypeLimit {
189 ModuleTypes,
190 Num(u32),
191}
192
193impl Module {
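    /// Returns the configuration used to generate this module.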
194 pub fn config(&self) -> &Config {
196 &self.config
197 }
198
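    /// Builds a new `Module` from the given configuration, drawing all random
    /// choices from `u`.
    ///
    /// Illustrative sketch only; it assumes this type and `Config` are
    /// re-exported at the crate root (as in wasm-smith) and that the caller
    /// returns an `arbitrary::Result`:
    ///
    /// ```ignore
    /// use arbitrary::Unstructured;
    ///
    /// let data = vec![0u8; 1024];
    /// let mut u = Unstructured::new(&data);
    /// let module = Module::new(Config::default(), &mut u)?;
    /// ```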
199 pub fn new(config: Config, u: &mut Unstructured<'_>) -> Result<Self> {
202 Self::new_internal(config, u, DuplicateImportsBehavior::Allowed)
203 }
204
205 pub(crate) fn new_internal(
206 config: Config,
207 u: &mut Unstructured<'_>,
208 duplicate_imports_behavior: DuplicateImportsBehavior,
209 ) -> Result<Self> {
210 let mut module = Module::empty(config, duplicate_imports_behavior);
211 module.build(u)?;
212 Ok(module)
213 }
214
215 fn empty(mut config: Config, duplicate_imports_behavior: DuplicateImportsBehavior) -> Self {
216 config.sanitize();
217 Module {
218 config,
219 duplicate_imports_behavior,
220 valtypes: Vec::new(),
221 types: Vec::new(),
222 rec_groups: Vec::new(),
223 can_subtype: Vec::new(),
224 super_to_sub_types: HashMap::new(),
225 should_encode_types: false,
226 imports: Vec::new(),
227 should_encode_imports: false,
228 array_types: Vec::new(),
229 func_types: Vec::new(),
230 struct_types: Vec::new(),
231 num_imports: 0,
232 num_defined_tags: 0,
233 num_defined_funcs: 0,
234 defined_tables: Vec::new(),
235 num_defined_memories: 0,
236 defined_globals: Vec::new(),
237 tags: Vec::new(),
238 funcs: Vec::new(),
239 tables: Vec::new(),
240 globals: Vec::new(),
241 memories: Vec::new(),
242 exports: Vec::new(),
243 start: None,
244 elems: Vec::new(),
245 code: Vec::new(),
246 data: Vec::new(),
247 type_size: 0,
248 export_names: HashSet::new(),
249 const_expr_choices: Vec::new(),
250 max_type_limit: MaxTypeLimit::ModuleTypes,
251 interesting_values32: Vec::new(),
252 interesting_values64: Vec::new(),
253 must_share: false,
254 }
255 }
256}
257
258#[derive(Clone, Debug, PartialEq, Eq, Hash)]
259pub(crate) struct SubType {
260 pub(crate) is_final: bool,
261 pub(crate) supertype: Option<u32>,
262 pub(crate) composite_type: CompositeType,
263 depth: u32,
266}
267
268impl SubType {
269 fn unwrap_struct(&self) -> &StructType {
270 self.composite_type.unwrap_struct()
271 }
272
273 fn unwrap_func(&self) -> &Rc<FuncType> {
274 self.composite_type.unwrap_func()
275 }
276
277 fn unwrap_array(&self) -> &ArrayType {
278 self.composite_type.unwrap_array()
279 }
280}
281
282#[derive(Clone, Debug, PartialEq, Eq, Hash)]
283pub(crate) struct CompositeType {
284 pub inner: CompositeInnerType,
285 pub shared: bool,
286 pub descriptor: Option<u32>,
287 pub describes: Option<u32>,
288}
289
290impl CompositeType {
291 #[cfg(any(feature = "component-model", feature = "wasmparser"))]
292 pub(crate) fn new_func(func: Rc<FuncType>, shared: bool) -> Self {
293 Self {
294 inner: CompositeInnerType::Func(func),
295 shared,
296 descriptor: None,
297 describes: None,
298 }
299 }
300
301 fn unwrap_func(&self) -> &Rc<FuncType> {
302 match &self.inner {
303 CompositeInnerType::Func(f) => f,
304 _ => panic!("not a func"),
305 }
306 }
307
308 fn unwrap_array(&self) -> &ArrayType {
309 match &self.inner {
310 CompositeInnerType::Array(a) => a,
311 _ => panic!("not an array"),
312 }
313 }
314
315 fn unwrap_struct(&self) -> &StructType {
316 match &self.inner {
317 CompositeInnerType::Struct(s) => s,
318 _ => panic!("not a struct"),
319 }
320 }
321}
322
323impl From<&CompositeType> for wasm_encoder::CompositeType {
324 fn from(ty: &CompositeType) -> Self {
325 let inner = match &ty.inner {
326 CompositeInnerType::Array(a) => wasm_encoder::CompositeInnerType::Array(*a),
327 CompositeInnerType::Func(f) => wasm_encoder::CompositeInnerType::Func(
328 wasm_encoder::FuncType::new(f.params.iter().cloned(), f.results.iter().cloned()),
329 ),
330 CompositeInnerType::Struct(s) => wasm_encoder::CompositeInnerType::Struct(s.clone()),
331 };
332 wasm_encoder::CompositeType {
333 shared: ty.shared,
334 inner,
335 descriptor: ty.descriptor,
336 describes: ty.describes,
337 }
338 }
339}
340
341#[derive(Clone, Debug, PartialEq, Eq, Hash)]
342pub(crate) enum CompositeInnerType {
343 Array(ArrayType),
344 Func(Rc<FuncType>),
345 Struct(StructType),
346}
347
348#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
350pub(crate) struct FuncType {
351 pub(crate) params: Vec<ValType>,
353 pub(crate) results: Vec<ValType>,
355}
356
357#[derive(Clone, Debug, PartialEq, Eq, Hash)]
359pub(crate) struct Import {
360 pub(crate) module: String,
362 pub(crate) field: String,
364 pub(crate) entity_type: EntityType,
366}
367
368#[derive(Clone, Debug, PartialEq, Eq, Hash)]
370pub(crate) enum EntityType {
371 Global(GlobalType),
373 Table(TableType),
375 Memory(MemoryType),
377 Tag(TagType),
379 Func(u32, Rc<FuncType>),
381}
382
383#[derive(Clone, Debug, PartialEq, Eq, Hash)]
385pub(crate) struct TagType {
386 func_type_idx: u32,
388 func_type: Rc<FuncType>,
390}
391
392#[derive(Debug)]
393struct ElementSegment {
394 kind: ElementKind,
395 ty: RefType,
396 items: Elements,
397}
398
399#[derive(Debug)]
400enum ElementKind {
401 Passive,
402 Declared,
403 Active {
        table: Option<u32>,
        offset: Offset,
406 },
407}
408
409#[derive(Debug)]
410enum Elements {
411 Functions(Vec<u32>),
412 Expressions(Vec<ConstExpr>),
413}
414
415#[derive(Debug)]
416struct Code {
417 locals: Vec<ValType>,
418 instructions: Instructions,
419}
420
421#[derive(Debug)]
422enum Instructions {
423 Generated(Vec<Instruction>),
424 Arbitrary(Vec<u8>),
425}
426
427#[derive(Debug)]
428struct DataSegment {
429 kind: DataSegmentKind,
430 init: Vec<u8>,
431}
432
433#[derive(Debug)]
434enum DataSegmentKind {
435 Passive,
436 Active { memory_index: u32, offset: Offset },
437}
438
439#[derive(Debug)]
440pub(crate) enum Offset {
441 Const32(i32),
442 Const64(i64),
443 Global(u32),
444}
445
446impl Module {
447 fn build(&mut self, u: &mut Unstructured) -> Result<()> {
448 self.valtypes = configured_valtypes(&self.config);
449
450 let mut generate_arbitrary_imports = true;
451 let mut generate_arbitrary_exports = true;
452 if self.imports_exports_from_module_shape(u)? {
453 generate_arbitrary_imports = false;
454 generate_arbitrary_exports = false;
455 }
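        // A provided `module_shape` or `available_imports` module dictates the
        // imports (and, for `module_shape`, the exports) instead of generating
        // arbitrary ones.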
456 if self.arbitrary_imports_from_available(u)? {
462 generate_arbitrary_imports = false;
463 }
464 self.arbitrary_types(u)?;
465 if generate_arbitrary_imports {
466 self.arbitrary_imports(u)?;
467 }
468
469 self.should_encode_imports = !self.imports.is_empty() || u.arbitrary()?;
470
471 self.arbitrary_tags(u)?;
472 self.arbitrary_funcs(u)?;
473 self.arbitrary_tables(u)?;
474 self.arbitrary_memories(u)?;
475 self.arbitrary_globals(u)?;
476 if self.required_exports(u)? {
477 generate_arbitrary_exports = false;
478 }
479 if generate_arbitrary_exports {
480 self.arbitrary_exports(u)?;
481 }
482 self.should_encode_types = !self.types.is_empty() || u.arbitrary()?;
483 self.arbitrary_start(u)?;
484 self.arbitrary_elems(u)?;
485 self.arbitrary_data(u)?;
486 self.arbitrary_code(u)?;
487 Ok(())
488 }
489
490 #[inline]
491 fn val_type_is_sub_type(&self, a: ValType, b: ValType) -> bool {
492 match (a, b) {
493 (a, b) if a == b => true,
494 (ValType::Ref(a), ValType::Ref(b)) => self.ref_type_is_sub_type(a, b),
495 _ => false,
496 }
497 }
498
499 fn ref_type_is_sub_type(&self, a: RefType, b: RefType) -> bool {
501 if a == b {
502 return true;
503 }
504
505 if a.nullable && !b.nullable {
506 return false;
507 }
508
509 self.heap_type_is_sub_type(a.heap_type, b.heap_type)
510 }
511
512 fn heap_type_is_sub_type(&self, a: HeapType, b: HeapType) -> bool {
513 use AbstractHeapType::*;
514 use CompositeInnerType as CT;
515 use HeapType as HT;
516 match (a, b) {
517 (a, b) if a == b => true,
518
519 (
520 HT::Abstract {
521 shared: a_shared,
522 ty: a_ty,
523 },
524 HT::Abstract {
525 shared: b_shared,
526 ty: b_ty,
527 },
528 ) => {
529 a_shared == b_shared
530 && match (a_ty, b_ty) {
531 (Eq | I31 | Struct | Array | None, Any) => true,
532 (I31 | Struct | Array | None, Eq) => true,
533 (NoExtern, Extern) => true,
534 (NoFunc, Func) => true,
535 (None, I31 | Array | Struct) => true,
536 (NoExn, Exn) => true,
537 _ => false,
538 }
539 }
540
541 (HT::Concrete(a), HT::Abstract { shared, ty })
542 | (HT::Exact(a), HT::Abstract { shared, ty }) => {
543 let a_ty = &self.ty(a).composite_type;
544 if a_ty.shared != shared {
545 return false;
546 }
547 match ty {
548 Eq | Any => matches!(a_ty.inner, CT::Array(_) | CT::Struct(_)),
549 Struct => matches!(a_ty.inner, CT::Struct(_)),
550 Array => matches!(a_ty.inner, CT::Array(_)),
551 Func => matches!(a_ty.inner, CT::Func(_)),
552 _ => false,
553 }
554 }
555
556 (HT::Abstract { shared, ty }, HT::Concrete(b))
557 | (HT::Abstract { shared, ty }, HT::Exact(b)) => {
558 let b_ty = &self.ty(b).composite_type;
559 if shared != b_ty.shared {
560 return false;
561 }
562 match ty {
563 None => matches!(b_ty.inner, CT::Array(_) | CT::Struct(_)),
564 NoFunc => matches!(b_ty.inner, CT::Func(_)),
565 _ => false,
566 }
567 }
568
569 (HT::Concrete(mut a), HT::Concrete(b)) | (HT::Exact(mut a), HT::Concrete(b)) => loop {
570 if a == b {
571 return true;
572 }
573 if let Some(supertype) = self.ty(a).supertype {
574 a = supertype;
575 } else {
576 return false;
577 }
578 },
579
580 (HT::Concrete(a), HT::Exact(b)) | (HT::Exact(a), HT::Exact(b)) => {
581 return a == b;
582 }
583 }
584 }
585
586 fn arbitrary_types(&mut self, u: &mut Unstructured) -> Result<()> {
587 assert!(self.config.min_types <= self.config.max_types);
588 while self.types.len() < self.config.min_types {
589 self.arbitrary_rec_group(u, AllowEmptyRecGroup::No)?;
590 }
591 while self.types.len() < self.config.max_types {
592 let keep_going = u.arbitrary().unwrap_or(false);
593 if !keep_going {
594 break;
595 }
596 self.arbitrary_rec_group(u, AllowEmptyRecGroup::Yes)?;
597 }
598 Ok(())
599 }
600
601 fn add_type(&mut self, ty: SubType) -> u32 {
602 let index = u32::try_from(self.types.len()).unwrap();
603
604 if let Some(supertype) = ty.supertype {
605 assert_eq!(self.is_shared_type(supertype), ty.composite_type.shared);
606 self.super_to_sub_types
607 .entry(supertype)
608 .or_default()
609 .push(index);
610 }
611
612 let list = match &ty.composite_type.inner {
613 CompositeInnerType::Array(_) => &mut self.array_types,
614 CompositeInnerType::Func(_) => &mut self.func_types,
615 CompositeInnerType::Struct(_) => &mut self.struct_types,
616 };
617 list.push(index);
618
619 const MAX_SUBTYPING_DEPTH: u32 = 60;
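        // Only types below this depth may be used as supertypes, keeping
        // generated subtyping chains bounded.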
627 if !ty.is_final && ty.depth < MAX_SUBTYPING_DEPTH {
628 self.can_subtype.push(index);
629 }
630
631 self.types.push(ty);
632 index
633 }
634
635 fn arbitrary_rec_group(
636 &mut self,
637 u: &mut Unstructured,
638 kind: AllowEmptyRecGroup,
639 ) -> Result<()> {
640 let rec_group_start = self.types.len();
641
642 assert!(matches!(self.max_type_limit, MaxTypeLimit::ModuleTypes));
643
644 if self.config.gc_enabled {
645 if self.rec_groups.len() > 0 && u.ratio(1, u8::MAX)? {
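                // Occasionally duplicate an existing rec group rather than
                // generating a fresh one.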
647 return self.clone_rec_group(u, kind);
648 }
649
650 let max_rec_group_size = self.config.max_types - self.types.len();
652 let min_rec_group_size = match kind {
653 AllowEmptyRecGroup::Yes => 0,
654 AllowEmptyRecGroup::No => 1,
655 };
656 let rec_group_size = u.int_in_range(min_rec_group_size..=max_rec_group_size)?;
657 let type_ref_limit = u32::try_from(self.types.len() + rec_group_size).unwrap();
658 self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
659 for _ in 0..rec_group_size {
660 let ty = self.arbitrary_sub_type(u)?;
661 self.add_type(ty);
662 }
663 } else {
664 let type_ref_limit = u32::try_from(self.types.len()).unwrap();
665 self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
666 let ty = self.arbitrary_sub_type(u)?;
667 self.add_type(ty);
668 }
669
670 self.max_type_limit = MaxTypeLimit::ModuleTypes;
671
672 self.rec_groups.push(rec_group_start..self.types.len());
673 Ok(())
674 }
675
676 fn clone_rec_group(&mut self, u: &mut Unstructured, kind: AllowEmptyRecGroup) -> Result<()> {
677 let group = u.choose(&self.rec_groups)?.clone();
682 if group.is_empty() && kind == AllowEmptyRecGroup::No {
683 return Ok(());
684 }
685 if group.len() > self.config.max_types.saturating_sub(self.types.len()) {
686 return Ok(());
687 }
688
689 let new_rec_group_start = self.types.len();
697 for index in group {
698 let orig_ty_index = u32::try_from(index).unwrap();
699 let ty = self.ty(orig_ty_index).clone();
700 self.add_type(ty);
701 }
702 self.rec_groups.push(new_rec_group_start..self.types.len());
703 Ok(())
704 }
705
706 fn arbitrary_sub_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
707 if !self.config.gc_enabled {
708 let shared = self.arbitrary_shared(u)?;
709 let func_type = self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?;
710 let composite_type = CompositeType {
711 inner: CompositeInnerType::Func(func_type),
712 shared,
713 descriptor: None,
714 describes: None,
715 };
716 return Ok(SubType {
717 is_final: true,
718 supertype: None,
719 composite_type,
720 depth: 1,
721 });
722 }
723
724 if !self.can_subtype.is_empty() && u.ratio(1, 32_u8)? {
725 self.arbitrary_sub_type_of_super_type(u)
726 } else {
727 Ok(SubType {
728 is_final: u.arbitrary()?,
729 supertype: None,
730 composite_type: self.arbitrary_composite_type(u)?,
731 depth: 1,
732 })
733 }
734 }
735
736 fn arbitrary_sub_type_of_super_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
737 let supertype = *u.choose(&self.can_subtype)?;
738 let mut composite_type = self.types[usize::try_from(supertype).unwrap()]
739 .composite_type
740 .clone();
741 match &mut composite_type.inner {
742 CompositeInnerType::Array(a) => {
743 a.0 = self.arbitrary_matching_field_type(u, a.0)?;
744 }
745 CompositeInnerType::Func(f) => {
746 *f = self.arbitrary_matching_func_type(u, f)?;
747 }
748 CompositeInnerType::Struct(s) => {
749 *s = self.propagate_shared(composite_type.shared, |m| {
750 m.arbitrary_matching_struct_type(u, s)
751 })?;
752 }
753 }
754 Ok(SubType {
755 is_final: u.arbitrary()?,
756 supertype: Some(supertype),
757 composite_type,
758 depth: 1 + self.types[supertype as usize].depth,
759 })
760 }
761
762 fn arbitrary_matching_struct_type(
763 &mut self,
764 u: &mut Unstructured,
765 ty: &StructType,
766 ) -> Result<StructType> {
767 let len_extra_fields = u.int_in_range(0..=5)?;
768 let mut fields = Vec::with_capacity(ty.fields.len() + len_extra_fields);
769 for field in ty.fields.iter() {
770 fields.push(self.arbitrary_matching_field_type(u, *field)?);
771 }
772 for _ in 0..len_extra_fields {
773 fields.push(self.arbitrary_field_type(u)?);
774 }
775 Ok(StructType {
776 fields: fields.into_boxed_slice(),
777 })
778 }
779
780 fn arbitrary_matching_field_type(
781 &mut self,
782 u: &mut Unstructured,
783 ty: FieldType,
784 ) -> Result<FieldType> {
785 if ty.mutable {
786 Ok(ty)
787 } else {
788 Ok(FieldType {
789 element_type: self.arbitrary_matching_storage_type(u, ty.element_type)?,
790 mutable: false,
791 })
792 }
793 }
794
795 fn arbitrary_matching_storage_type(
796 &mut self,
797 u: &mut Unstructured,
798 ty: StorageType,
799 ) -> Result<StorageType> {
800 match ty {
801 StorageType::I8 => Ok(StorageType::I8),
802 StorageType::I16 => Ok(StorageType::I16),
803 StorageType::Val(ty) => Ok(StorageType::Val(self.arbitrary_matching_val_type(u, ty)?)),
804 }
805 }
806
807 fn arbitrary_matching_val_type(
808 &mut self,
809 u: &mut Unstructured,
810 ty: ValType,
811 ) -> Result<ValType> {
812 match ty {
813 ValType::I32 => Ok(ValType::I32),
814 ValType::I64 => Ok(ValType::I64),
815 ValType::F32 => Ok(ValType::F32),
816 ValType::F64 => Ok(ValType::F64),
817 ValType::V128 => Ok(ValType::V128),
818 ValType::Ref(ty) => Ok(ValType::Ref(self.arbitrary_matching_ref_type(u, ty)?)),
819 }
820 }
821
822 fn arbitrary_matching_ref_type(&self, u: &mut Unstructured, ty: RefType) -> Result<RefType> {
823 Ok(RefType {
824 nullable: ty.nullable,
825 heap_type: self.arbitrary_matching_heap_type(u, ty.heap_type)?,
826 })
827 }
828
829 fn arbitrary_matching_heap_type(&self, u: &mut Unstructured, ty: HeapType) -> Result<HeapType> {
830 use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};
831
832 if !self.config.gc_enabled {
833 return Ok(ty);
834 }
835
836 let mut choices = vec![ty];
837 match ty {
838 HT::Abstract { shared, ty } => {
839 use AbstractHeapType::*;
840 let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
841 choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
842 };
843 let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
844 choices.extend(
845 tys.iter()
846 .filter(|&&idx| shared == self.is_shared_type(idx))
847 .copied()
848 .map(HT::Concrete),
849 );
850 };
851 match ty {
852 Any => {
853 add_abstract(&mut choices, &[Eq, Struct, Array, I31, None]);
854 add_concrete(&mut choices, &self.array_types);
855 add_concrete(&mut choices, &self.struct_types);
856 }
857 Eq => {
858 add_abstract(&mut choices, &[Struct, Array, I31, None]);
859 add_concrete(&mut choices, &self.array_types);
860 add_concrete(&mut choices, &self.struct_types);
861 }
862 Struct => {
863 add_abstract(&mut choices, &[Struct, None]);
864 add_concrete(&mut choices, &self.struct_types);
865 }
866 Array => {
867 add_abstract(&mut choices, &[Array, None]);
868 add_concrete(&mut choices, &self.array_types);
869 }
870 I31 => {
871 add_abstract(&mut choices, &[None]);
872 }
873 Func => {
874 add_abstract(&mut choices, &[NoFunc]);
875 add_concrete(&mut choices, &self.func_types);
876 }
877 Extern => {
878 add_abstract(&mut choices, &[NoExtern]);
879 }
880 Exn | NoExn | None | NoExtern | NoFunc | Cont | NoCont => {}
881 }
882 }
883 HT::Concrete(idx) => {
884 if let Some(subs) = self.super_to_sub_types.get(&idx) {
885 choices.extend(subs.iter().copied().map(HT::Concrete));
886 }
887 if self.config.custom_descriptors_enabled {
888 choices.push(HT::Exact(idx));
889 if let Some(subs) = self.super_to_sub_types.get(&idx) {
890 choices.extend(subs.iter().copied().map(HT::Concrete));
891 }
892 }
893 match self
894 .types
895 .get(usize::try_from(idx).unwrap())
896 .map(|ty| (ty.composite_type.shared, &ty.composite_type.inner))
897 {
898 Some((shared, CT::Array(_) | CT::Struct(_))) => choices.push(HT::Abstract {
899 shared,
900 ty: AbstractHeapType::None,
901 }),
902 Some((shared, CT::Func(_))) => choices.push(HT::Abstract {
903 shared,
904 ty: AbstractHeapType::NoFunc,
905 }),
906 None => {
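                        // The referenced type may not be in `self.types` yet if
                        // it is a forward reference within the rec group
                        // currently being generated; nothing further to add.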
907 }
913 }
914 }
915 HT::Exact(_) => (),
916 }
917 Ok(*u.choose(&choices)?)
918 }
919
920 fn arbitrary_matching_func_type(
921 &mut self,
922 u: &mut Unstructured,
923 ty: &FuncType,
924 ) -> Result<Rc<FuncType>> {
925 let mut params = Vec::with_capacity(ty.params.len());
929 for param in &ty.params {
930 params.push(self.arbitrary_super_type_of_val_type(u, *param)?);
931 }
932 let mut results = Vec::with_capacity(ty.results.len());
933 for result in &ty.results {
934 results.push(self.arbitrary_matching_val_type(u, *result)?);
935 }
936 Ok(Rc::new(FuncType { params, results }))
937 }
938
939 fn arbitrary_super_type_of_val_type(
940 &mut self,
941 u: &mut Unstructured,
942 ty: ValType,
943 ) -> Result<ValType> {
944 match ty {
945 ValType::I32 => Ok(ValType::I32),
946 ValType::I64 => Ok(ValType::I64),
947 ValType::F32 => Ok(ValType::F32),
948 ValType::F64 => Ok(ValType::F64),
949 ValType::V128 => Ok(ValType::V128),
950 ValType::Ref(ty) => Ok(ValType::Ref(self.arbitrary_super_type_of_ref_type(u, ty)?)),
951 }
952 }
953
954 fn arbitrary_super_type_of_ref_type(
955 &self,
956 u: &mut Unstructured,
957 ty: RefType,
958 ) -> Result<RefType> {
959 Ok(RefType {
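            // A non-nullable reference is a subtype of its nullable
            // counterpart, so the supertype generated here is always nullable.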
960 nullable: true,
967 heap_type: self.arbitrary_super_type_of_heap_type(u, ty.heap_type)?,
968 })
969 }
970
971 fn arbitrary_super_type_of_heap_type(
972 &self,
973 u: &mut Unstructured,
974 ty: HeapType,
975 ) -> Result<HeapType> {
976 use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};
977
978 if !self.config.gc_enabled {
979 return Ok(ty);
980 }
981
982 let mut choices = vec![ty];
983 match ty {
984 HT::Abstract { shared, ty } => {
985 use AbstractHeapType::*;
986 let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
987 choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
988 };
989 let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
990 choices.extend(
991 tys.iter()
992 .filter(|&&idx| shared == self.is_shared_type(idx))
993 .copied()
994 .map(HT::Concrete),
995 );
996 };
997 match ty {
998 None => {
999 add_abstract(&mut choices, &[Any, Eq, Struct, Array, I31]);
1000 add_concrete(&mut choices, &self.array_types);
1001 add_concrete(&mut choices, &self.struct_types);
1002 }
1003 NoExtern => {
1004 add_abstract(&mut choices, &[Extern]);
1005 }
1006 NoFunc => {
1007 add_abstract(&mut choices, &[Func]);
1008 add_concrete(&mut choices, &self.func_types);
1009 }
1010 NoExn => {
1011 add_abstract(&mut choices, &[Exn]);
1012 }
1013 Struct | Array | I31 => {
1014 add_abstract(&mut choices, &[Any, Eq]);
1015 }
1016 Eq => {
1017 add_abstract(&mut choices, &[Any]);
1018 }
1019 NoCont => {
1020 add_abstract(&mut choices, &[Cont]);
1021 }
1022 Exn | Any | Func | Extern | Cont => {}
1023 }
1024 }
1025 HT::Concrete(mut idx) => {
1026 if let Some(sub_ty) = &self.types.get(usize::try_from(idx).unwrap()) {
1027 use AbstractHeapType::*;
1028 let ht = |ty| HT::Abstract {
1029 shared: sub_ty.composite_type.shared,
1030 ty,
1031 };
1032 match &sub_ty.composite_type.inner {
1033 CT::Array(_) => {
1034 choices.extend([ht(Any), ht(Eq), ht(Array)]);
1035 }
1036 CT::Func(_) => {
1037 choices.push(ht(Func));
1038 }
1039 CT::Struct(_) => {
1040 choices.extend([ht(Any), ht(Eq), ht(Struct)]);
1041 }
1042 }
1043 } else {
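                    // `idx` may be a forward reference into the rec group
                    // currently being generated, so there is no composite type
                    // to inspect yet.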
1044 }
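                // Also offer every declared supertype of this concrete type.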
1051 while let Some(supertype) = self
1052 .types
1053 .get(usize::try_from(idx).unwrap())
1054 .and_then(|ty| ty.supertype)
1055 {
1056 choices.push(HT::Concrete(supertype));
1057 idx = supertype;
1058 }
1059 }
1060 HT::Exact(_) => (),
1061 }
1062 Ok(*u.choose(&choices)?)
1063 }
1064
1065 fn arbitrary_composite_type(&mut self, u: &mut Unstructured) -> Result<CompositeType> {
1066 use CompositeInnerType as CT;
1067 let shared = self.arbitrary_shared(u)?;
1068
1069 if !self.config.gc_enabled {
1070 return Ok(CompositeType {
1071 shared,
1072 inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
1073 descriptor: None,
1074 describes: None,
1075 });
1076 }
1077
1078 match u.int_in_range(0..=2)? {
1079 0 => Ok(CompositeType {
1080 shared,
1081 inner: CT::Array(ArrayType(
1082 self.propagate_shared(shared, |m| m.arbitrary_field_type(u))?,
1083 )),
1084 descriptor: None,
1085 describes: None,
1086 }),
1087 1 => Ok(CompositeType {
1088 shared,
1089 inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
1090 descriptor: None,
1091 describes: None,
1092 }),
1093 2 => Ok(CompositeType {
1094 shared,
1095 inner: CT::Struct(self.propagate_shared(shared, |m| m.arbitrary_struct_type(u))?),
                descriptor: None,
                describes: None,
1098 }),
1099 _ => unreachable!(),
1100 }
1101 }
1102
1103 fn arbitrary_struct_type(&mut self, u: &mut Unstructured) -> Result<StructType> {
1104 let len = u.int_in_range(0..=20)?;
1105 let mut fields = Vec::with_capacity(len);
1106 for _ in 0..len {
1107 fields.push(self.arbitrary_field_type(u)?);
1108 }
1109 Ok(StructType {
1110 fields: fields.into_boxed_slice(),
1111 })
1112 }
1113
1114 fn arbitrary_field_type(&mut self, u: &mut Unstructured) -> Result<FieldType> {
1115 Ok(FieldType {
1116 element_type: self.arbitrary_storage_type(u)?,
1117 mutable: u.arbitrary()?,
1118 })
1119 }
1120
1121 fn arbitrary_storage_type(&mut self, u: &mut Unstructured) -> Result<StorageType> {
1122 match u.int_in_range(0..=2)? {
1123 0 => Ok(StorageType::I8),
1124 1 => Ok(StorageType::I16),
1125 2 => Ok(StorageType::Val(self.arbitrary_valtype(u)?)),
1126 _ => unreachable!(),
1127 }
1128 }
1129
1130 fn arbitrary_ref_type(&self, u: &mut Unstructured) -> Result<RefType> {
1131 if !self.config.reference_types_enabled {
1132 return Ok(RefType::FUNCREF);
1133 }
1134 Ok(RefType {
1135 nullable: true,
1136 heap_type: self.arbitrary_heap_type(u)?,
1137 })
1138 }
1139
1140 fn arbitrary_heap_type(&self, u: &mut Unstructured) -> Result<HeapType> {
1141 assert!(self.config.reference_types_enabled);
1142
1143 let concrete_type_limit = match self.max_type_limit {
1144 MaxTypeLimit::Num(n) => n,
1145 MaxTypeLimit::ModuleTypes => u32::try_from(self.types.len()).unwrap(),
1146 };
1147
1148 if self.config.gc_enabled && concrete_type_limit > 0 && u.arbitrary()? {
1149 let idx = u.int_in_range(0..=concrete_type_limit - 1)?;
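            // The chosen index may be a forward reference into the rec group
            // currently being generated, in which case it is not in
            // `self.types` yet.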
1150 if let Some(ty) = self.types.get(idx as usize) {
1156 if !(self.must_share && !ty.composite_type.shared) {
1159 return Ok(HeapType::Concrete(idx));
1160 }
1161 }
1162 }
1163
1164 use AbstractHeapType::*;
1165 let mut choices = vec![Func, Extern];
1166 if self.config.exceptions_enabled {
1167 choices.push(Exn);
1168 }
1169 if self.config.gc_enabled {
1170 choices.extend(
1171 [Any, None, NoExtern, NoFunc, Eq, Struct, Array, I31]
1172 .iter()
1173 .copied(),
1174 );
1175 }
1176
1177 Ok(HeapType::Abstract {
1178 shared: self.arbitrary_shared(u)?,
1179 ty: *u.choose(&choices)?,
1180 })
1181 }
1182
1183 fn arbitrary_func_type(&mut self, u: &mut Unstructured) -> Result<Rc<FuncType>> {
1184 let mut params = vec![];
1185 let mut results = vec![];
1186 let max_params = 20;
1187 arbitrary_loop(u, 0, max_params, |u| {
1188 params.push(self.arbitrary_valtype(u)?);
1189 Ok(true)
1190 })?;
1191 let max_results = if self.config.multi_value_enabled {
1192 max_params
1193 } else {
1194 1
1195 };
1196 arbitrary_loop(u, 0, max_results, |u| {
1197 results.push(self.arbitrary_valtype(u)?);
1198 Ok(true)
1199 })?;
1200 Ok(Rc::new(FuncType { params, results }))
1201 }
1202
1203 fn can_add_local_or_import_tag(&self) -> bool {
1204 self.config.exceptions_enabled
1205 && self.has_tag_func_types()
1206 && self.tags.len() < self.config.max_tags
1207 }
1208
1209 fn can_add_local_or_import_func(&self) -> bool {
1210 !self.func_types.is_empty() && self.funcs.len() < self.config.max_funcs
1211 }
1212
1213 fn can_add_local_or_import_table(&self) -> bool {
1214 self.tables.len() < self.config.max_tables
1215 }
1216
1217 fn can_add_local_or_import_global(&self) -> bool {
1218 self.globals.len() < self.config.max_globals
1219 }
1220
1221 fn can_add_local_or_import_memory(&self) -> bool {
1222 self.memories.len() < self.config.max_memories
1223 }
1224
1225 fn imports_exports_from_module_shape(&mut self, u: &mut Unstructured) -> Result<bool> {
1226 let example_module = if let Some(wasm) = self.config.module_shape.clone() {
1227 wasm
1228 } else {
1229 return Ok(false);
1230 };
1231
1232 #[cfg(feature = "wasmparser")]
1233 {
1234 self._imports_exports_from_module_shape(u, &example_module)?;
1235 Ok(true)
1236 }
1237 #[cfg(not(feature = "wasmparser"))]
1238 {
1239 let _ = (example_module, u);
1240 panic!("support for `module_shape` was disabled at compile time");
1241 }
1242 }
1243
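    /// Copies the types, imports, and exports of the `module_shape` example
    /// module into this module.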
1244 #[cfg(feature = "wasmparser")]
1245 fn _imports_exports_from_module_shape(
1246 &mut self,
1247 u: &mut Unstructured,
1248 example_module: &[u8],
1249 ) -> Result<()> {
1250 let mut available_funcs: Vec<u32> = Vec::new();
1254 let mut available_tags: Vec<wasmparser::TagType> = Vec::new();
1255 let mut available_tables: Vec<wasmparser::TableType> = Vec::new();
1256 let mut available_globals: Vec<wasmparser::GlobalType> = Vec::new();
1257 let mut available_memories: Vec<wasmparser::MemoryType> = Vec::new();
1258
1259 let mut required_types: Vec<SubType> = Vec::new();
1260 let mut required_recgrps: Vec<usize> = Vec::new();
1261 let mut required_imports: Vec<wasmparser::Import> = Vec::new();
1262 let mut required_exports: Vec<wasmparser::Export> = Vec::new();
1263 let mut validator = wasmparser::Validator::new();
1264 validator
1265 .validate_all(example_module)
1266 .expect("Failed to validate `module_shape` module");
1267 for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
1268 match payload.expect("could not parse the `module_shape` module") {
1269 wasmparser::Payload::TypeSection(type_reader) => {
1270 for recgrp in type_reader {
1271 let recgrp = recgrp.expect("could not read recursive group");
1272 required_recgrps.push(recgrp.types().len());
1273 for subtype in recgrp.into_types() {
1274 let mut subtype: SubType = subtype.try_into().unwrap();
1275 if let Some(supertype_idx) = subtype.supertype {
1276 subtype.depth = required_types[supertype_idx as usize].depth + 1;
1277 }
1278 required_types.push(subtype);
1279 }
1280 }
1281 }
1282 wasmparser::Payload::ImportSection(import_reader) => {
1283 for im in import_reader {
1284 let im = im.expect("could not read import");
1285 required_imports.push(im);
1286 }
1287 }
1288 wasmparser::Payload::ExportSection(export_reader) => {
1289 for ex in export_reader {
1290 let ex = ex.expect("could not read export");
1291 required_exports.push(ex);
1292 }
1293 }
1294 wasmparser::Payload::FunctionSection(function_reader) => {
1295 for func in function_reader {
1296 let func = func.expect("could not read function");
1297 available_funcs.push(func);
1298 }
1299 }
1300 wasmparser::Payload::TagSection(tag_reader) => {
1301 for tag in tag_reader {
1302 let tag = tag.expect("could not read tag");
1303 available_tags.push(tag);
1304 }
1305 }
1306 wasmparser::Payload::TableSection(table_reader) => {
1307 for table in table_reader {
1308 let table = table.expect("could not read table");
1309 available_tables.push(table.ty);
1310 }
1311 }
1312 wasmparser::Payload::MemorySection(memory_reader) => {
1313 for memory in memory_reader {
1314 let memory = memory.expect("could not read memory");
1315 available_memories.push(memory);
1316 }
1317 }
1318 wasmparser::Payload::GlobalSection(global_reader) => {
1319 for global in global_reader {
1320 let global = global.expect("could not read global");
1321 available_globals.push(global.ty);
1322 }
1323 }
1324 _ => {}
1325 }
1326 }
1327
1328 let mut recgrp_start_idx = self.types.len();
1331 for size in required_recgrps {
1332 self.rec_groups
1333 .push(recgrp_start_idx..recgrp_start_idx + size);
1334 recgrp_start_idx += size;
1335 }
1336 for ty in &required_types {
1337 self.add_type(ty.clone());
1338 }
1339
1340 let mut imported_funcs: Vec<u32> = Vec::new();
1344 let mut imported_tags: Vec<wasmparser::TagType> = Vec::new();
1345 let mut imported_tables: Vec<wasmparser::TableType> = Vec::new();
1346 let mut imported_globals: Vec<wasmparser::GlobalType> = Vec::new();
1347 let mut imported_memories: Vec<wasmparser::MemoryType> = Vec::new();
1348 let mut new_imports = Vec::with_capacity(required_imports.len());
1349 for import in required_imports {
1350 let entity_type = match &import.ty {
1351 wasmparser::TypeRef::Func(sig_idx) => {
1352 imported_funcs.push(*sig_idx);
1353 match required_types.get(*sig_idx as usize) {
1354 None => panic!("signature index refers to a type out of bounds"),
1355 Some(ty) => match &ty.composite_type.inner {
1356 CompositeInnerType::Func(func_type) => {
1357 let entity = EntityType::Func(*sig_idx, Rc::clone(func_type));
1358 self.funcs.push((*sig_idx, Rc::clone(func_type)));
1359 entity
1360 }
1361 _ => panic!("a function type is required for function import"),
1362 },
1363 }
1364 }
1365
1366 wasmparser::TypeRef::FuncExact(_) => panic!("Unexpected func_exact import"),
1367
1368 wasmparser::TypeRef::Tag(wasmparser::TagType {
1369 kind,
1370 func_type_idx,
1371 }) => {
1372 imported_tags.push(wasmparser::TagType {
1373 kind: *kind,
1374 func_type_idx: *func_type_idx,
1375 });
1376 match required_types.get(*func_type_idx as usize) {
1377 None => {
1378 panic!("function type index for tag refers to a type out of bounds")
1379 }
1380 Some(ty) => match &ty.composite_type.inner {
1381 CompositeInnerType::Func(func_type) => {
1382 let tag_type = TagType {
1383 func_type_idx: *func_type_idx,
1384 func_type: Rc::clone(func_type),
1385 };
1386 let entity = EntityType::Tag(tag_type.clone());
1387 self.tags.push(tag_type);
1388 entity
1389 }
1390 _ => panic!("a function type is required for tag import"),
1391 },
1392 }
1393 }
1394
1395 wasmparser::TypeRef::Table(table_ty) => {
1396 imported_tables.push(*table_ty);
1397 let table_ty = TableType::try_from(*table_ty).unwrap();
1398 let entity = EntityType::Table(table_ty);
1399 self.tables.push(table_ty);
1400 entity
1401 }
1402
1403 wasmparser::TypeRef::Memory(memory_ty) => {
1404 imported_memories.push(*memory_ty);
1405 let memory_ty = MemoryType::from(*memory_ty);
1406 let entity = EntityType::Memory(memory_ty);
1407 self.memories.push(memory_ty);
1408 entity
1409 }
1410
1411 wasmparser::TypeRef::Global(global_ty) => {
1412 imported_globals.push(*global_ty);
1413 let global_ty = GlobalType::try_from(*global_ty).unwrap();
1414 let entity = EntityType::Global(global_ty);
1415 self.globals.push(global_ty);
1416 entity
1417 }
1418 };
1419 new_imports.push(Import {
1420 module: import.module.to_string(),
1421 field: import.name.to_string(),
1422 entity_type,
1423 });
1424 self.num_imports += 1;
1425 }
1426 self.imports.extend(new_imports);
1427 available_tags.splice(0..0, imported_tags);
1428 available_funcs.splice(0..0, imported_funcs);
1429 available_tables.splice(0..0, imported_tables);
1430 available_globals.splice(0..0, imported_globals);
1431 available_memories.splice(0..0, imported_memories);
1432
1433 for export in required_exports {
1435 let index = match export.kind {
1436 wasmparser::ExternalKind::Func | wasmparser::ExternalKind::FuncExact => {
1437 match available_funcs.get(export.index as usize) {
1438 None => panic!("function index out of bounds"),
1439 Some(sig_idx) => match required_types.get(*sig_idx as usize) {
1440 None => panic!("signature index refers to a type out of bounds"),
1441 Some(ty) => match &ty.composite_type.inner {
1442 CompositeInnerType::Func(func_type) => {
1443 let func_index = self.funcs.len() as u32;
1444 self.funcs.push((*sig_idx, Rc::clone(func_type)));
1445 self.num_defined_funcs += 1;
1446 func_index
1447 }
1448 _ => panic!("a function type is required for function export"),
1449 },
1450 },
1451 }
1452 }
1453
1454 wasmparser::ExternalKind::Tag => match available_tags.get(export.index as usize) {
1455 None => panic!("tag index out of bounds"),
1456 Some(wasmparser::TagType { func_type_idx, .. }) => {
1457 match required_types.get(*func_type_idx as usize) {
1458 None => {
1459 panic!("function type index for tag refers to a type out of bounds")
1460 }
1461 Some(ty) => match &ty.composite_type.inner {
1462 CompositeInnerType::Func(func_type) => {
1463 let tag_index = self.tags.len() as u32;
1464 self.tags.push(TagType {
1465 func_type_idx: *func_type_idx,
1466 func_type: Rc::clone(func_type),
1467 });
1468 self.num_defined_tags += 1;
1469 tag_index
1470 }
1471 _ => panic!("a function type is required for tag export"),
1472 },
1473 }
1474 }
1475 },
1476
1477 wasmparser::ExternalKind::Table => {
1478 match available_tables.get(export.index as usize) {
1479 None => panic!("table index out of bounds"),
1480 Some(ty) => {
1481 self.add_arbitrary_table_of_type((*ty).try_into().unwrap(), u)?
1482 }
1483 }
1484 }
1485
1486 wasmparser::ExternalKind::Memory => {
1487 match available_memories.get(export.index as usize) {
1488 None => panic!("memory index out of bounds"),
1489 Some(ty) => self.add_arbitrary_memory_of_type((*ty).into())?,
1490 }
1491 }
1492
1493 wasmparser::ExternalKind::Global => {
1494 match available_globals.get(export.index as usize) {
1495 None => panic!("global index out of bounds"),
1496 Some(ty) => {
1497 self.add_arbitrary_global_of_type((*ty).try_into().unwrap(), u)?
1498 }
1499 }
1500 }
1501 };
1502 self.exports
1503 .push((export.name.to_string(), export.kind.into(), index));
1504 self.export_names.insert(export.name.to_string());
1505 }
1506
1507 Ok(())
1508 }
1509
1510 fn arbitrary_imports(&mut self, u: &mut Unstructured) -> Result<()> {
1511 if self.config.max_type_size < self.type_size {
1512 return Ok(());
1513 }
1514
1515 let mut import_strings = HashSet::new();
1516 let mut choices: Vec<fn(&mut Unstructured, &mut Module) -> Result<EntityType>> =
1517 Vec::with_capacity(5);
1518 let min = self.config.min_imports.saturating_sub(self.num_imports);
1519 let max = self.config.max_imports.saturating_sub(self.num_imports);
1520 arbitrary_loop(u, min, max, |u| {
1521 choices.clear();
1522 if self.can_add_local_or_import_tag() {
1523 choices.push(|u, m| {
1524 let ty = m.arbitrary_tag_type(u)?;
1525 Ok(EntityType::Tag(ty))
1526 });
1527 }
1528 if self.can_add_local_or_import_func() {
1529 choices.push(|u, m| {
1530 let idx = *u.choose(&m.func_types)?;
1531 let ty = m.func_type(idx).clone();
1532 Ok(EntityType::Func(idx, ty))
1533 });
1534 }
1535 if self.can_add_local_or_import_global() {
1536 choices.push(|u, m| {
1537 let ty = m.arbitrary_global_type(u)?;
1538 Ok(EntityType::Global(ty))
1539 });
1540 }
1541 if self.can_add_local_or_import_memory() {
1542 choices.push(|u, m| {
1543 let ty = arbitrary_memtype(u, m.config())?;
1544 Ok(EntityType::Memory(ty))
1545 });
1546 }
1547 if self.can_add_local_or_import_table() {
1548 choices.push(|u, m| {
1549 let ty = arbitrary_table_type(u, m.config(), Some(m))?;
1550 Ok(EntityType::Table(ty))
1551 });
1552 }
1553
1554 if choices.is_empty() {
1555 return Ok(false);
1560 }
1561
1562 let f = u.choose(&choices)?;
1565 let entity_type = f(u, self)?;
1566 let budget = self.config.max_type_size - self.type_size;
1567 if entity_type.size() + 1 > budget {
1568 return Ok(false);
1569 }
1570 self.type_size += entity_type.size() + 1;
1571
1572 let mut import_pair = unique_import_strings(1_000, u)?;
1574 if self.duplicate_imports_behavior == DuplicateImportsBehavior::Disallowed {
1575 while import_strings.contains(&import_pair) {
1576 use std::fmt::Write;
1577 write!(&mut import_pair.1, "{}", import_strings.len()).unwrap();
1578 }
1579 import_strings.insert(import_pair.clone());
1580 }
1581 let (module, field) = import_pair;
1582
1583 match &entity_type {
1586 EntityType::Tag(ty) => self.tags.push(ty.clone()),
1587 EntityType::Func(idx, ty) => self.funcs.push((*idx, ty.clone())),
1588 EntityType::Global(ty) => self.globals.push(*ty),
1589 EntityType::Table(ty) => self.tables.push(*ty),
1590 EntityType::Memory(ty) => self.memories.push(*ty),
1591 }
1592
1593 self.num_imports += 1;
1594 self.imports.push(Import {
1595 module,
1596 field,
1597 entity_type,
1598 });
1599 Ok(true)
1600 })?;
1601
1602 Ok(())
1603 }
1604
1605 fn arbitrary_imports_from_available(&mut self, u: &mut Unstructured) -> Result<bool> {
1611 let example_module = if let Some(wasm) = self.config.available_imports.take() {
1612 wasm
1613 } else {
1614 return Ok(false);
1615 };
1616
1617 #[cfg(feature = "wasmparser")]
1618 {
1619 self._arbitrary_imports_from_available(u, &example_module)?;
1620 Ok(true)
1621 }
1622 #[cfg(not(feature = "wasmparser"))]
1623 {
1624 let _ = (example_module, u);
1625 panic!("support for `available_imports` was disabled at compile time");
1626 }
1627 }
1628
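    /// Adds an arbitrary subset of the imports declared by the
    /// `available_imports` module, along with the types they require.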
1629 #[cfg(feature = "wasmparser")]
1630 fn _arbitrary_imports_from_available(
1631 &mut self,
1632 u: &mut Unstructured,
1633 example_module: &[u8],
1634 ) -> Result<()> {
1635 let mut new_recgrps = Vec::<usize>::new();
1642 let mut available_types = Vec::<SubType>::new();
1643 let mut available_imports = Vec::<wasmparser::Import>::new();
1644 let mut validator = wasmparser::Validator::new();
1645 validator
1646 .validate_all(example_module)
            .expect("Failed to validate `available_imports` module");
1648 for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
1649 match payload.expect("could not parse the available import payload") {
1650 wasmparser::Payload::TypeSection(type_reader) => {
1651 for recgrp in type_reader {
1652 let recgrp = recgrp.expect("could not read recursive group");
1653 new_recgrps.push(recgrp.types().len());
1654 for subtype in recgrp.into_types() {
1655 let mut subtype: SubType = subtype.try_into().unwrap();
1656 if let Some(supertype_idx) = subtype.supertype {
1657 subtype.depth = available_types[supertype_idx as usize].depth + 1;
1658 }
1659 available_types.push(subtype);
1660 }
1661 }
1662 }
1663 wasmparser::Payload::ImportSection(import_reader) => {
1664 for im in import_reader {
1665 let im = im.expect("could not read import");
1666 let use_import = u.arbitrary().unwrap_or(false);
1669 if !use_import {
1670 continue;
1671 }
1672 available_imports.push(im);
1673 }
1674 }
1675 _ => {}
1676 }
1677 }
1678
1679 let mut new_imports = Vec::with_capacity(available_imports.len());
1684 for import in available_imports {
1685 let type_size_budget = self.config.max_type_size - self.type_size;
1686 let entity_type = match &import.ty {
1687 wasmparser::TypeRef::Func(sig_idx) => {
1688 if self.funcs.len() >= self.config.max_funcs {
1689 continue;
1690 } else {
1691 match available_types.get(*sig_idx as usize) {
1692 None => panic!("signature index refers to a type out of bounds"),
1693 Some(ty) => match &ty.composite_type.inner {
1694 CompositeInnerType::Func(func_type) => {
1695 let entity = EntityType::Func(*sig_idx, Rc::clone(func_type));
1696 if type_size_budget < entity.size() {
1697 continue;
1698 }
1699 self.funcs.push((*sig_idx, Rc::clone(func_type)));
1700 entity
1701 }
1702 _ => panic!("a function type is required for function import"),
1703 },
1704 }
1705 }
1706 }
1707
1708 wasmparser::TypeRef::FuncExact(_) => panic!("Unexpected func_exact import"),
1709
1710 wasmparser::TypeRef::Tag(wasmparser::TagType { func_type_idx, .. }) => {
1711 let can_add_tag = self.tags.len() < self.config.max_tags;
1712 if !self.config.exceptions_enabled || !can_add_tag {
1713 continue;
1714 } else {
1715 match available_types.get(*func_type_idx as usize) {
1716 None => {
1717 panic!("function type index for tag refers to a type out of bounds")
1718 }
1719 Some(ty) => match &ty.composite_type.inner {
1720 CompositeInnerType::Func(func_type) => {
1721 let tag_type = TagType {
1722 func_type_idx: *func_type_idx,
1723 func_type: Rc::clone(func_type),
1724 };
1725 let entity = EntityType::Tag(tag_type.clone());
1726 if type_size_budget < entity.size() {
1727 continue;
1728 }
1729 self.tags.push(tag_type);
1730 entity
1731 }
1732 _ => panic!("a function type is required for tag import"),
1733 },
1734 }
1735 }
1736 }
1737
1738 wasmparser::TypeRef::Table(table_ty) => {
1739 let table_ty = TableType::try_from(*table_ty).unwrap();
1740 let entity = EntityType::Table(table_ty);
1741 let type_size = entity.size();
1742 if type_size_budget < type_size || !self.can_add_local_or_import_table() {
1743 continue;
1744 }
1745 self.type_size += type_size;
1746 self.tables.push(table_ty);
1747 entity
1748 }
1749
1750 wasmparser::TypeRef::Memory(memory_ty) => {
1751 let memory_ty = MemoryType::from(*memory_ty);
1752 let entity = EntityType::Memory(memory_ty);
1753 let type_size = entity.size();
1754 if type_size_budget < type_size || !self.can_add_local_or_import_memory() {
1755 continue;
1756 }
1757 self.type_size += type_size;
1758 self.memories.push(memory_ty);
1759 entity
1760 }
1761
1762 wasmparser::TypeRef::Global(global_ty) => {
1763 let global_ty = GlobalType::try_from(*global_ty).unwrap();
1764 let entity = EntityType::Global(global_ty);
1765 let type_size = entity.size();
1766 if type_size_budget < type_size || !self.can_add_local_or_import_global() {
1767 continue;
1768 }
1769 self.type_size += type_size;
1770 self.globals.push(global_ty);
1771 entity
1772 }
1773 };
1774 new_imports.push(Import {
1775 module: import.module.to_string(),
1776 field: import.name.to_string(),
1777 entity_type,
1778 });
1779 self.num_imports += 1;
1780 }
1781
1782 let mut recgrp_start_idx = self.types.len();
1784 for size in new_recgrps {
1785 self.rec_groups
1786 .push(recgrp_start_idx..recgrp_start_idx + size);
1787 recgrp_start_idx += size;
1788 }
1789 for ty in available_types {
1790 self.add_type(ty);
1791 }
1792 self.imports.extend(new_imports);
1793
1794 Ok(())
1795 }
1796
1797 fn type_of(&self, kind: ExportKind, index: u32) -> EntityType {
1798 match kind {
1799 ExportKind::Global => EntityType::Global(self.globals[index as usize]),
1800 ExportKind::Memory => EntityType::Memory(self.memories[index as usize]),
1801 ExportKind::Table => EntityType::Table(self.tables[index as usize]),
1802 ExportKind::Func => {
1803 let (_idx, ty) = &self.funcs[index as usize];
1804 EntityType::Func(u32::max_value(), ty.clone())
1805 }
1806 ExportKind::Tag => EntityType::Tag(self.tags[index as usize].clone()),
1807 }
1808 }
1809
1810 fn ty(&self, idx: u32) -> &SubType {
1811 &self.types[idx as usize]
1812 }
1813
1814 fn func_types(&self) -> impl Iterator<Item = (u32, &FuncType)> + '_ {
1815 self.func_types
1816 .iter()
1817 .copied()
1818 .map(move |type_i| (type_i, &**self.func_type(type_i)))
1819 }
1820
1821 fn func_type(&self, idx: u32) -> &Rc<FuncType> {
1822 match &self.ty(idx).composite_type.inner {
1823 CompositeInnerType::Func(f) => f,
1824 _ => panic!("types[{idx}] is not a func type"),
1825 }
1826 }
1827
1828 fn tags(&self) -> impl Iterator<Item = (u32, &TagType)> + '_ {
1829 self.tags
1830 .iter()
1831 .enumerate()
1832 .map(move |(i, ty)| (i as u32, ty))
1833 }
1834
1835 fn funcs(&self) -> impl Iterator<Item = (u32, &Rc<FuncType>)> + '_ {
1836 self.funcs
1837 .iter()
1838 .enumerate()
1839 .map(move |(i, (_, ty))| (i as u32, ty))
1840 }
1841
1842 fn has_tag_func_types(&self) -> bool {
1843 self.tag_func_types().next().is_some()
1844 }
1845
1846 fn tag_func_types(&self) -> impl Iterator<Item = u32> + '_ {
1847 self.func_types
1848 .iter()
1849 .copied()
1850 .filter(move |i| self.func_type(*i).results.is_empty())
1851 }
1852
1853 fn arbitrary_valtype(&self, u: &mut Unstructured) -> Result<ValType> {
1854 #[derive(PartialEq, Eq, PartialOrd, Ord)]
1855 enum ValTypeClass {
1856 I32,
1857 I64,
1858 F32,
1859 F64,
1860 V128,
1861 Ref,
1862 }
1863
1864 let mut val_classes: Vec<_> = self
1865 .valtypes
1866 .iter()
1867 .map(|vt| match vt {
1868 ValType::I32 => ValTypeClass::I32,
1869 ValType::I64 => ValTypeClass::I64,
1870 ValType::F32 => ValTypeClass::F32,
1871 ValType::F64 => ValTypeClass::F64,
1872 ValType::V128 => ValTypeClass::V128,
1873 ValType::Ref(_) => ValTypeClass::Ref,
1874 })
1875 .collect();
1876 val_classes.sort_unstable();
1877 val_classes.dedup();
1878
1879 match u.choose(&val_classes)? {
1880 ValTypeClass::I32 => Ok(ValType::I32),
1881 ValTypeClass::I64 => Ok(ValType::I64),
1882 ValTypeClass::F32 => Ok(ValType::F32),
1883 ValTypeClass::F64 => Ok(ValType::F64),
1884 ValTypeClass::V128 => Ok(ValType::V128),
1885 ValTypeClass::Ref => Ok(ValType::Ref(self.arbitrary_ref_type(u)?)),
1886 }
1887 }
1888
1889 fn arbitrary_global_type(&self, u: &mut Unstructured) -> Result<GlobalType> {
1890 let val_type = self.arbitrary_valtype(u)?;
1891 let shared = match val_type {
1893 ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {
1894 self.arbitrary_shared(u)?
1895 }
1896 ValType::Ref(r) => self.is_shared_ref_type(r),
1897 };
1898 Ok(GlobalType {
1899 val_type,
1900 mutable: u.arbitrary()?,
1901 shared,
1902 })
1903 }
1904
1905 fn arbitrary_tag_type(&self, u: &mut Unstructured) -> Result<TagType> {
1906 let candidate_func_types: Vec<_> = self.tag_func_types().collect();
1907 arbitrary_tag_type(u, &candidate_func_types, |ty_idx| {
1908 self.func_type(ty_idx).clone()
1909 })
1910 }
1911
1912 fn arbitrary_tags(&mut self, u: &mut Unstructured) -> Result<()> {
1913 if !self.config.exceptions_enabled || !self.has_tag_func_types() {
1914 return Ok(());
1915 }
1916
1917 arbitrary_loop(u, self.config.min_tags, self.config.max_tags, |u| {
1918 if !self.can_add_local_or_import_tag() {
1919 return Ok(false);
1920 }
1921 self.tags.push(self.arbitrary_tag_type(u)?);
1922 self.num_defined_tags += 1;
1923 Ok(true)
1924 })
1925 }
1926
1927 fn arbitrary_funcs(&mut self, u: &mut Unstructured) -> Result<()> {
1928 if self.func_types.is_empty() {
1929 return Ok(());
1930 }
1931
1932 let unshared_func_types: Vec<_> = self
1937 .func_types
1938 .iter()
1939 .copied()
1940 .filter(|&i| !self.is_shared_type(i))
1941 .collect();
1942 if unshared_func_types.is_empty() {
1943 return Ok(());
1944 }
1945
1946 arbitrary_loop(u, self.config.min_funcs, self.config.max_funcs, |u| {
1947 if !self.can_add_local_or_import_func() {
1948 return Ok(false);
1949 }
1950 let max = unshared_func_types.len() - 1;
1951 let ty = unshared_func_types[u.int_in_range(0..=max)?];
1952 self.funcs.push((ty, self.func_type(ty).clone()));
1953 self.num_defined_funcs += 1;
1954 Ok(true)
1955 })
1956 }
1957
1958 fn arbitrary_tables(&mut self, u: &mut Unstructured) -> Result<()> {
1959 arbitrary_loop(
1960 u,
1961 self.config.min_tables as usize,
1962 self.config.max_tables,
1963 |u| {
1964 if !self.can_add_local_or_import_table() {
1965 return Ok(false);
1966 }
1967 let ty = arbitrary_table_type(u, self.config(), Some(self))?;
1968 self.add_arbitrary_table_of_type(ty, u)?;
1969 Ok(true)
1970 },
1971 )
1972 }
1973
1974 fn arbitrary_table_init(
1980 &mut self,
1981 u: &mut Unstructured,
1982 ty: RefType,
1983 ) -> Result<Option<ConstExpr>> {
1984 if !self.config.gc_enabled {
1985 assert!(ty.nullable);
1986 return Ok(None);
1987 }
1988 if ty.nullable && u.arbitrary()? {
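            // A nullable element type may omit the initializer and default to null.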
1991 return Ok(None);
1992 }
1993 let expr = self.arbitrary_const_expr(ValType::Ref(ty), u, false)?;
1996 Ok(Some(expr))
1997 }
1998
1999 fn arbitrary_memories(&mut self, u: &mut Unstructured) -> Result<()> {
2000 arbitrary_loop(
2001 u,
2002 self.config.min_memories as usize,
2003 self.config.max_memories,
2004 |u| {
2005 if !self.can_add_local_or_import_memory() {
2006 return Ok(false);
2007 }
2008 let ty = arbitrary_memtype(u, self.config())?;
2009 self.add_arbitrary_memory_of_type(ty)?;
2010 Ok(true)
2011 },
2012 )
2013 }
2014
2015 fn add_arbitrary_global_of_type(
2017 &mut self,
2018 ty: GlobalType,
2019 u: &mut Unstructured,
2020 ) -> Result<u32> {
2021 let expr = self.arbitrary_const_expr(ty.val_type, u, true)?;
2022 let global_idx = self.globals.len() as u32;
2023 self.globals.push(ty);
2024 self.defined_globals.push((global_idx, expr));
2025 Ok(global_idx)
2026 }
2027
2028 fn add_arbitrary_memory_of_type(&mut self, ty: MemoryType) -> Result<u32> {
2030 let memory_idx = self.memories.len() as u32;
2031 self.num_defined_memories += 1;
2032 self.memories.push(ty);
2033 Ok(memory_idx)
2034 }
2035
2036 fn add_arbitrary_table_of_type(&mut self, ty: TableType, u: &mut Unstructured) -> Result<u32> {
2038 let expr = self.arbitrary_table_init(u, ty.element_type)?;
2039 let table_idx = self.tables.len() as u32;
2040 self.tables.push(ty);
2041 self.defined_tables.push(expr);
2042 Ok(table_idx)
2043 }
2044
2045 fn arbitrary_const_expr(
2047 &mut self,
2048 ty: ValType,
2049 u: &mut Unstructured,
2050 allow_defined_globals: bool,
2051 ) -> Result<ConstExpr> {
2052 let mut choices = mem::take(&mut self.const_expr_choices);
2053 choices.clear();
2054
2055 for i in self.globals_for_const_expr(ty, allow_defined_globals) {
2059 choices.push(Box::new(move |_, _| Ok(ConstExpr::global_get(i))));
2060 }
2061
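        // A constant of a subtype is also valid at the requested type, so
        // optionally narrow `ty` before choosing how to produce the constant.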
2062 let ty = self.arbitrary_matching_val_type(u, ty)?;
2066 match ty {
2067 ValType::I32 => {
2068 choices.push(Box::new(|u, _| Ok(ConstExpr::i32_const(u.arbitrary()?))));
2069 if self.config.extended_const_enabled {
2070 choices.push(Box::new(arbitrary_extended_const));
2071 }
2072 }
2073 ValType::I64 => {
2074 choices.push(Box::new(|u, _| Ok(ConstExpr::i64_const(u.arbitrary()?))));
2075 if self.config.extended_const_enabled {
2076 choices.push(Box::new(arbitrary_extended_const));
2077 }
2078 }
2079 ValType::F32 => choices.push(Box::new(|u, _| {
2080 Ok(ConstExpr::f32_const(u.arbitrary::<f32>()?.into()))
2081 })),
2082 ValType::F64 => choices.push(Box::new(|u, _| {
2083 Ok(ConstExpr::f64_const(u.arbitrary::<f64>()?.into()))
2084 })),
2085 ValType::V128 => {
2086 choices.push(Box::new(|u, _| Ok(ConstExpr::v128_const(u.arbitrary()?))))
2087 }
2088
2089 ValType::Ref(ty) => {
2090 if ty.nullable {
2091 choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_null(ty.heap_type))));
2092 }
2093
2094 match ty.heap_type {
2095 HeapType::Abstract {
2096 ty: AbstractHeapType::Func,
2097 shared,
2098 } => {
2099 let num_funcs = self
2100 .funcs
2101 .iter()
2102 .filter(|(t, _)| shared == self.is_shared_type(*t))
2103 .count();
2104 if num_funcs > 0 {
2105 let pick = u.int_in_range(0..=num_funcs - 1)?;
2106 let (i, _) = self
2107 .funcs
2108 .iter()
2109 .map(|(t, _)| *t)
2110 .enumerate()
2111 .filter(|(_, t)| shared == self.is_shared_type(*t))
2112 .nth(pick)
2113 .unwrap();
2114 choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
2115 }
2116 }
2117
2118 HeapType::Concrete(ty) => {
2119 for (i, fty) in self.funcs.iter().map(|(t, _)| *t).enumerate() {
2120 if ty != fty {
2121 continue;
2122 }
2123 choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
2124 }
2125 }
2126
2127 _ => {}
2130 }
2131 }
2132 }
2133
2134 let f = u.choose(&choices)?;
2135 let ret = f(u, ty);
2136 self.const_expr_choices = choices;
2137 return ret;
2138
2139 fn arbitrary_extended_const(u: &mut Unstructured<'_>, ty: ValType) -> Result<ConstExpr> {
2147 use wasm_encoder::Instruction::*;
2148
2149 assert!(ty == ValType::I32 || ty == ValType::I64);
2152 let add = if ty == ValType::I32 { I32Add } else { I64Add };
2153 let sub = if ty == ValType::I32 { I32Sub } else { I64Sub };
2154 let mul = if ty == ValType::I32 { I32Mul } else { I64Mul };
2155 let const_: fn(&mut Unstructured<'_>) -> Result<wasm_encoder::Instruction<'static>> =
2156 if ty == ValType::I32 {
2157 |u| u.arbitrary().map(I32Const)
2158 } else {
2159 |u| u.arbitrary().map(I64Const)
2160 };
2161
2162 let mut instrs = Vec::new();
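            // Build the expression in reverse: `needed` counts operands still
            // required. A constant satisfies one operand, while add/sub/mul
            // consume two and produce one (net +1). The list is reversed at the
            // end to obtain evaluation order.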
2167 let mut needed = 1;
2168 while needed > 0 {
2169 let choice = if u.is_empty() || instrs.len() > 10 {
2173 0
2174 } else {
2175 u.int_in_range(0..=3)?
2176 };
2177 match choice {
2178 0 => {
2179 instrs.push(const_(u)?);
2180 needed -= 1;
2181 }
2182 1 => {
2183 instrs.push(add.clone());
2184 needed += 1;
2185 }
2186 2 => {
2187 instrs.push(sub.clone());
2188 needed += 1;
2189 }
2190 3 => {
2191 instrs.push(mul.clone());
2192 needed += 1;
2193 }
2194 _ => unreachable!(),
2195 }
2196 }
2197 Ok(ConstExpr::extended(instrs.into_iter().rev()))
2198 }
2199 }
2200
2201 fn arbitrary_globals(&mut self, u: &mut Unstructured) -> Result<()> {
2202 arbitrary_loop(u, self.config.min_globals, self.config.max_globals, |u| {
2203 if !self.can_add_local_or_import_global() {
2204 return Ok(false);
2205 }
2206
2207 let ty = self.arbitrary_global_type(u)?;
2208 self.add_arbitrary_global_of_type(ty, u)?;
2209
2210 Ok(true)
2211 })
2212 }
2213
2214 fn required_exports(&mut self, u: &mut Unstructured) -> Result<bool> {
2215 let example_module = if let Some(wasm) = self.config.exports.clone() {
2216 wasm
2217 } else {
2218 return Ok(false);
2219 };
2220
2221 #[cfg(feature = "wasmparser")]
2222 {
2223 self._required_exports(u, &example_module)?;
2224 Ok(true)
2225 }
2226 #[cfg(not(feature = "wasmparser"))]
2227 {
2228 let _ = (example_module, u);
2229 panic!("support for `exports` was disabled at compile time");
2230 }
2231 }
2232
2233 #[cfg(feature = "wasmparser")]
2234 fn _required_exports(&mut self, u: &mut Unstructured, example_module: &[u8]) -> Result<()> {
2235 let mut required_exports: Vec<wasmparser::Export> = vec![];
2236 let mut validator = wasmparser::Validator::new();
2237 let exports_types = validator
2238 .validate_all(&example_module)
2239 .expect("Failed to validate `exports` Wasm");
2240 for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
2241 match payload.expect("Failed to read `exports` Wasm") {
2242 wasmparser::Payload::ExportSection(export_reader) => {
2243 required_exports = export_reader
2244 .into_iter()
2245 .collect::<Result<_, _>>()
2246 .expect("Failed to read `exports` export section");
2247 }
2248 _ => {}
2249 }
2250 }
2251
2252 let exports_types = exports_types.as_ref();
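// Helper to look up a function type by `CoreTypeId` in the validated `exports` module and convert it into wasm-smith's `FuncType` and `SubType` representations.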
2254 let check_and_get_func_type =
2255 |id: wasmparser::types::CoreTypeId| -> (Rc<FuncType>, SubType) {
2256 let subtype = exports_types.get(id).unwrap_or_else(|| {
2257 panic!("Unable to get subtype for {id:?} in `exports` Wasm")
2258 });
2259 match &subtype.composite_type.inner {
2260 wasmparser::CompositeInnerType::Func(func_type) => {
2261 assert!(
2262 subtype.is_final,
2263 "Subtype {subtype:?} from `exports` Wasm is not final"
2264 );
2265 assert!(
2266 subtype.supertype_idx.is_none(),
2267 "Subtype {subtype:?} from `exports` Wasm has non-empty supertype"
2268 );
2269 let func_type = Rc::new(FuncType {
2270 params: func_type
2271 .params()
2272 .iter()
2273 .copied()
2274 .map(|t| t.try_into().unwrap())
2275 .collect(),
2276 results: func_type
2277 .results()
2278 .iter()
2279 .copied()
2280 .map(|t| t.try_into().unwrap())
2281 .collect(),
2282 });
2283 let subtype = SubType {
2284 is_final: true,
2285 supertype: None,
2286 depth: 1,
2287 composite_type: CompositeType::new_func(
2288 Rc::clone(&func_type),
2289 subtype.composite_type.shared,
2290 ),
2291 };
2292 (func_type, subtype)
2293 }
2294 _ => panic!(
2295 "Unable to handle type {:?} from `exports` Wasm",
2296 subtype.composite_type
2297 ),
2298 }
2299 };
2300 for export in required_exports {
2301 let new_index = match exports_types
2302 .entity_type_from_export(&export)
2303 .unwrap_or_else(|| {
2304 panic!("Unable to get type from export {export:?} in `exports` Wasm",)
2305 }) {
2306 wasmparser::types::EntityType::Func(id) => {
2308 let (func_type, subtype) = check_and_get_func_type(id);
2309 self.rec_groups.push(self.types.len()..self.types.len() + 1);
2310 let type_index = self.add_type(subtype);
2311 let func_index = self.funcs.len() as u32;
2312 self.funcs.push((type_index, func_type));
2313 self.num_defined_funcs += 1;
2314 func_index
2315 }
2316 wasmparser::types::EntityType::Global(global_type) => {
2318 self.add_arbitrary_global_of_type(global_type.try_into().unwrap(), u)?
2319 }
2320 wasmparser::types::EntityType::Memory(memory_type) => {
2322 self.add_arbitrary_memory_of_type(memory_type.into())?
2323 }
2324 wasmparser::types::EntityType::Table(table_type) => {
2326 self.add_arbitrary_table_of_type(table_type.try_into().unwrap(), u)?
2327 }
2328 wasmparser::types::EntityType::Tag(id) => {
2330 let (func_type, subtype) = check_and_get_func_type(id);
2331 self.rec_groups.push(self.types.len()..self.types.len() + 1);
2332 let type_index = self.add_type(subtype);
2333 let tag_index = self.tags.len() as u32;
2334 self.tags.push(TagType {
2335 func_type_idx: type_index,
2336 func_type,
2337 });
2338 self.num_defined_tags += 1;
2339 tag_index
2340 }
2341 wasmparser::types::EntityType::FuncExact(_) => {
2342 panic!("Unexpected exact function export: {export:?}");
2343 }
2344 };
2345 self.exports
2346 .push((export.name.to_string(), export.kind.into(), new_index));
2347 self.export_names.insert(export.name.to_string());
2348 }
2349
2350 Ok(())
2351 }
2352
2353 fn arbitrary_exports(&mut self, u: &mut Unstructured) -> Result<()> {
2354 if self.config.max_type_size < self.type_size && !self.config.export_everything {
2355 return Ok(());
2356 }
2357
2358 let mut choices: Vec<Vec<(ExportKind, u32)>> = Vec::with_capacity(6);
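// One candidate list per export kind: functions, tables, memories, and globals.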
2360 choices.push(
2361 (0..self.funcs.len())
2362 .map(|i| (ExportKind::Func, i as u32))
2363 .collect(),
2364 );
2365 choices.push(
2366 (0..self.tables.len())
2367 .map(|i| (ExportKind::Table, i as u32))
2368 .collect(),
2369 );
2370 choices.push(
2371 (0..self.memories.len())
2372 .map(|i| (ExportKind::Memory, i as u32))
2373 .collect(),
2374 );
2375 choices.push(
2376 (0..self.globals.len())
2377 .map(|i| (ExportKind::Global, i as u32))
2378 .collect(),
2379 );
2380
2381 if self.config.export_everything {
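// Export every function, table, memory, and global, each under a fresh unique name.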
2384 for choices_by_kind in choices {
2385 for (kind, idx) in choices_by_kind {
2386 let name = unique_string(1_000, &mut self.export_names, u)?;
2387 self.add_arbitrary_export(name, kind, idx)?;
2388 }
2389 }
2390 return Ok(());
2391 }
2392
2393 arbitrary_loop(u, self.config.min_exports, self.config.max_exports, |u| {
2394 let max_size = self.config.max_type_size - self.type_size;
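// Remove candidates whose type no longer fits in the remaining type-size budget.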
2400 for list in choices.iter_mut() {
2401 list.retain(|(kind, idx)| self.type_of(*kind, *idx).size() + 1 < max_size);
2402 }
2403 choices.retain(|list| !list.is_empty());
2404 if choices.is_empty() {
2405 return Ok(false);
2406 }
2407
2408 let name = unique_string(1_000, &mut self.export_names, u)?;
2411 let list = u.choose(&choices)?;
2412 let (kind, idx) = *u.choose(list)?;
2413 self.add_arbitrary_export(name, kind, idx)?;
2414 Ok(true)
2415 })
2416 }
2417
2418 fn add_arbitrary_export(&mut self, name: String, kind: ExportKind, idx: u32) -> Result<()> {
2419 let ty = self.type_of(kind, idx);
2420 self.type_size += 1 + ty.size();
2421 if self.type_size <= self.config.max_type_size {
2422 self.exports.push((name, kind, idx));
2423 Ok(())
2424 } else {
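// This export would push `type_size` past the configured maximum, so reject the input.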
2425 Err(arbitrary::Error::IncorrectFormat)
2429 }
2430 }
2431
2432 fn arbitrary_start(&mut self, u: &mut Unstructured) -> Result<()> {
2433 if !self.config.allow_start_export {
2434 return Ok(());
2435 }
2436
2437 let mut choices = Vec::with_capacity(self.funcs.len());
2438
2439 for (func_idx, ty) in self.funcs() {
2440 if ty.params.is_empty() && ty.results.is_empty() {
2441 choices.push(func_idx);
2442 }
2443 }
2444
2445 if !choices.is_empty() && u.arbitrary().unwrap_or(false) {
2446 let f = *u.choose(&choices)?;
2447 self.start = Some(f);
2448 }
2449
2450 Ok(())
2451 }
2452
2453 fn arbitrary_elems(&mut self, u: &mut Unstructured) -> Result<()> {
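// Collect immutable globals usable as active-segment offsets; this is skipped when traps are disallowed because a global offset cannot be proven in bounds ahead of time.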
2454 let mut global_i32 = vec![];
2456 let mut global_i64 = vec![];
2457 if !self.config.disallow_traps {
2458 for i in self.globals_for_const_expr(ValType::I32, true) {
2459 global_i32.push(i);
2460 }
2461 for i in self.globals_for_const_expr(ValType::I64, true) {
2462 global_i64.push(i);
2463 }
2464 }
2465 let disallow_traps = self.config.disallow_traps;
2466 let arbitrary_active_elem =
2467 |u: &mut Unstructured, min_mem_size: u64, table: Option<u32>, table_ty: &TableType| {
2468 let global_choices = if table_ty.table64 {
2469 &global_i64
2470 } else {
2471 &global_i32
2472 };
2473 let (offset, max_size_hint) = if !global_choices.is_empty() && u.arbitrary()? {
2474 let g = u.choose(global_choices)?;
2475 (Offset::Global(*g), None)
2476 } else {
2477 let max_mem_size = if disallow_traps {
2478 table_ty.minimum
2479 } else if table_ty.table64 {
2480 u64::MAX
2481 } else {
2482 u64::from(u32::MAX)
2483 };
2484 let offset = arbitrary_offset(u, min_mem_size, max_mem_size, 0)?;
2485 let max_size_hint = if disallow_traps
2486 || (offset <= min_mem_size
2487 && u.int_in_range(0..=CHANCE_OFFSET_INBOUNDS)? != 0)
2488 {
2489 Some(min_mem_size - offset)
2490 } else {
2491 None
2492 };
2493
2494 let offset = if table_ty.table64 {
2495 Offset::Const64(offset as i64)
2496 } else {
2497 Offset::Const32(offset as i32)
2498 };
2499 (offset, max_size_hint)
2500 };
2501 Ok((ElementKind::Active { table, offset }, max_size_hint))
2502 };
2503
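// Each generator yields an element segment kind plus an optional hint bounding how many items keep an active segment in bounds.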
2504 type GenElemSegment<'a> =
2508 dyn Fn(&mut Unstructured) -> Result<(ElementKind, Option<u64>)> + 'a;
2509 let mut choices: Vec<Box<GenElemSegment>> = Vec::new();
2510
2511 if self.config.bulk_memory_enabled {
2514 choices.push(Box::new(|_| Ok((ElementKind::Passive, None))));
2515 choices.push(Box::new(|_| Ok((ElementKind::Declared, None))));
2516 }
2517
2518 for (i, ty) in self.tables.iter().enumerate() {
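// Most of the time, skip tables with a zero minimum size: an active segment there would be out of bounds unless it is empty.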
2519 if ty.minimum == 0 && u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? != 0 {
2524 continue;
2525 }
2526
2527 let minimum = ty.minimum;
2528 let ty = *ty;
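// Table 0 with `funcref` elements may also use the MVP-style active segment encoding that omits an explicit table index.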
2531 if i == 0 && ty.element_type == RefType::FUNCREF {
2532 choices.push(Box::new(move |u| {
2533 arbitrary_active_elem(u, minimum, None, &ty)
2534 }));
2535 }
2536 if self.config.bulk_memory_enabled {
2537 let idx = Some(i as u32);
2538 choices.push(Box::new(move |u| {
2539 arbitrary_active_elem(u, minimum, idx, &ty)
2540 }));
2541 }
2542 }
2543
2544 if choices.is_empty() {
2545 return Ok(());
2546 }
2547
2548 arbitrary_loop(
2549 u,
2550 self.config.min_element_segments,
2551 self.config.max_element_segments,
2552 |u| {
2553 let (kind, max_size_hint) = u.choose(&choices)?(u)?;
2556 let max = max_size_hint
2557 .map(|i| usize::try_from(i).unwrap())
2558 .unwrap_or_else(|| self.config.max_elements);
2559
2560 let ty = match kind {
2564 ElementKind::Passive | ElementKind::Declared => self.arbitrary_ref_type(u)?,
2565 ElementKind::Active { table, .. } => {
2566 let idx = table.unwrap_or(0);
2567 self.arbitrary_matching_ref_type(u, self.tables[idx as usize].element_type)?
2568 }
2569 };
2570
2571 let can_use_function_list = ty == RefType::FUNCREF;
2575 if !self.config.reference_types_enabled {
2576 assert!(can_use_function_list);
2577 }
2578
2579 let mut func_candidates = Vec::new();
2582 if can_use_function_list {
2583 match ty.heap_type {
2584 HeapType::Abstract {
2585 ty: AbstractHeapType::Func,
2586 ..
2587 } => {
2588 func_candidates.extend(0..self.funcs.len() as u32);
2589 }
2590 HeapType::Concrete(ty) => {
2591 for (i, (fty, _)) in self.funcs.iter().enumerate() {
2592 if *fty == ty {
2593 func_candidates.push(i as u32);
2594 }
2595 }
2596 }
2597 _ => {}
2598 }
2599 }
2600
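// Encode the items either as a bare list of function indices or as constant expressions; without reference types only the index-list form is available.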
2601 let items = if !self.config.reference_types_enabled
2606 || (can_use_function_list && u.arbitrary()?)
2607 {
2608 let mut init = vec![];
2609 if !func_candidates.is_empty() {
2610 arbitrary_loop(u, self.config.min_elements, max, |u| {
2611 let func_idx = *u.choose(&func_candidates)?;
2612 init.push(func_idx);
2613 Ok(true)
2614 })?;
2615 }
2616 Elements::Functions(init)
2617 } else {
2618 let mut init = vec![];
2619 arbitrary_loop(u, self.config.min_elements, max, |u| {
2620 init.push(self.arbitrary_const_expr(ValType::Ref(ty), u, true)?);
2621 Ok(true)
2622 })?;
2623 Elements::Expressions(init)
2624 };
2625
2626 self.elems.push(ElementSegment { kind, ty, items });
2627 Ok(true)
2628 },
2629 )
2630 }
2631
2632 fn arbitrary_code(&mut self, u: &mut Unstructured) -> Result<()> {
2633 self.compute_interesting_values();
2634
2635 self.code.reserve(self.num_defined_funcs);
2636 let mut allocs = CodeBuilderAllocations::new(
2637 self,
2638 self.config.exports.is_some() || self.config.module_shape.is_some(),
2639 );
2640 for (idx, ty) in self.funcs[self.funcs.len() - self.num_defined_funcs..].iter() {
2641 let shared = self.is_shared_type(*idx);
2642 let body = self.arbitrary_func_body(u, ty, &mut allocs, shared)?;
2643 self.code.push(body);
2644 }
2645 allocs.finish(u, self)?;
2646 Ok(())
2647 }
2648
2649 fn arbitrary_func_body(
2650 &self,
2651 u: &mut Unstructured,
2652 ty: &FuncType,
2653 allocs: &mut CodeBuilderAllocations,
2654 shared: bool,
2655 ) -> Result<Code> {
2656 let mut locals = self.arbitrary_locals(u)?;
2657 let builder = allocs.builder(ty, &mut locals, shared);
2658 let instructions = if self.config.allow_invalid_funcs && u.arbitrary().unwrap_or(false) {
2659 Instructions::Arbitrary(arbitrary_vec_u8(u)?)
2660 } else {
2661 Instructions::Generated(builder.arbitrary(u, self)?)
2662 };
2663
2664 Ok(Code {
2665 locals,
2666 instructions,
2667 })
2668 }
2669
2670 fn arbitrary_locals(&self, u: &mut Unstructured) -> Result<Vec<ValType>> {
2671 let mut ret = Vec::new();
2672 arbitrary_loop(u, 0, 100, |u| {
2673 ret.push(self.arbitrary_valtype(u)?);
2674 Ok(true)
2675 })?;
2676 Ok(ret)
2677 }
2678
2679 fn arbitrary_data(&mut self, u: &mut Unstructured) -> Result<()> {
2680 let memories = self.memories.len() as u32;
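// Without bulk memory there are no passive segments, so generating data requires at least one memory to target.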
2683 if memories == 0 && !self.config.bulk_memory_enabled {
2684 return Ok(());
2685 }
2686 let disallow_traps = self.config.disallow_traps;
2687 let mut choices32: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
2688 vec![];
2689 choices32.push(Box::new(|u, min_size, data_len| {
2690 let min = u32::try_from(min_size.saturating_mul(64 * 1024))
2691 .unwrap_or(u32::MAX)
2692 .into();
2693 let max = if disallow_traps { min } else { u32::MAX.into() };
2694 Ok(Offset::Const32(
2695 arbitrary_offset(u, min, max, data_len)? as i32
2696 ))
2697 }));
2698 let mut choices64: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
2699 vec![];
2700 choices64.push(Box::new(|u, min_size, data_len| {
2701 let min = min_size.saturating_mul(64 * 1024);
2702 let max = if disallow_traps { min } else { u64::MAX };
2703 Ok(Offset::Const64(
2704 arbitrary_offset(u, min, max, data_len)? as i64
2705 ))
2706 }));
2707 if !self.config.disallow_traps {
2708 for i in self.globals_for_const_expr(ValType::I32, true) {
2709 choices32.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
2710 }
2711 for i in self.globals_for_const_expr(ValType::I64, true) {
2712 choices64.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
2713 }
2714 }
2715
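// Decide which memories may receive active segments; memories with a zero minimum are only chosen occasionally, since active segments there would usually trap.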
2716 let mut memories = Vec::new();
2722 for (i, mem) in self.memories.iter().enumerate() {
2723 if mem.minimum > 0 || u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? == 0 {
2724 memories.push(i as u32);
2725 }
2726 }
2727
2728 if memories.is_empty() && !self.config.bulk_memory_enabled {
2732 return Ok(());
2733 }
2734
2735 arbitrary_loop(
2736 u,
2737 self.config.min_data_segments,
2738 self.config.max_data_segments,
2739 |u| {
2740 let mut init: Vec<u8> = u.arbitrary()?;
2741
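// Generate a passive segment when bulk memory allows it (always when no memory qualifies); otherwise build an active segment targeting one of the chosen memories.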
2742 let kind =
2747 if self.config.bulk_memory_enabled && (memories.is_empty() || u.arbitrary()?) {
2748 DataSegmentKind::Passive
2749 } else {
2750 let memory_index = *u.choose(&memories)?;
2751 let mem = &self.memories[memory_index as usize];
2752 let f = if mem.memory64 {
2753 u.choose(&choices64)?
2754 } else {
2755 u.choose(&choices32)?
2756 };
2757 let mut offset = f(u, mem.minimum, init.len())?;
2758
2759 if self.config.disallow_traps {
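// When traps are disallowed, truncate the data so it fits in the memory's minimum size and clamp the offset so the whole segment stays in bounds.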
2764 let max_size = (u64::MAX / 64 / 1024).min(mem.minimum) * 64 * 1024;
2765 init.truncate(max_size as usize);
2766 let max_offset = max_size - init.len() as u64;
2767 match &mut offset {
2768 Offset::Const32(x) => {
2769 *x = (*x as u64).min(max_offset) as i32;
2770 }
2771 Offset::Const64(x) => {
2772 *x = (*x as u64).min(max_offset) as i64;
2773 }
2774 Offset::Global(_) => unreachable!(),
2775 }
2776 }
2777 DataSegmentKind::Active {
2778 offset,
2779 memory_index,
2780 }
2781 };
2782 self.data.push(DataSegment { kind, init });
2783 Ok(true)
2784 },
2785 )
2786 }
2787
2788 fn params_results(&self, ty: &BlockType) -> (Vec<ValType>, Vec<ValType>) {
2789 match ty {
2790 BlockType::Empty => (vec![], vec![]),
2791 BlockType::Result(t) => (vec![], vec![*t]),
2792 BlockType::FunctionType(ty) => {
2793 let ty = self.func_type(*ty);
2794 (ty.params.to_vec(), ty.results.to_vec())
2795 }
2796 }
2797 }
2798
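/// Returns the indices of globals that a constant expression of type `ty` may reference.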
2799 fn globals_for_const_expr(
2802 &self,
2803 ty: ValType,
2804 allow_defined_globals: bool,
2805 ) -> impl Iterator<Item = u32> + '_ {
2806 let num_imported_globals = self.globals.len() - self.defined_globals.len();
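// Constant expressions may normally only refer to imported globals; when the GC proposal is enabled (and defined globals are allowed), earlier defined immutable globals are also usable.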
2809 let max_global = if self.config.gc_enabled && allow_defined_globals {
2810 self.globals.len()
2811 } else {
2812 num_imported_globals
2813 };
2814
2815 self.globals[..max_global]
2816 .iter()
2817 .enumerate()
2818 .filter_map(move |(i, g)| {
2819 if !g.mutable && self.val_type_is_sub_type(g.val_type, ty) {
2823 Some(i as u32)
2824 } else {
2825 None
2826 }
2827 })
2828 }
2829
2830 fn compute_interesting_values(&mut self) {
2831 debug_assert!(self.interesting_values32.is_empty());
2832 debug_assert!(self.interesting_values64.is_empty());
2833
2834 let mut interesting_values32 = HashSet::new();
2835 let mut interesting_values64 = HashSet::new();
2836
2837 let mut interesting = |val: u64| {
2838 interesting_values32.insert(val as u32);
2839 interesting_values64.insert(val);
2840 };
2841
2842 interesting(0);
2844
2845 interesting(u8::MAX as _);
2847 interesting(u16::MAX as _);
2848 interesting(u32::MAX as _);
2849 interesting(u64::MAX);
2850
2851 interesting(i8::MIN as _);
2853 interesting(i16::MIN as _);
2854 interesting(i32::MIN as _);
2855 interesting(i64::MIN as _);
2856
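// Powers of two, their complements, neighbors, and sign-extended high-bit runs.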
2857 for i in 0..64 {
2858 interesting(1 << i);
2860
2861 interesting(!(1 << i));
2863
2864 interesting((1 << i) - 1);
2866
2867 interesting(((1_i64 << 63) >> i) as _);
2869 }
2870
2871 for pattern in [0b01010101, 0b00010001, 0b00010001, 0b00000001] {
2873 for b in [pattern, !pattern] {
2874 interesting(u64::from_ne_bytes([b, b, b, b, b, b, b, b]));
2875 }
2876 }
2877
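// A grab bag of interesting floats, recorded via their raw bit patterns.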
2878 let mut interesting_f64 = |x: f64| interesting(x.to_bits());
2880 interesting_f64(0.0);
2881 interesting_f64(-0.0);
2882 interesting_f64(f64::INFINITY);
2883 interesting_f64(f64::NEG_INFINITY);
2884 interesting_f64(f64::EPSILON);
2885 interesting_f64(-f64::EPSILON);
2886 interesting_f64(f64::MIN);
2887 interesting_f64(f64::MIN_POSITIVE);
2888 interesting_f64(f64::MAX);
2889 interesting_f64(f64::NAN);
2890 let mut interesting_f32 = |x: f32| interesting(x.to_bits() as _);
2891 interesting_f32(0.0);
2892 interesting_f32(-0.0);
2893 interesting_f32(f32::INFINITY);
2894 interesting_f32(f32::NEG_INFINITY);
2895 interesting_f32(f32::EPSILON);
2896 interesting_f32(-f32::EPSILON);
2897 interesting_f32(f32::MIN);
2898 interesting_f32(f32::MIN_POSITIVE);
2899 interesting_f32(f32::MAX);
2900 interesting_f32(f32::NAN);
2901
2902 for t in self.tables.iter() {
2904 interesting(t.minimum as _);
2905 if let Some(x) = t.minimum.checked_add(1) {
2906 interesting(x as _);
2907 }
2908
2909 if let Some(x) = t.maximum {
2910 interesting(x as _);
2911 if let Some(y) = x.checked_add(1) {
2912 interesting(y as _);
2913 }
2914 }
2915 }
2916
2917 for m in self.memories.iter() {
2919 let min = m.minimum.saturating_mul(crate::page_size(m).into());
2920 interesting(min);
2921 for i in 0..5 {
2922 if let Some(x) = min.checked_add(1 << i) {
2923 interesting(x);
2924 }
2925 if let Some(x) = min.checked_sub(1 << i) {
2926 interesting(x);
2927 }
2928 }
2929
2930 if let Some(max) = m.maximum {
2931 let max = max.saturating_mul(crate::page_size(m).into());
2932 interesting(max);
2933 for i in 0..5 {
2934 if let Some(x) = max.checked_add(1 << i) {
2935 interesting(x);
2936 }
2937 if let Some(x) = max.checked_sub(1 << i) {
2938 interesting(x);
2939 }
2940 }
2941 }
2942 }
2943
2944 self.interesting_values32.extend(interesting_values32);
2945 self.interesting_values64.extend(interesting_values64);
2946
2947 self.interesting_values32.sort();
2949 self.interesting_values64.sort();
2950 }
2951
2952 fn arbitrary_const_instruction(
2953 &self,
2954 ty: ValType,
2955 u: &mut Unstructured<'_>,
2956 ) -> Result<Instruction> {
2957 debug_assert!(!self.interesting_values32.is_empty());
2958 debug_assert!(!self.interesting_values64.is_empty());
2959 match ty {
2960 ValType::I32 => Ok(Instruction::I32Const(if u.arbitrary()? {
2961 *u.choose(&self.interesting_values32)? as i32
2962 } else {
2963 u.arbitrary()?
2964 })),
2965 ValType::I64 => Ok(Instruction::I64Const(if u.arbitrary()? {
2966 *u.choose(&self.interesting_values64)? as i64
2967 } else {
2968 u.arbitrary()?
2969 })),
2970 ValType::F32 => Ok(Instruction::F32Const(if u.arbitrary()? {
2971 f32::from_bits(*u.choose(&self.interesting_values32)?).into()
2972 } else {
2973 u.arbitrary::<f32>()?.into()
2974 })),
2975 ValType::F64 => Ok(Instruction::F64Const(if u.arbitrary()? {
2976 f64::from_bits(*u.choose(&self.interesting_values64)?).into()
2977 } else {
2978 u.arbitrary::<f64>()?.into()
2979 })),
2980 ValType::V128 => Ok(Instruction::V128Const(if u.arbitrary()? {
2981 let upper = (*u.choose(&self.interesting_values64)? as i128) << 64;
2982 let lower = *u.choose(&self.interesting_values64)? as i128;
2983 upper | lower
2984 } else {
2985 u.arbitrary()?
2986 })),
2987 ValType::Ref(ty) => {
2988 assert!(ty.nullable);
2989 Ok(Instruction::RefNull(ty.heap_type))
2990 }
2991 }
2992 }
2993
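/// Temporarily overrides `must_share` while running `f`, restoring the previous value afterwards.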
2994 fn propagate_shared<T>(&mut self, must_share: bool, mut f: impl FnMut(&mut Self) -> T) -> T {
2995 let tmp = mem::replace(&mut self.must_share, must_share);
2996 let result = f(self);
2997 self.must_share = tmp;
2998 result
2999 }
3000
3001 fn arbitrary_shared(&self, u: &mut Unstructured) -> Result<bool> {
3002 if self.must_share {
3003 Ok(true)
3004 } else {
3005 Ok(self.config.shared_everything_threads_enabled && u.ratio(1, 4)?)
3006 }
3007 }
3008
3009 fn is_shared_ref_type(&self, ty: RefType) -> bool {
3010 match ty.heap_type {
3011 HeapType::Abstract { shared, .. } => shared,
3012 HeapType::Concrete(i) | HeapType::Exact(i) => {
3013 self.types[i as usize].composite_type.shared
3014 }
3015 }
3016 }
3017
3018 fn is_shared_type(&self, index: u32) -> bool {
3019 let index = usize::try_from(index).unwrap();
3020 let ty = self.types.get(index).unwrap();
3021 ty.composite_type.shared
3022 }
3023}
3024
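/// Generates `(minimum, maximum)` limits where `minimum <= max_minimum`, biased toward `max_inbounds`; a maximum bound is always produced when `max_required` is set.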
3025pub(crate) fn arbitrary_limits64(
3026 u: &mut Unstructured,
3027 min_minimum: Option<u64>,
3028 max_minimum: u64,
3029 max_required: bool,
3030 max_inbounds: u64,
3031) -> Result<(u64, Option<u64>)> {
3032 assert!(
3033 min_minimum.unwrap_or(0) <= max_minimum,
3034 "{} <= {max_minimum}",
3035 min_minimum.unwrap_or(0),
3036 );
3037 assert!(
3038 min_minimum.unwrap_or(0) <= max_inbounds,
3039 "{} <= {max_inbounds}",
3040 min_minimum.unwrap_or(0),
3041 );
3042
3043 let min = gradually_grow(u, min_minimum.unwrap_or(0), max_inbounds, max_minimum)?;
3044 assert!(min <= max_minimum, "{min} <= {max_minimum}");
3045
3046 let max = if max_required || u.arbitrary().unwrap_or(false) {
3047 Some(u.int_in_range(min..=max_minimum)?)
3048 } else {
3049 None
3050 };
3051 assert!(min <= max.unwrap_or(min), "{min} <= {}", max.unwrap_or(min));
3052
3053 Ok((min, max))
3054}
3055
3056pub(crate) fn configured_valtypes(config: &Config) -> Vec<ValType> {
3057 let mut valtypes = Vec::with_capacity(25);
3058 valtypes.push(ValType::I32);
3059 valtypes.push(ValType::I64);
3060 if config.allow_floats {
3061 valtypes.push(ValType::F32);
3062 valtypes.push(ValType::F64);
3063 }
3064 if config.simd_enabled {
3065 valtypes.push(ValType::V128);
3066 }
3067 if config.gc_enabled && config.reference_types_enabled {
3068 for nullable in [
3069 true,
3076 ] {
3077 use AbstractHeapType::*;
3078 let abs_ref_types = [
3079 Any, Eq, I31, Array, Struct, None, Func, NoFunc, Extern, NoExtern,
3080 ];
3081 valtypes.extend(
3082 abs_ref_types
3083 .iter()
3084 .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, false))),
3085 );
3086 if config.shared_everything_threads_enabled {
3087 valtypes.extend(
3088 abs_ref_types
3089 .iter()
3090 .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, true))),
3091 );
3092 }
3093 }
3094 } else if config.reference_types_enabled {
3095 valtypes.push(ValType::EXTERNREF);
3096 valtypes.push(ValType::FUNCREF);
3097 }
3098 valtypes
3099}
3100
3101pub(crate) fn arbitrary_table_type(
3102 u: &mut Unstructured,
3103 config: &Config,
3104 module: Option<&Module>,
3105) -> Result<TableType> {
3106 let table64 = config.memory64_enabled && u.arbitrary()?;
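// Bias generated table sizes toward at most 10,000 elements.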
3107 let max_inbounds = 10_000;
3110 let min_elements = if config.disallow_traps { Some(1) } else { None };
3111 let max_elements = min_elements.unwrap_or(0).max(config.max_table_elements);
3112 let (minimum, maximum) = arbitrary_limits64(
3113 u,
3114 min_elements,
3115 max_elements,
3116 config.table_max_size_required,
3117 max_inbounds.min(max_elements),
3118 )?;
3119 if config.disallow_traps {
3120 assert!(minimum > 0);
3121 }
3122 let element_type = match module {
3123 Some(module) => module.arbitrary_ref_type(u)?,
3124 None => RefType::FUNCREF,
3125 };
3126
3127 let shared = match module {
3129 Some(module) => module.is_shared_ref_type(element_type),
3130 None => false,
3131 };
3132
3133 Ok(TableType {
3134 element_type,
3135 minimum,
3136 maximum,
3137 table64,
3138 shared,
3139 })
3140}
3141
3142pub(crate) fn arbitrary_memtype(u: &mut Unstructured, config: &Config) -> Result<MemoryType> {
3143 let shared = config.threads_enabled && u.ratio(1, 4)?;
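// Shared memories must declare a maximum; `memory_max_size_required || shared` below enforces that.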
3146
3147 let memory64 = config.memory64_enabled && u.arbitrary()?;
3148 let page_size_log2 = if config.custom_page_sizes_enabled && u.arbitrary()? {
3149 Some(if u.arbitrary()? { 0 } else { 16 })
3150 } else {
3151 None
3152 };
3153
3154 let min_pages = if config.disallow_traps { Some(1) } else { None };
3155 let max_pages = min_pages.unwrap_or(0).max(if memory64 {
3156 u64::try_from(config.max_memory64_bytes >> page_size_log2.unwrap_or(16))
3157 .unwrap_or(u64::MAX)
3161 } else {
3162 u32::try_from(config.max_memory32_bytes >> page_size_log2.unwrap_or(16))
3163 .unwrap_or(u32::MAX)
3166 .into()
3167 });
3168
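// Bound the in-bounds bias so that all memories together target at most 1 GiB.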
3169 let max_all_mems_in_bytes = 1 << 30;
3171 let max_this_mem_in_bytes = max_all_mems_in_bytes / u64::try_from(config.max_memories).unwrap();
3172 let max_inbounds = max_this_mem_in_bytes >> page_size_log2.unwrap_or(16);
3173 let max_inbounds = max_inbounds.clamp(min_pages.unwrap_or(0), max_pages);
3174
3175 let (minimum, maximum) = arbitrary_limits64(
3176 u,
3177 min_pages,
3178 max_pages,
3179 config.memory_max_size_required || shared,
3180 max_inbounds,
3181 )?;
3182
3183 Ok(MemoryType {
3184 minimum,
3185 maximum,
3186 memory64,
3187 shared,
3188 page_size_log2,
3189 })
3190}
3191
3192pub(crate) fn arbitrary_tag_type(
3193 u: &mut Unstructured,
3194 candidate_func_types: &[u32],
3195 get_func_type: impl FnOnce(u32) -> Rc<FuncType>,
3196) -> Result<TagType> {
3197 let max = candidate_func_types.len() - 1;
3198 let ty = candidate_func_types[u.int_in_range(0..=max)?];
3199 Ok(TagType {
3200 func_type_idx: ty,
3201 func_type: get_func_type(ty),
3202 })
3203}
3204
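/// Picks a value in `min..=max`, heavily biased toward the `min..=max_inbounds` sub-range and skewed toward its low end, while still occasionally producing values up to `max`.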
3205fn gradually_grow(u: &mut Unstructured, min: u64, max_inbounds: u64, max: u64) -> Result<u64> {
3213 if min == max {
3214 return Ok(min);
3215 }
3216 let x = {
3217 let min = min as f64;
3218 let max = max as f64;
3219 let max_inbounds = max_inbounds as f64;
3220 let x = u.arbitrary::<u32>()?;
3221 let x = f64::from(x);
3222 let x = map_custom(
3223 x,
3224 f64::from(u32::MIN)..f64::from(u32::MAX),
3225 min..max_inbounds,
3226 min..max,
3227 );
3228 assert!(min <= x, "{min} <= {x}");
3229 assert!(x <= max, "{x} <= {max}");
3230 x.round() as u64
3231 };
3232
3233 return Ok(x.clamp(min, max));
3236
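/// Maps `value` from the `input` range onto the `output` range such that most results land in `output_inbounds` (skewed toward its low end) and the rest cover the full `output` range.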
3237 fn map_custom(
3244 value: f64,
3245 input: Range<f64>,
3246 output_inbounds: Range<f64>,
3247 output: Range<f64>,
3248 ) -> f64 {
3249 assert!(!value.is_nan(), "{}", value);
3250 assert!(value.is_finite(), "{}", value);
3251 assert!(input.start < input.end, "{} < {}", input.start, input.end);
3252 assert!(
3253 output.start < output.end,
3254 "{} < {}",
3255 output.start,
3256 output.end
3257 );
3258 assert!(value >= input.start, "{} >= {}", value, input.start);
3259 assert!(value <= input.end, "{} <= {}", value, input.end);
3260 assert!(
3261 output.start <= output_inbounds.start,
3262 "{} <= {}",
3263 output.start,
3264 output_inbounds.start
3265 );
3266 assert!(
3267 output_inbounds.end <= output.end,
3268 "{} <= {}",
3269 output_inbounds.end,
3270 output.end
3271 );
3272
3273 let x = map_linear(value, input, 0.0..1.0);
3274 let result = if x < PCT_INBOUNDS {
3275 if output_inbounds.start == output_inbounds.end {
3276 output_inbounds.start
3277 } else {
3278 let unscaled = x * x * x * x * x * x;
3279 map_linear(unscaled, 0.0..1.0, output_inbounds)
3280 }
3281 } else {
3282 map_linear(x, 0.0..1.0, output.clone())
3283 };
3284
3285 assert!(result >= output.start, "{} >= {}", result, output.start);
3286 assert!(result <= output.end, "{} <= {}", result, output.end);
3287 result
3288 }
3289
3290 fn map_linear(
3295 value: f64,
3296 Range {
3297 start: in_low,
3298 end: in_high,
3299 }: Range<f64>,
3300 Range {
3301 start: out_low,
3302 end: out_high,
3303 }: Range<f64>,
3304 ) -> f64 {
3305 assert!(!value.is_nan(), "{}", value);
3306 assert!(value.is_finite(), "{}", value);
3307 assert!(in_low < in_high, "{in_low} < {in_high}");
3308 assert!(out_low < out_high, "{out_low} < {out_high}");
3309 assert!(value >= in_low, "{value} >= {in_low}");
3310 assert!(value <= in_high, "{value} <= {in_high}");
3311
3312 let dividend = out_high - out_low;
3313 let divisor = in_high - in_low;
3314 let slope = dividend / divisor;
3315 let result = out_low + (slope * (value - in_low));
3316
3317 assert!(result >= out_low, "{result} >= {out_low}");
3318 assert!(result <= out_high, "{result} <= {out_high}");
3319 result
3320 }
3321}
3322
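/// Chooses an offset for a segment of `segment_size` items: when the segment fits below `limit_min`, bias toward offsets that keep it in bounds; otherwise pick uniformly up to `limit_max`.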
3323fn arbitrary_offset(
3327 u: &mut Unstructured,
3328 limit_min: u64,
3329 limit_max: u64,
3330 segment_size: usize,
3331) -> Result<u64> {
3332 let size = u64::try_from(segment_size).unwrap();
3333
3334 if size > limit_min {
3337 u.int_in_range(0..=limit_max)
3338 } else {
3339 gradually_grow(u, 0, limit_min - size, limit_max)
3340 }
3341}
3342
3343fn unique_import_strings(max_size: usize, u: &mut Unstructured) -> Result<(String, String)> {
3344 let module = limited_string(max_size, u)?;
3345 let field = limited_string(max_size, u)?;
3346 Ok((module, field))
3347}
3348
3349fn arbitrary_vec_u8(u: &mut Unstructured) -> Result<Vec<u8>> {
3350 let size = u.arbitrary_len::<u8>()?;
3351 Ok(u.bytes(size)?.to_vec())
3352}
3353
3354impl EntityType {
3355 fn size(&self) -> u32 {
3356 match self {
3357 EntityType::Tag(_)
3358 | EntityType::Global(_)
3359 | EntityType::Table(_)
3360 | EntityType::Memory(_) => 1,
3361 EntityType::Func(_, ty) => 1 + (ty.params.len() + ty.results.len()) as u32,
3362 }
3363 }
3364}
3365
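/// The set of instruction kinds that generated code is allowed to use.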
3366#[derive(Clone, Copy, Debug, Default)]
3377#[cfg_attr(
3378 feature = "serde",
3379 derive(serde_derive::Deserialize, serde_derive::Serialize)
3380)]
3381pub struct InstructionKinds(pub(crate) FlagSet<InstructionKind>);
3382
3383impl InstructionKinds {
3384 pub fn new(kinds: &[InstructionKind]) -> Self {
3386 Self(kinds.iter().fold(FlagSet::default(), |ks, k| ks | *k))
3387 }
3388
3389 pub fn all() -> Self {
3391 Self(FlagSet::full())
3392 }
3393
3394 pub fn none() -> Self {
3396 Self(FlagSet::default())
3397 }
3398
3399 #[inline]
3401 pub fn contains(&self, kind: InstructionKind) -> bool {
3402 self.0.contains(kind)
3403 }
3404
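/// Returns these kinds with the float-capable variants replaced by their integer-only counterparts.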
3405 pub fn without_floats(&self) -> Self {
3407 let mut floatless = self.0;
3408 if floatless.contains(InstructionKind::Numeric) {
3409 floatless -= InstructionKind::Numeric;
3410 floatless |= InstructionKind::NumericInt;
3411 }
3412 if floatless.contains(InstructionKind::Vector) {
3413 floatless -= InstructionKind::Vector;
3414 floatless |= InstructionKind::VectorInt;
3415 }
3416 if floatless.contains(InstructionKind::Memory) {
3417 floatless -= InstructionKind::Memory;
3418 floatless |= InstructionKind::MemoryInt;
3419 }
3420 Self(floatless)
3421 }
3422}
3423
3424flags! {
3425 #[allow(missing_docs)]
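// Compound kinds (`Numeric`, `Vector`, `Memory`) also set the bit of their integer-only counterparts, so enabling them implies the non-float subset.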
3428 #[cfg_attr(feature = "_internal_cli", derive(serde_derive::Deserialize))]
3429 pub enum InstructionKind: u16 {
3430 NumericInt = 1 << 0,
3431 Numeric = (1 << 1) | (1 << 0),
3432 VectorInt = 1 << 2,
3433 Vector = (1 << 3) | (1 << 2),
3434 Reference = 1 << 4,
3435 Parametric = 1 << 5,
3436 Variable = 1 << 6,
3437 Table = 1 << 7,
3438 MemoryInt = 1 << 8,
3439 Memory = (1 << 9) | (1 << 8),
3440 Control = 1 << 10,
3441 Aggregate = 1 << 11,
3442 }
3443}
3444
3445impl FromStr for InstructionKinds {
3446 type Err = String;
3447 fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
3448 let mut kinds = vec![];
3449 for part in s.split(",") {
3450 let kind = InstructionKind::from_str(part)?;
3451 kinds.push(kind);
3452 }
3453 Ok(InstructionKinds::new(&kinds))
3454 }
3455}
3456
3457impl FromStr for InstructionKind {
3458 type Err = String;
3459 fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
3460 match s.to_lowercase().as_str() {
3461 "numeric_non_float" => Ok(InstructionKind::NumericInt),
3462 "numeric" => Ok(InstructionKind::Numeric),
3463 "vector_non_float" => Ok(InstructionKind::VectorInt),
3464 "vector" => Ok(InstructionKind::Vector),
3465 "reference" => Ok(InstructionKind::Reference),
3466 "parametric" => Ok(InstructionKind::Parametric),
3467 "variable" => Ok(InstructionKind::Variable),
3468 "table" => Ok(InstructionKind::Table),
3469 "memory_non_float" => Ok(InstructionKind::MemoryInt),
3470 "memory" => Ok(InstructionKind::Memory),
3471 "control" => Ok(InstructionKind::Control),
"aggregate" => Ok(InstructionKind::Aggregate),
3472 _ => Err(format!("unknown instruction kind: {s}")),
3473 }
3474 }
3475}
3476
3477#[cfg(feature = "wasmparser")]
3480impl TryFrom<wasmparser::FuncType> for FuncType {
3481 type Error = ();
3482
3483 fn try_from(value: wasmparser::FuncType) -> Result<Self, Self::Error> {
3484 Ok(FuncType {
3485 params: value
3486 .params()
3487 .iter()
3488 .copied()
3489 .map(|ty| ty.try_into().map_err(|_| ()))
3490 .collect::<Result<Vec<_>, _>>()?,
3491 results: value
3492 .results()
3493 .iter()
3494 .copied()
3495 .map(|ty| ty.try_into().map_err(|_| ()))
3496 .collect::<Result<Vec<_>, _>>()?,
3497 })
3498 }
3499}
3500
3501#[cfg(feature = "wasmparser")]
3502impl TryFrom<wasmparser::CompositeType> for CompositeType {
3503 type Error = ();
3504
3505 fn try_from(value: wasmparser::CompositeType) -> Result<Self, Self::Error> {
3506 let inner_type = match value.inner {
3507 wasmparser::CompositeInnerType::Func(func_type) => {
3508 CompositeInnerType::Func(Rc::new(func_type.try_into()?))
3509 }
3510 wasmparser::CompositeInnerType::Array(array_type) => {
3511 CompositeInnerType::Array(array_type.try_into().map_err(|_| ())?)
3512 }
3513 wasmparser::CompositeInnerType::Struct(struct_type) => {
3514 CompositeInnerType::Struct(struct_type.try_into().map_err(|_| ())?)
3515 }
3516 wasmparser::CompositeInnerType::Cont(_) => {
3517 panic!("continuation types are not currently supported by wasm-smith")
3518 }
3519 };
3520
3521 Ok(CompositeType {
3522 inner: inner_type,
3523 shared: value.shared,
3524 descriptor: value
3525 .descriptor_idx
3526 .map(|idx| idx.as_module_index().ok_or(()))
3527 .transpose()?,
3528 describes: value
3529 .describes_idx
3530 .map(|idx| idx.as_module_index().ok_or(()))
3531 .transpose()?,
3532 })
3533 }
3534}
3535
3536#[cfg(feature = "wasmparser")]
3537impl TryFrom<wasmparser::SubType> for SubType {
3538 type Error = ();
3539
3540 fn try_from(value: wasmparser::SubType) -> Result<Self, Self::Error> {
3541 Ok(SubType {
3542 is_final: value.is_final,
3543 supertype: value
3544 .supertype_idx
3545 .map(|idx| idx.as_module_index().ok_or(()))
3546 .transpose()?,
3547 composite_type: value.composite_type.try_into()?,
3548 depth: 1,
3551 })
3552 }
3553}