1mod code_builder;
4pub(crate) mod encode;
5mod terminate;
6
7use crate::{Config, arbitrary_loop, limited_string, unique_string};
8use arbitrary::{Arbitrary, Result, Unstructured};
9use code_builder::CodeBuilderAllocations;
10use flagset::{FlagSet, flags};
11use std::collections::{HashMap, HashSet};
12use std::fmt;
13use std::mem;
14use std::ops::Range;
15use std::rc::Rc;
16use std::str::{self, FromStr};
17use wasm_encoder::{
18 AbstractHeapType, ArrayType, BlockType, ConstExpr, ExportKind, FieldType, HeapType, RefType,
19 StorageType, StructType, ValType,
20};
21pub(crate) use wasm_encoder::{GlobalType, MemoryType, TableType};
22
23const CHANCE_OFFSET_INBOUNDS: usize = 10; const CHANCE_SEGMENT_ON_EMPTY: usize = 10; const PCT_INBOUNDS: f64 = 0.995; type Instruction = wasm_encoder::Instruction<'static>;
34
/// A pseudo-randomly generated WebAssembly module, built up section by
/// section from a stream of arbitrary bytes.
pub struct Module {
    /// The configuration this module is generated under.
    config: Config,
    /// Whether duplicate import module/field names are permitted.
    duplicate_imports_behavior: DuplicateImportsBehavior,
    /// The value types this module is allowed to use.
    valtypes: Vec<ValType>,

    /// All types in the module's type section, in index order.
    types: Vec<SubType>,

    /// Ranges into `types` delimiting each recursion group.
    rec_groups: Vec<Range<usize>>,

    /// Map from a supertype's index to the indices of its direct
    /// subtypes (maintained by `add_type`).
    super_to_sub_types: HashMap<u32, Vec<u32>>,

    /// Indices of non-final types (below the depth cap) that new types
    /// may declare as their supertype.
    can_subtype: Vec<u32>,

    /// Whether a type section should be encoded even when `types` is
    /// empty.
    should_encode_types: bool,

    /// When set, only `shared` concrete heap types may be chosen (see
    /// `arbitrary_heap_type`).
    must_share: bool,

    /// All imports, in the order they will be encoded.
    imports: Vec<Import>,

    /// Whether an import section should be encoded even when `imports`
    /// is empty.
    should_encode_imports: bool,

    /// Indices into `types` that are array types.
    array_types: Vec<u32>,

    /// Indices into `types` that are function types.
    func_types: Vec<u32>,

    /// Indices into `types` that are struct types.
    struct_types: Vec<u32>,

    /// Total number of imported entities.
    num_imports: usize,

    /// Number of tags defined in this module (as opposed to imported).
    num_defined_tags: usize,

    /// Number of functions defined in this module (as opposed to
    /// imported).
    num_defined_funcs: usize,

    /// Initializer expressions for tables defined in this module;
    /// `None` presumably means no explicit initializer — TODO confirm
    /// at the encode site.
    defined_tables: Vec<Option<ConstExpr>>,

    /// Number of memories defined in this module (as opposed to
    /// imported).
    num_defined_memories: usize,

    /// Globals defined in this module, each paired with its init
    /// expression. NOTE(review): the `u32`'s exact meaning (global
    /// index vs something else) isn't visible in this chunk; confirm.
    defined_globals: Vec<(u32, ConstExpr)>,

    /// All tags (imported and defined), in index order.
    tags: Vec<TagType>,

    /// All functions (imported and defined): type index plus resolved
    /// signature.
    funcs: Vec<(u32, Rc<FuncType>)>,

    /// All tables (imported and defined), in index order.
    tables: Vec<TableType>,

    /// All globals (imported and defined), in index order.
    globals: Vec<GlobalType>,

    /// All memories (imported and defined), in index order.
    memories: Vec<MemoryType>,

    /// Export entries: name, kind, and the index being exported.
    exports: Vec<(String, ExportKind, u32)>,
    /// Index of the start function, if any.
    start: Option<u32>,
    /// Element segments.
    elems: Vec<ElementSegment>,
    /// Bodies of the defined functions.
    code: Vec<Code>,
    /// Data segments.
    data: Vec<DataSegment>,

    /// Running size metric of the generated types — presumably used to
    /// bound overall type complexity; confirm at use sites.
    type_size: u32,

    /// Names already taken by exports, to keep export names unique.
    export_names: HashSet<String>,

    /// Reusable generators for constant expressions of a given type.
    const_expr_choices: Vec<Box<dyn Fn(&mut Unstructured, ValType) -> Result<ConstExpr>>>,

    /// Upper bound on which type indices may currently be referenced
    /// (widened while generating a rec group).
    max_type_limit: MaxTypeLimit,

    /// Cached "interesting" 32-bit values — presumably seeds for value
    /// generation; confirm at use sites.
    interesting_values32: Vec<u32>,
    /// Cached "interesting" 64-bit values — see note above on the
    /// 32-bit list.
    interesting_values64: Vec<u64>,
}
159
impl<'a> Arbitrary<'a> for Module {
    /// Generates a module using the default configuration.
    fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
        Module::new(Config::default(), u)
    }
}
165
166impl fmt::Debug for Module {
167 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
168 f.debug_struct("Module")
169 .field("config", &self.config)
170 .field(&"...", &"...")
171 .finish()
172 }
173}
174
/// Whether the generator may emit two imports with the same
/// module/field name pair.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum DuplicateImportsBehavior {
    Allowed,
    Disallowed,
}
180
/// Whether `arbitrary_rec_group` is allowed to produce a group
/// containing zero types.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AllowEmptyRecGroup {
    Yes,
    No,
}
186
/// Upper bound on which type indices may be referenced while
/// generating.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MaxTypeLimit {
    /// Any type currently in `Module::types`.
    ModuleTypes,
    /// Types with index strictly below this value (used inside rec
    /// groups, which may forward-reference their own members).
    Num(u32),
}
192
193impl Module {
194 pub fn config(&self) -> &Config {
196 &self.config
197 }
198
199 pub fn new(config: Config, u: &mut Unstructured<'_>) -> Result<Self> {
202 Self::new_internal(config, u, DuplicateImportsBehavior::Allowed)
203 }
204
205 pub(crate) fn new_internal(
206 config: Config,
207 u: &mut Unstructured<'_>,
208 duplicate_imports_behavior: DuplicateImportsBehavior,
209 ) -> Result<Self> {
210 let mut module = Module::empty(config, duplicate_imports_behavior);
211 module.build(u)?;
212 Ok(module)
213 }
214
215 fn empty(mut config: Config, duplicate_imports_behavior: DuplicateImportsBehavior) -> Self {
216 config.sanitize();
217 Module {
218 config,
219 duplicate_imports_behavior,
220 valtypes: Vec::new(),
221 types: Vec::new(),
222 rec_groups: Vec::new(),
223 can_subtype: Vec::new(),
224 super_to_sub_types: HashMap::new(),
225 should_encode_types: false,
226 imports: Vec::new(),
227 should_encode_imports: false,
228 array_types: Vec::new(),
229 func_types: Vec::new(),
230 struct_types: Vec::new(),
231 num_imports: 0,
232 num_defined_tags: 0,
233 num_defined_funcs: 0,
234 defined_tables: Vec::new(),
235 num_defined_memories: 0,
236 defined_globals: Vec::new(),
237 tags: Vec::new(),
238 funcs: Vec::new(),
239 tables: Vec::new(),
240 globals: Vec::new(),
241 memories: Vec::new(),
242 exports: Vec::new(),
243 start: None,
244 elems: Vec::new(),
245 code: Vec::new(),
246 data: Vec::new(),
247 type_size: 0,
248 export_names: HashSet::new(),
249 const_expr_choices: Vec::new(),
250 max_type_limit: MaxTypeLimit::ModuleTypes,
251 interesting_values32: Vec::new(),
252 interesting_values64: Vec::new(),
253 must_share: false,
254 }
255 }
256}
257
/// One entry of the type section: a composite type plus its subtyping
/// attributes.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct SubType {
    /// When `true`, no other type may declare this one as supertype.
    pub(crate) is_final: bool,
    /// Index of this type's declared supertype, if any.
    pub(crate) supertype: Option<u32>,
    pub(crate) composite_type: CompositeType,
    /// Length of the supertype chain ending at this type (1 when there
    /// is no supertype); used by `add_type` to cap subtyping depth.
    depth: u32,
}
267
impl SubType {
    /// Returns the inner struct type; panics if this is not a struct.
    fn unwrap_struct(&self) -> &StructType {
        self.composite_type.unwrap_struct()
    }

    /// Returns the inner function type; panics if this is not a func.
    fn unwrap_func(&self) -> &Rc<FuncType> {
        self.composite_type.unwrap_func()
    }

    /// Returns the inner array type; panics if this is not an array.
    fn unwrap_array(&self) -> &ArrayType {
        self.composite_type.unwrap_array()
    }
}
281
/// A composite type together with the attributes that apply to the
/// whole type-section entry.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct CompositeType {
    pub inner: CompositeInnerType,
    /// Whether this is a `shared` type.
    pub shared: bool,
    /// This type's descriptor type index, if any — presumably from the
    /// custom-descriptors proposal; confirm at use sites.
    pub descriptor: Option<u32>,
    /// Index of the type this type describes, if any.
    pub describes: Option<u32>,
}
289
290impl CompositeType {
291 #[cfg(any(feature = "component-model", feature = "wasmparser"))]
292 pub(crate) fn new_func(func: Rc<FuncType>, shared: bool) -> Self {
293 Self {
294 inner: CompositeInnerType::Func(func),
295 shared,
296 descriptor: None,
297 describes: None,
298 }
299 }
300
301 fn unwrap_func(&self) -> &Rc<FuncType> {
302 match &self.inner {
303 CompositeInnerType::Func(f) => f,
304 _ => panic!("not a func"),
305 }
306 }
307
308 fn unwrap_array(&self) -> &ArrayType {
309 match &self.inner {
310 CompositeInnerType::Array(a) => a,
311 _ => panic!("not an array"),
312 }
313 }
314
315 fn unwrap_struct(&self) -> &StructType {
316 match &self.inner {
317 CompositeInnerType::Struct(s) => s,
318 _ => panic!("not a struct"),
319 }
320 }
321}
322
323impl From<&CompositeType> for wasm_encoder::CompositeType {
324 fn from(ty: &CompositeType) -> Self {
325 let inner = match &ty.inner {
326 CompositeInnerType::Array(a) => wasm_encoder::CompositeInnerType::Array(*a),
327 CompositeInnerType::Func(f) => wasm_encoder::CompositeInnerType::Func(
328 wasm_encoder::FuncType::new(f.params.iter().cloned(), f.results.iter().cloned()),
329 ),
330 CompositeInnerType::Struct(s) => wasm_encoder::CompositeInnerType::Struct(s.clone()),
331 };
332 wasm_encoder::CompositeType {
333 shared: ty.shared,
334 inner,
335 descriptor: ty.descriptor,
336 describes: ty.describes,
337 }
338 }
339}
340
/// The three kinds of composite types a type-section entry can define.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum CompositeInnerType {
    Array(ArrayType),
    Func(Rc<FuncType>),
    Struct(StructType),
}

/// A function signature: parameter and result value types.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub(crate) struct FuncType {
    /// Parameter types, in order.
    pub(crate) params: Vec<ValType>,
    /// Result types, in order.
    pub(crate) results: Vec<ValType>,
}

/// One entry of the import section.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct Import {
    /// Name of the module the entity is imported from.
    pub(crate) module: String,
    /// Name of the entity within that module.
    pub(crate) field: String,
    /// Kind and type of the imported entity.
    pub(crate) entity_type: EntityType,
}

/// The type of an importable entity.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum EntityType {
    Global(GlobalType),
    Table(TableType),
    Memory(MemoryType),
    Tag(TagType),
    /// A function: its type index plus the resolved signature.
    Func(u32, Rc<FuncType>),
}

/// An exception-handling tag.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct TagType {
    /// Index of the tag's function type in the type section.
    func_type_idx: u32,
    /// The resolved function type at that index.
    func_type: Rc<FuncType>,
}
391
/// One entry of the element section.
#[derive(Debug)]
struct ElementSegment {
    kind: ElementKind,
    /// Element type of the segment's items.
    ty: RefType,
    items: Elements,
}

/// How an element segment is used.
#[derive(Debug)]
enum ElementKind {
    /// Only available via instructions (e.g. `table.init`).
    Passive,
    /// Declares references without initializing any table.
    Declared,
    /// Copied into a table at instantiation time.
    Active {
        /// Target table; `None` presumably selects the implicit
        /// table-0 encoding — TODO confirm at the encode site.
        table: Option<u32>,
        offset: Offset,
    },
}

/// The payload of an element segment.
#[derive(Debug)]
enum Elements {
    /// Plain function indices.
    Functions(Vec<u32>),
    /// Arbitrary constant expressions.
    Expressions(Vec<ConstExpr>),
}

/// The body of one defined function.
#[derive(Debug)]
struct Code {
    /// Types of the function's declared locals.
    locals: Vec<ValType>,
    instructions: Instructions,
}

/// A function body: either instructions we generated, or raw bytes.
#[derive(Debug)]
enum Instructions {
    Generated(Vec<Instruction>),
    Arbitrary(Vec<u8>),
}

/// One entry of the data section.
#[derive(Debug)]
struct DataSegment {
    kind: DataSegmentKind,
    /// The raw bytes of the segment.
    init: Vec<u8>,
}

/// How a data segment is used.
#[derive(Debug)]
enum DataSegmentKind {
    /// Only available via instructions (e.g. `memory.init`).
    Passive,
    /// Copied into a memory at instantiation time.
    Active { memory_index: u32, offset: Offset },
}

/// An offset expression for an active element/data segment.
#[derive(Debug)]
pub(crate) enum Offset {
    /// A constant 32-bit offset.
    Const32(i32),
    /// A constant 64-bit offset.
    Const64(i64),
    /// The value of the global at this index.
    Global(u32),
}
445
446impl Module {
    /// Drives generation of the whole module, one section at a time, in
    /// the order the sections are encoded.
    fn build(&mut self, u: &mut Unstructured) -> Result<()> {
        self.valtypes = configured_valtypes(&self.config);

        // Imports/exports may be dictated by an example module
        // (`module_shape`) or by a configured list of available
        // imports; either source suppresses the corresponding
        // arbitrary generation below.
        let mut generate_arbitrary_imports = true;
        let mut generate_arbitrary_exports = true;
        if self.imports_exports_from_module_shape(u)? {
            generate_arbitrary_imports = false;
            generate_arbitrary_exports = false;
        }
        if self.arbitrary_imports_from_available(u)? {
            generate_arbitrary_imports = false;
        }
        self.arbitrary_types(u)?;
        if generate_arbitrary_imports {
            self.arbitrary_imports(u)?;
        }

        // Even an empty import section is sometimes encoded explicitly.
        self.should_encode_imports = !self.imports.is_empty() || u.arbitrary()?;

        self.arbitrary_tags(u)?;
        self.arbitrary_funcs(u)?;
        self.arbitrary_tables(u)?;
        self.arbitrary_memories(u)?;
        self.arbitrary_globals(u)?;
        if self.required_exports(u)? {
            generate_arbitrary_exports = false;
        }
        if generate_arbitrary_exports {
            self.arbitrary_exports(u)?;
        }
        // Likewise, an empty type section is sometimes still encoded.
        self.should_encode_types = !self.types.is_empty() || u.arbitrary()?;
        self.arbitrary_start(u)?;
        self.arbitrary_elems(u)?;
        self.arbitrary_data(u)?;
        self.arbitrary_code(u)?;
        Ok(())
    }
489
490 #[inline]
491 fn val_type_is_sub_type(&self, a: ValType, b: ValType) -> bool {
492 match (a, b) {
493 (a, b) if a == b => true,
494 (ValType::Ref(a), ValType::Ref(b)) => self.ref_type_is_sub_type(a, b),
495 _ => false,
496 }
497 }
498
499 fn ref_type_is_sub_type(&self, a: RefType, b: RefType) -> bool {
501 if a == b {
502 return true;
503 }
504
505 if a.nullable && !b.nullable {
506 return false;
507 }
508
509 self.heap_type_is_sub_type(a.heap_type, b.heap_type)
510 }
511
    /// Is heap type `a` a subtype of heap type `b`?
    ///
    /// Handles abstract-vs-abstract, abstract-vs-concrete (both
    /// directions), and concrete-vs-concrete pairs, including exact
    /// heap types.
    fn heap_type_is_sub_type(&self, a: HeapType, b: HeapType) -> bool {
        use AbstractHeapType::*;
        use CompositeInnerType as CT;
        use HeapType as HT;
        match (a, b) {
            // Reflexivity.
            (a, b) if a == b => true,

            (
                HT::Abstract {
                    shared: a_shared,
                    ty: a_ty,
                },
                HT::Abstract {
                    shared: b_shared,
                    ty: b_ty,
                },
            ) => {
                // Sharedness must agree; then consult the abstract-type
                // lattice (tops, bottoms, and the any/eq family).
                a_shared == b_shared
                    && match (a_ty, b_ty) {
                        (Eq | I31 | Struct | Array | None, Any) => true,
                        (I31 | Struct | Array | None, Eq) => true,
                        (NoExtern, Extern) => true,
                        (NoFunc, Func) => true,
                        (None, I31 | Array | Struct) => true,
                        (NoExn, Exn) => true,
                        _ => false,
                    }
            }

            // A concrete (or exact) type sits below the abstract type
            // at the top of its kind's hierarchy.
            (HT::Concrete(a), HT::Abstract { shared, ty })
            | (HT::Exact(a), HT::Abstract { shared, ty }) => {
                let a_ty = &self.ty(a).composite_type;
                if a_ty.shared != shared {
                    return false;
                }
                match ty {
                    Eq | Any => matches!(a_ty.inner, CT::Array(_) | CT::Struct(_)),
                    Struct => matches!(a_ty.inner, CT::Struct(_)),
                    Array => matches!(a_ty.inner, CT::Array(_)),
                    Func => matches!(a_ty.inner, CT::Func(_)),
                    _ => false,
                }
            }

            // Only the bottom abstract types (`none`/`nofunc`) sit
            // below concrete types.
            (HT::Abstract { shared, ty }, HT::Concrete(b))
            | (HT::Abstract { shared, ty }, HT::Exact(b)) => {
                let b_ty = &self.ty(b).composite_type;
                if shared != b_ty.shared {
                    return false;
                }
                match ty {
                    None => matches!(b_ty.inner, CT::Array(_) | CT::Struct(_)),
                    NoFunc => matches!(b_ty.inner, CT::Func(_)),
                    _ => false,
                }
            }

            // Walk `a`'s declared-supertype chain looking for `b`.
            (HT::Concrete(mut a), HT::Concrete(b)) | (HT::Exact(mut a), HT::Concrete(b)) => loop {
                if a == b {
                    return true;
                }
                if let Some(supertype) = self.ty(a).supertype {
                    a = supertype;
                } else {
                    return false;
                }
            },

            // An exact target only admits the very same type index.
            (HT::Concrete(a), HT::Exact(b)) | (HT::Exact(a), HT::Exact(b)) => {
                return a == b;
            }
        }
    }
585
586 fn arbitrary_types(&mut self, u: &mut Unstructured) -> Result<()> {
587 assert!(self.config.min_types <= self.config.max_types);
588 while self.types.len() < self.config.min_types {
589 self.arbitrary_rec_group(u, AllowEmptyRecGroup::No)?;
590 }
591 while self.types.len() < self.config.max_types {
592 let keep_going = u.arbitrary().unwrap_or(false);
593 if !keep_going {
594 break;
595 }
596 self.arbitrary_rec_group(u, AllowEmptyRecGroup::Yes)?;
597 }
598 Ok(())
599 }
600
    /// Registers `ty` as the module's next type and returns its index,
    /// maintaining the side tables (`super_to_sub_types`, the per-kind
    /// index lists, and `can_subtype`).
    fn add_type(&mut self, ty: SubType) -> u32 {
        let index = u32::try_from(self.types.len()).unwrap();

        if let Some(supertype) = ty.supertype {
            // Sharedness must be inherited from the supertype.
            assert_eq!(self.is_shared_type(supertype), ty.composite_type.shared);
            self.super_to_sub_types
                .entry(supertype)
                .or_default()
                .push(index);
        }

        // Record the index in the list matching the type's kind.
        let list = match &ty.composite_type.inner {
            CompositeInnerType::Array(_) => &mut self.array_types,
            CompositeInnerType::Func(_) => &mut self.func_types,
            CompositeInnerType::Struct(_) => &mut self.struct_types,
        };
        list.push(index);

        // Cap the subtyping-chain depth so generated hierarchies stay
        // within engine-imposed limits.
        const MAX_SUBTYPING_DEPTH: u32 = 60;
        if !ty.is_final && ty.depth < MAX_SUBTYPING_DEPTH {
            self.can_subtype.push(index);
        }

        self.types.push(ty);
        index
    }
634
    /// Generates one recursion group's worth of types and records its
    /// range in `self.rec_groups`.
    fn arbitrary_rec_group(
        &mut self,
        u: &mut Unstructured,
        kind: AllowEmptyRecGroup,
    ) -> Result<()> {
        let rec_group_start = self.types.len();

        // The limit must be in its default state on entry; it is
        // temporarily narrowed below and restored before returning.
        assert!(matches!(self.max_type_limit, MaxTypeLimit::ModuleTypes));

        if self.config.gc_enabled {
            // Occasionally clone an existing rec group wholesale.
            if self.rec_groups.len() > 0 && u.ratio(1, u8::MAX)? {
                return self.clone_rec_group(u, kind);
            }

            let max_rec_group_size = self.config.max_types - self.types.len();
            let min_rec_group_size = match kind {
                AllowEmptyRecGroup::Yes => 0,
                AllowEmptyRecGroup::No => 1,
            };
            let rec_group_size = u.int_in_range(min_rec_group_size..=max_rec_group_size)?;
            // Types inside a rec group may forward-reference any member
            // of the group, so widen the referenceable-index limit to
            // the end of the group while generating.
            let type_ref_limit = u32::try_from(self.types.len() + rec_group_size).unwrap();
            self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
            for _ in 0..rec_group_size {
                let ty = self.arbitrary_sub_type(u)?;
                self.add_type(ty);
            }
        } else {
            // Without GC each "group" is a single type that may only
            // reference strictly earlier types.
            let type_ref_limit = u32::try_from(self.types.len()).unwrap();
            self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
            let ty = self.arbitrary_sub_type(u)?;
            self.add_type(ty);
        }

        // Restore the default limit.
        self.max_type_limit = MaxTypeLimit::ModuleTypes;

        self.rec_groups.push(rec_group_start..self.types.len());
        Ok(())
    }
675
    /// Duplicates a randomly chosen, previously generated rec group at
    /// the end of the type section. May generate nothing when the
    /// chosen group is unsuitable (empty when emptiness is disallowed,
    /// or too large to fit under `max_types`).
    fn clone_rec_group(&mut self, u: &mut Unstructured, kind: AllowEmptyRecGroup) -> Result<()> {
        let group = u.choose(&self.rec_groups)?.clone();
        if group.is_empty() && kind == AllowEmptyRecGroup::No {
            return Ok(());
        }
        if group.len() > self.config.max_types.saturating_sub(self.types.len()) {
            return Ok(());
        }

        let new_rec_group_start = self.types.len();
        for index in group {
            let orig_ty_index = u32::try_from(index).unwrap();
            // NOTE(review): the clone keeps any type indices inside the
            // copied types pointing at the *original* group's members —
            // presumably intentional (shape-identical group), but worth
            // confirming.
            let ty = self.ty(orig_ty_index).clone();
            self.add_type(ty);
        }
        self.rec_groups.push(new_rec_group_start..self.types.len());
        Ok(())
    }
705
706 fn arbitrary_sub_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
707 if !self.config.gc_enabled {
708 let shared = self.arbitrary_shared(u)?;
709 let func_type = self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?;
710 let composite_type = CompositeType {
711 inner: CompositeInnerType::Func(func_type),
712 shared,
713 descriptor: None,
714 describes: None,
715 };
716 return Ok(SubType {
717 is_final: true,
718 supertype: None,
719 composite_type,
720 depth: 1,
721 });
722 }
723
724 if !self.can_subtype.is_empty() && u.ratio(1, 32_u8)? {
725 self.arbitrary_sub_type_of_super_type(u)
726 } else {
727 Ok(SubType {
728 is_final: u.arbitrary()?,
729 supertype: None,
730 composite_type: self.arbitrary_composite_type(u)?,
731 depth: 1,
732 })
733 }
734 }
735
736 fn arbitrary_sub_type_of_super_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
737 let supertype = *u.choose(&self.can_subtype)?;
738 let mut composite_type = self.types[usize::try_from(supertype).unwrap()]
739 .composite_type
740 .clone();
741 match &mut composite_type.inner {
742 CompositeInnerType::Array(a) => {
743 a.0 = self.arbitrary_matching_field_type(u, a.0)?;
744 }
745 CompositeInnerType::Func(f) => {
746 *f = self.arbitrary_matching_func_type(u, f)?;
747 }
748 CompositeInnerType::Struct(s) => {
749 *s = self.propagate_shared(composite_type.shared, |m| {
750 m.arbitrary_matching_struct_type(u, s)
751 })?;
752 }
753 }
754 Ok(SubType {
755 is_final: u.arbitrary()?,
756 supertype: Some(supertype),
757 composite_type,
758 depth: 1 + self.types[supertype as usize].depth,
759 })
760 }
761
762 fn arbitrary_matching_struct_type(
763 &mut self,
764 u: &mut Unstructured,
765 ty: &StructType,
766 ) -> Result<StructType> {
767 let len_extra_fields = u.int_in_range(0..=5)?;
768 let mut fields = Vec::with_capacity(ty.fields.len() + len_extra_fields);
769 for field in ty.fields.iter() {
770 fields.push(self.arbitrary_matching_field_type(u, *field)?);
771 }
772 for _ in 0..len_extra_fields {
773 fields.push(self.arbitrary_field_type(u)?);
774 }
775 Ok(StructType {
776 fields: fields.into_boxed_slice(),
777 })
778 }
779
780 fn arbitrary_matching_field_type(
781 &mut self,
782 u: &mut Unstructured,
783 ty: FieldType,
784 ) -> Result<FieldType> {
785 if ty.mutable {
786 Ok(ty)
787 } else {
788 Ok(FieldType {
789 element_type: self.arbitrary_matching_storage_type(u, ty.element_type)?,
790 mutable: false,
791 })
792 }
793 }
794
795 fn arbitrary_matching_storage_type(
796 &mut self,
797 u: &mut Unstructured,
798 ty: StorageType,
799 ) -> Result<StorageType> {
800 match ty {
801 StorageType::I8 => Ok(StorageType::I8),
802 StorageType::I16 => Ok(StorageType::I16),
803 StorageType::Val(ty) => Ok(StorageType::Val(self.arbitrary_matching_val_type(u, ty)?)),
804 }
805 }
806
807 fn arbitrary_matching_val_type(
808 &mut self,
809 u: &mut Unstructured,
810 ty: ValType,
811 ) -> Result<ValType> {
812 match ty {
813 ValType::I32 => Ok(ValType::I32),
814 ValType::I64 => Ok(ValType::I64),
815 ValType::F32 => Ok(ValType::F32),
816 ValType::F64 => Ok(ValType::F64),
817 ValType::V128 => Ok(ValType::V128),
818 ValType::Ref(ty) => Ok(ValType::Ref(self.arbitrary_matching_ref_type(u, ty)?)),
819 }
820 }
821
822 fn arbitrary_matching_ref_type(&self, u: &mut Unstructured, ty: RefType) -> Result<RefType> {
823 Ok(RefType {
824 nullable: ty.nullable,
825 heap_type: self.arbitrary_matching_heap_type(u, ty.heap_type)?,
826 })
827 }
828
    /// Generates a heap type that matches (is a subtype of) `ty`.
    fn arbitrary_matching_heap_type(&self, u: &mut Unstructured, ty: HeapType) -> Result<HeapType> {
        use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};

        // Without GC there is no subtyping, so only `ty` itself fits.
        if !self.config.gc_enabled {
            return Ok(ty);
        }

        // `ty` itself is always a candidate; collect its subtypes too.
        let mut choices = vec![ty];
        match ty {
            HT::Abstract { shared, ty } => {
                use AbstractHeapType::*;
                let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
                    choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
                };
                // Concrete candidates must have matching sharedness.
                let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
                    choices.extend(
                        tys.iter()
                            .filter(|&&idx| shared == self.is_shared_type(idx))
                            .copied()
                            .map(HT::Concrete),
                    );
                };
                match ty {
                    Any => {
                        add_abstract(&mut choices, &[Eq, Struct, Array, I31, None]);
                        add_concrete(&mut choices, &self.array_types);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    Eq => {
                        add_abstract(&mut choices, &[Struct, Array, I31, None]);
                        add_concrete(&mut choices, &self.array_types);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    Struct => {
                        add_abstract(&mut choices, &[Struct, None]);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    Array => {
                        add_abstract(&mut choices, &[Array, None]);
                        add_concrete(&mut choices, &self.array_types);
                    }
                    I31 => {
                        add_abstract(&mut choices, &[None]);
                    }
                    Func => {
                        add_abstract(&mut choices, &[NoFunc]);
                        add_concrete(&mut choices, &self.func_types);
                    }
                    Extern => {
                        add_abstract(&mut choices, &[NoExtern]);
                    }
                    // These have no proper subtypes to offer.
                    Exn | NoExn | None | NoExtern | NoFunc | Cont | NoCont => {}
                }
            }
            HT::Concrete(idx) => {
                // Declared subtypes of `idx` all match.
                if let Some(subs) = self.super_to_sub_types.get(&idx) {
                    choices.extend(subs.iter().copied().map(HT::Concrete));
                }
                if self.config.custom_descriptors_enabled {
                    choices.push(HT::Exact(idx));
                    // NOTE(review): this repeats the `HT::Concrete`
                    // extension from just above verbatim — it looks
                    // like it may have been intended to add `HT::Exact`
                    // subtype candidates instead. Duplicates only skew
                    // the selection weight, so behavior is still sound;
                    // confirm intent upstream before changing.
                    if let Some(subs) = self.super_to_sub_types.get(&idx) {
                        choices.extend(subs.iter().copied().map(HT::Concrete));
                    }
                }
                // The matching bottom type is always a valid subtype.
                match self
                    .types
                    .get(usize::try_from(idx).unwrap())
                    .map(|ty| (ty.composite_type.shared, &ty.composite_type.inner))
                {
                    Some((shared, CT::Array(_) | CT::Struct(_))) => choices.push(HT::Abstract {
                        shared,
                        ty: AbstractHeapType::None,
                    }),
                    Some((shared, CT::Func(_))) => choices.push(HT::Abstract {
                        shared,
                        ty: AbstractHeapType::NoFunc,
                    }),
                    None => {
                        // `idx` may point past `types.len()` while
                        // generating inside a rec group (forward
                        // reference), so its kind can't be inspected
                        // here.
                    }
                }
            }
            // Exact heap types only match themselves.
            HT::Exact(_) => (),
        }
        Ok(*u.choose(&choices)?)
    }
919
920 fn arbitrary_matching_func_type(
921 &mut self,
922 u: &mut Unstructured,
923 ty: &FuncType,
924 ) -> Result<Rc<FuncType>> {
925 let mut params = Vec::with_capacity(ty.params.len());
929 for param in &ty.params {
930 params.push(self.arbitrary_super_type_of_val_type(u, *param)?);
931 }
932 let mut results = Vec::with_capacity(ty.results.len());
933 for result in &ty.results {
934 results.push(self.arbitrary_matching_val_type(u, *result)?);
935 }
936 Ok(Rc::new(FuncType { params, results }))
937 }
938
939 fn arbitrary_super_type_of_val_type(
940 &mut self,
941 u: &mut Unstructured,
942 ty: ValType,
943 ) -> Result<ValType> {
944 match ty {
945 ValType::I32 => Ok(ValType::I32),
946 ValType::I64 => Ok(ValType::I64),
947 ValType::F32 => Ok(ValType::F32),
948 ValType::F64 => Ok(ValType::F64),
949 ValType::V128 => Ok(ValType::V128),
950 ValType::Ref(ty) => Ok(ValType::Ref(self.arbitrary_super_type_of_ref_type(u, ty)?)),
951 }
952 }
953
    /// Generates a reference type that `ty` matches (a supertype of
    /// `ty`).
    fn arbitrary_super_type_of_ref_type(
        &self,
        u: &mut Unstructured,
        ty: RefType,
    ) -> Result<RefType> {
        Ok(RefType {
            // A nullable reference is a supertype of its non-nullable
            // counterpart (see `ref_type_is_sub_type`), so widening to
            // nullable unconditionally is always sound here.
            nullable: true,
            heap_type: self.arbitrary_super_type_of_heap_type(u, ty.heap_type)?,
        })
    }
970
    /// Generates a heap type that `ty` matches (i.e. a supertype of
    /// `ty`).
    fn arbitrary_super_type_of_heap_type(
        &self,
        u: &mut Unstructured,
        ty: HeapType,
    ) -> Result<HeapType> {
        use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};

        // Without GC there is no subtyping, so only `ty` itself fits.
        if !self.config.gc_enabled {
            return Ok(ty);
        }

        // `ty` itself is always a candidate; collect its supertypes.
        let mut choices = vec![ty];
        match ty {
            HT::Abstract { shared, ty } => {
                use AbstractHeapType::*;
                let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
                    choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
                };
                // Concrete candidates must have matching sharedness.
                let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
                    choices.extend(
                        tys.iter()
                            .filter(|&&idx| shared == self.is_shared_type(idx))
                            .copied()
                            .map(HT::Concrete),
                    );
                };
                match ty {
                    // Bottom types sit below everything in their
                    // hierarchy, including all concrete types of the
                    // matching kind.
                    None => {
                        add_abstract(&mut choices, &[Any, Eq, Struct, Array, I31]);
                        add_concrete(&mut choices, &self.array_types);
                        add_concrete(&mut choices, &self.struct_types);
                    }
                    NoExtern => {
                        add_abstract(&mut choices, &[Extern]);
                    }
                    NoFunc => {
                        add_abstract(&mut choices, &[Func]);
                        add_concrete(&mut choices, &self.func_types);
                    }
                    NoExn => {
                        add_abstract(&mut choices, &[Exn]);
                    }
                    Struct | Array | I31 => {
                        add_abstract(&mut choices, &[Any, Eq]);
                    }
                    Eq => {
                        add_abstract(&mut choices, &[Any]);
                    }
                    NoCont => {
                        add_abstract(&mut choices, &[Cont]);
                    }
                    // Top types have no proper supertypes.
                    Exn | Any | Func | Extern | Cont => {}
                }
            }
            HT::Concrete(mut idx) => {
                if let Some(sub_ty) = &self.types.get(usize::try_from(idx).unwrap()) {
                    use AbstractHeapType::*;
                    let ht = |ty| HT::Abstract {
                        shared: sub_ty.composite_type.shared,
                        ty,
                    };
                    // Abstract supertypes determined by the type's kind.
                    match &sub_ty.composite_type.inner {
                        CT::Array(_) => {
                            choices.extend([ht(Any), ht(Eq), ht(Array)]);
                        }
                        CT::Func(_) => {
                            choices.push(ht(Func));
                        }
                        CT::Struct(_) => {
                            choices.extend([ht(Any), ht(Eq), ht(Struct)]);
                        }
                    }
                } else {
                    // `idx` may point past `types.len()` while inside a
                    // rec group (forward reference); its kind is
                    // unknown, so no abstract supertypes are offered.
                }
                // Every declared supertype up the chain is a candidate.
                while let Some(supertype) = self
                    .types
                    .get(usize::try_from(idx).unwrap())
                    .and_then(|ty| ty.supertype)
                {
                    choices.push(HT::Concrete(supertype));
                    idx = supertype;
                }
            }
            // NOTE(review): exact types offer no supertypes here even
            // though `heap_type_is_sub_type` accepts Exact <: Concrete —
            // possibly a deliberate restriction; confirm upstream.
            HT::Exact(_) => (),
        }
        Ok(*u.choose(&choices)?)
    }
1064
1065 fn arbitrary_composite_type(&mut self, u: &mut Unstructured) -> Result<CompositeType> {
1066 use CompositeInnerType as CT;
1067 let shared = self.arbitrary_shared(u)?;
1068
1069 if !self.config.gc_enabled {
1070 return Ok(CompositeType {
1071 shared,
1072 inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
1073 descriptor: None,
1074 describes: None,
1075 });
1076 }
1077
1078 match u.int_in_range(0..=2)? {
1079 0 => Ok(CompositeType {
1080 shared,
1081 inner: CT::Array(ArrayType(
1082 self.propagate_shared(shared, |m| m.arbitrary_field_type(u))?,
1083 )),
1084 descriptor: None,
1085 describes: None,
1086 }),
1087 1 => Ok(CompositeType {
1088 shared,
1089 inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
1090 descriptor: None,
1091 describes: None,
1092 }),
1093 2 => Ok(CompositeType {
1094 shared,
1095 inner: CT::Struct(self.propagate_shared(shared, |m| m.arbitrary_struct_type(u))?),
1096 descriptor: None, describes: None,
1098 }),
1099 _ => unreachable!(),
1100 }
1101 }
1102
1103 fn arbitrary_struct_type(&mut self, u: &mut Unstructured) -> Result<StructType> {
1104 let len = u.int_in_range(0..=20)?;
1105 let mut fields = Vec::with_capacity(len);
1106 for _ in 0..len {
1107 fields.push(self.arbitrary_field_type(u)?);
1108 }
1109 Ok(StructType {
1110 fields: fields.into_boxed_slice(),
1111 })
1112 }
1113
1114 fn arbitrary_field_type(&mut self, u: &mut Unstructured) -> Result<FieldType> {
1115 Ok(FieldType {
1116 element_type: self.arbitrary_storage_type(u)?,
1117 mutable: u.arbitrary()?,
1118 })
1119 }
1120
1121 fn arbitrary_storage_type(&mut self, u: &mut Unstructured) -> Result<StorageType> {
1122 match u.int_in_range(0..=2)? {
1123 0 => Ok(StorageType::I8),
1124 1 => Ok(StorageType::I16),
1125 2 => Ok(StorageType::Val(self.arbitrary_valtype(u)?)),
1126 _ => unreachable!(),
1127 }
1128 }
1129
1130 fn arbitrary_ref_type(&self, u: &mut Unstructured) -> Result<RefType> {
1131 if !self.config.reference_types_enabled {
1132 return Ok(RefType::FUNCREF);
1133 }
1134 Ok(RefType {
1135 nullable: true,
1136 heap_type: self.arbitrary_heap_type(u)?,
1137 })
1138 }
1139
1140 fn arbitrary_heap_type(&self, u: &mut Unstructured) -> Result<HeapType> {
1141 assert!(self.config.reference_types_enabled);
1142
1143 let concrete_type_limit = match self.max_type_limit {
1144 MaxTypeLimit::Num(n) => n,
1145 MaxTypeLimit::ModuleTypes => u32::try_from(self.types.len()).unwrap(),
1146 };
1147
1148 if self.config.gc_enabled && concrete_type_limit > 0 && u.arbitrary()? {
1149 let idx = u.int_in_range(0..=concrete_type_limit - 1)?;
1150 if let Some(ty) = self.types.get(idx as usize) {
1156 if !(self.must_share && !ty.composite_type.shared) {
1159 return Ok(HeapType::Concrete(idx));
1160 }
1161 }
1162 }
1163
1164 use AbstractHeapType::*;
1165 let mut choices = vec![Func, Extern];
1166 if self.config.exceptions_enabled {
1167 choices.push(Exn);
1168 }
1169 if self.config.gc_enabled {
1170 choices.extend(
1171 [Any, None, NoExtern, NoFunc, Eq, Struct, Array, I31]
1172 .iter()
1173 .copied(),
1174 );
1175 }
1176
1177 Ok(HeapType::Abstract {
1178 shared: self.arbitrary_shared(u)?,
1179 ty: *u.choose(&choices)?,
1180 })
1181 }
1182
1183 fn arbitrary_func_type(&mut self, u: &mut Unstructured) -> Result<Rc<FuncType>> {
1184 let mut params = vec![];
1185 let mut results = vec![];
1186 let max_params = 20;
1187 arbitrary_loop(u, 0, max_params, |u| {
1188 params.push(self.arbitrary_valtype(u)?);
1189 Ok(true)
1190 })?;
1191 let max_results = if self.config.multi_value_enabled {
1192 max_params
1193 } else {
1194 1
1195 };
1196 arbitrary_loop(u, 0, max_results, |u| {
1197 results.push(self.arbitrary_valtype(u)?);
1198 Ok(true)
1199 })?;
1200 Ok(Rc::new(FuncType { params, results }))
1201 }
1202
    /// Whether another tag (imported or defined) may still be added.
    fn can_add_local_or_import_tag(&self) -> bool {
        self.config.exceptions_enabled
            && self.has_tag_func_types()
            && self.tags.len() < self.config.max_tags
    }

    /// Whether another function may still be added (requires at least
    /// one function type to reference).
    fn can_add_local_or_import_func(&self) -> bool {
        !self.func_types.is_empty() && self.funcs.len() < self.config.max_funcs
    }

    /// Whether another table may still be added.
    fn can_add_local_or_import_table(&self) -> bool {
        self.tables.len() < self.config.max_tables
    }

    /// Whether another global may still be added.
    fn can_add_local_or_import_global(&self) -> bool {
        self.globals.len() < self.config.max_globals
    }

    /// Whether another memory may still be added.
    fn can_add_local_or_import_memory(&self) -> bool {
        self.memories.len() < self.config.max_memories
    }
1224
1225 fn imports_exports_from_module_shape(&mut self, u: &mut Unstructured) -> Result<bool> {
1226 let example_module = if let Some(wasm) = self.config.module_shape.clone() {
1227 wasm
1228 } else {
1229 return Ok(false);
1230 };
1231
1232 #[cfg(feature = "wasmparser")]
1233 {
1234 self._imports_exports_from_module_shape(u, &example_module)?;
1235 Ok(true)
1236 }
1237 #[cfg(not(feature = "wasmparser"))]
1238 {
1239 let _ = (example_module, u);
1240 panic!("support for `module_shape` was disabled at compile time");
1241 }
1242 }
1243
1244 #[cfg(feature = "wasmparser")]
1245 fn _imports_exports_from_module_shape(
1246 &mut self,
1247 u: &mut Unstructured,
1248 example_module: &[u8],
1249 ) -> Result<()> {
1250 let mut available_funcs: Vec<u32> = Vec::new();
1254 let mut available_tags: Vec<wasmparser::TagType> = Vec::new();
1255 let mut available_tables: Vec<wasmparser::TableType> = Vec::new();
1256 let mut available_globals: Vec<wasmparser::GlobalType> = Vec::new();
1257 let mut available_memories: Vec<wasmparser::MemoryType> = Vec::new();
1258
1259 let mut required_types: Vec<SubType> = Vec::new();
1260 let mut required_recgrps: Vec<usize> = Vec::new();
1261 let mut required_imports: Vec<wasmparser::Import> = Vec::new();
1262 let mut required_exports: Vec<wasmparser::Export> = Vec::new();
1263 let mut validator = wasmparser::Validator::new();
1264 validator
1265 .validate_all(example_module)
1266 .expect("Failed to validate `module_shape` module");
1267 for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
1268 match payload.expect("could not parse the `module_shape` module") {
1269 wasmparser::Payload::TypeSection(type_reader) => {
1270 for recgrp in type_reader {
1271 let recgrp = recgrp.expect("could not read recursive group");
1272 required_recgrps.push(recgrp.types().len());
1273 for subtype in recgrp.into_types() {
1274 let mut subtype: SubType = subtype.try_into().unwrap();
1275 if let Some(supertype_idx) = subtype.supertype {
1276 subtype.depth = required_types[supertype_idx as usize].depth + 1;
1277 }
1278 required_types.push(subtype);
1279 }
1280 }
1281 }
1282 wasmparser::Payload::ImportSection(import_reader) => {
1283 for im in import_reader {
1284 let im = im.expect("could not read import");
1285 required_imports.push(im);
1286 }
1287 }
1288 wasmparser::Payload::ExportSection(export_reader) => {
1289 for ex in export_reader {
1290 let ex = ex.expect("could not read export");
1291 required_exports.push(ex);
1292 }
1293 }
1294 wasmparser::Payload::FunctionSection(function_reader) => {
1295 for func in function_reader {
1296 let func = func.expect("could not read function");
1297 available_funcs.push(func);
1298 }
1299 }
1300 wasmparser::Payload::TagSection(tag_reader) => {
1301 for tag in tag_reader {
1302 let tag = tag.expect("could not read tag");
1303 available_tags.push(tag);
1304 }
1305 }
1306 wasmparser::Payload::TableSection(table_reader) => {
1307 for table in table_reader {
1308 let table = table.expect("could not read table");
1309 available_tables.push(table.ty);
1310 }
1311 }
1312 wasmparser::Payload::MemorySection(memory_reader) => {
1313 for memory in memory_reader {
1314 let memory = memory.expect("could not read memory");
1315 available_memories.push(memory);
1316 }
1317 }
1318 wasmparser::Payload::GlobalSection(global_reader) => {
1319 for global in global_reader {
1320 let global = global.expect("could not read global");
1321 available_globals.push(global.ty);
1322 }
1323 }
1324 _ => {}
1325 }
1326 }
1327
1328 let mut recgrp_start_idx = self.types.len();
1331 for size in required_recgrps {
1332 self.rec_groups
1333 .push(recgrp_start_idx..recgrp_start_idx + size);
1334 recgrp_start_idx += size;
1335 }
1336 for ty in &required_types {
1337 self.add_type(ty.clone());
1338 }
1339
1340 let mut imported_funcs: Vec<u32> = Vec::new();
1344 let mut imported_tags: Vec<wasmparser::TagType> = Vec::new();
1345 let mut imported_tables: Vec<wasmparser::TableType> = Vec::new();
1346 let mut imported_globals: Vec<wasmparser::GlobalType> = Vec::new();
1347 let mut imported_memories: Vec<wasmparser::MemoryType> = Vec::new();
1348 let mut new_imports = Vec::with_capacity(required_imports.len());
1349 for import in required_imports {
1350 let entity_type = match &import.ty {
1351 wasmparser::TypeRef::Func(sig_idx) => {
1352 imported_funcs.push(*sig_idx);
1353 match required_types.get(*sig_idx as usize) {
1354 None => panic!("signature index refers to a type out of bounds"),
1355 Some(ty) => match &ty.composite_type.inner {
1356 CompositeInnerType::Func(func_type) => {
1357 let entity = EntityType::Func(*sig_idx, Rc::clone(func_type));
1358 self.funcs.push((*sig_idx, Rc::clone(func_type)));
1359 entity
1360 }
1361 _ => panic!("a function type is required for function import"),
1362 },
1363 }
1364 }
1365
1366 wasmparser::TypeRef::Tag(wasmparser::TagType {
1367 kind,
1368 func_type_idx,
1369 }) => {
1370 imported_tags.push(wasmparser::TagType {
1371 kind: *kind,
1372 func_type_idx: *func_type_idx,
1373 });
1374 match required_types.get(*func_type_idx as usize) {
1375 None => {
1376 panic!("function type index for tag refers to a type out of bounds")
1377 }
1378 Some(ty) => match &ty.composite_type.inner {
1379 CompositeInnerType::Func(func_type) => {
1380 let tag_type = TagType {
1381 func_type_idx: *func_type_idx,
1382 func_type: Rc::clone(func_type),
1383 };
1384 let entity = EntityType::Tag(tag_type.clone());
1385 self.tags.push(tag_type);
1386 entity
1387 }
1388 _ => panic!("a function type is required for tag import"),
1389 },
1390 }
1391 }
1392
1393 wasmparser::TypeRef::Table(table_ty) => {
1394 imported_tables.push(*table_ty);
1395 let table_ty = TableType::try_from(*table_ty).unwrap();
1396 let entity = EntityType::Table(table_ty);
1397 self.tables.push(table_ty);
1398 entity
1399 }
1400
1401 wasmparser::TypeRef::Memory(memory_ty) => {
1402 imported_memories.push(*memory_ty);
1403 let memory_ty = MemoryType::from(*memory_ty);
1404 let entity = EntityType::Memory(memory_ty);
1405 self.memories.push(memory_ty);
1406 entity
1407 }
1408
1409 wasmparser::TypeRef::Global(global_ty) => {
1410 imported_globals.push(*global_ty);
1411 let global_ty = GlobalType::try_from(*global_ty).unwrap();
1412 let entity = EntityType::Global(global_ty);
1413 self.globals.push(global_ty);
1414 entity
1415 }
1416 };
1417 new_imports.push(Import {
1418 module: import.module.to_string(),
1419 field: import.name.to_string(),
1420 entity_type,
1421 });
1422 self.num_imports += 1;
1423 }
1424 self.imports.extend(new_imports);
1425 available_tags.splice(0..0, imported_tags);
1426 available_funcs.splice(0..0, imported_funcs);
1427 available_tables.splice(0..0, imported_tables);
1428 available_globals.splice(0..0, imported_globals);
1429 available_memories.splice(0..0, imported_memories);
1430
1431 for export in required_exports {
1433 let index = match export.kind {
1434 wasmparser::ExternalKind::Func => {
1435 match available_funcs.get(export.index as usize) {
1436 None => panic!("function index out of bounds"),
1437 Some(sig_idx) => match required_types.get(*sig_idx as usize) {
1438 None => panic!("signature index refers to a type out of bounds"),
1439 Some(ty) => match &ty.composite_type.inner {
1440 CompositeInnerType::Func(func_type) => {
1441 let func_index = self.funcs.len() as u32;
1442 self.funcs.push((*sig_idx, Rc::clone(func_type)));
1443 self.num_defined_funcs += 1;
1444 func_index
1445 }
1446 _ => panic!("a function type is required for function export"),
1447 },
1448 },
1449 }
1450 }
1451
1452 wasmparser::ExternalKind::Tag => match available_tags.get(export.index as usize) {
1453 None => panic!("tag index out of bounds"),
1454 Some(wasmparser::TagType { func_type_idx, .. }) => {
1455 match required_types.get(*func_type_idx as usize) {
1456 None => {
1457 panic!("function type index for tag refers to a type out of bounds")
1458 }
1459 Some(ty) => match &ty.composite_type.inner {
1460 CompositeInnerType::Func(func_type) => {
1461 let tag_index = self.tags.len() as u32;
1462 self.tags.push(TagType {
1463 func_type_idx: *func_type_idx,
1464 func_type: Rc::clone(func_type),
1465 });
1466 self.num_defined_tags += 1;
1467 tag_index
1468 }
1469 _ => panic!("a function type is required for tag export"),
1470 },
1471 }
1472 }
1473 },
1474
1475 wasmparser::ExternalKind::Table => {
1476 match available_tables.get(export.index as usize) {
1477 None => panic!("table index out of bounds"),
1478 Some(ty) => {
1479 self.add_arbitrary_table_of_type((*ty).try_into().unwrap(), u)?
1480 }
1481 }
1482 }
1483
1484 wasmparser::ExternalKind::Memory => {
1485 match available_memories.get(export.index as usize) {
1486 None => panic!("memory index out of bounds"),
1487 Some(ty) => self.add_arbitrary_memory_of_type((*ty).into())?,
1488 }
1489 }
1490
1491 wasmparser::ExternalKind::Global => {
1492 match available_globals.get(export.index as usize) {
1493 None => panic!("global index out of bounds"),
1494 Some(ty) => {
1495 self.add_arbitrary_global_of_type((*ty).try_into().unwrap(), u)?
1496 }
1497 }
1498 }
1499 };
1500 self.exports
1501 .push((export.name.to_string(), export.kind.into(), index));
1502 self.export_names.insert(export.name.to_string());
1503 }
1504
1505 Ok(())
1506 }
1507
    /// Generate an arbitrary set of imports, driven by `u`.
    ///
    /// Adds between `config.min_imports` and `config.max_imports` imports
    /// (counting those already present), stopping early when the type-size
    /// budget runs out or no entity kind can currently be added.
    fn arbitrary_imports(&mut self, u: &mut Unstructured) -> Result<()> {
        // Already over the type-size budget: generate nothing.
        if self.config.max_type_size < self.type_size {
            return Ok(());
        }

        // (module, field) pairs seen so far, used to enforce uniqueness when
        // duplicate imports are disallowed.
        let mut import_strings = HashSet::new();
        // Generators, one per importable entity kind. Rebuilt every iteration
        // because what can be added changes as entities accumulate.
        let mut choices: Vec<fn(&mut Unstructured, &mut Module) -> Result<EntityType>> =
            Vec::with_capacity(5);
        let min = self.config.min_imports.saturating_sub(self.num_imports);
        let max = self.config.max_imports.saturating_sub(self.num_imports);
        arbitrary_loop(u, min, max, |u| {
            choices.clear();
            if self.can_add_local_or_import_tag() {
                choices.push(|u, m| {
                    let ty = m.arbitrary_tag_type(u)?;
                    Ok(EntityType::Tag(ty))
                });
            }
            if self.can_add_local_or_import_func() {
                choices.push(|u, m| {
                    let idx = *u.choose(&m.func_types)?;
                    let ty = m.func_type(idx).clone();
                    Ok(EntityType::Func(idx, ty))
                });
            }
            if self.can_add_local_or_import_global() {
                choices.push(|u, m| {
                    let ty = m.arbitrary_global_type(u)?;
                    Ok(EntityType::Global(ty))
                });
            }
            if self.can_add_local_or_import_memory() {
                choices.push(|u, m| {
                    let ty = arbitrary_memtype(u, m.config())?;
                    Ok(EntityType::Memory(ty))
                });
            }
            if self.can_add_local_or_import_table() {
                choices.push(|u, m| {
                    let ty = arbitrary_table_type(u, m.config(), Some(m))?;
                    Ok(EntityType::Table(ty))
                });
            }

            // Nothing importable is left; end the loop.
            if choices.is_empty() {
                return Ok(false);
            }

            let f = u.choose(&choices)?;
            let entity_type = f(u, self)?;
            // `+ 1` accounts for the import entry itself on top of the size
            // of its type.
            let budget = self.config.max_type_size - self.type_size;
            if entity_type.size() + 1 > budget {
                return Ok(false);
            }
            self.type_size += entity_type.size() + 1;

            let mut import_pair = unique_import_strings(1_000, u)?;
            if self.duplicate_imports_behavior == DuplicateImportsBehavior::Disallowed {
                // De-duplicate by appending a counter to the field name until
                // the pair is unseen.
                while import_strings.contains(&import_pair) {
                    use std::fmt::Write;
                    write!(&mut import_pair.1, "{}", import_strings.len()).unwrap();
                }
                import_strings.insert(import_pair.clone());
            }
            let (module, field) = import_pair;

            // Mirror the imported entity into the matching index space so
            // later generation can reference it.
            match &entity_type {
                EntityType::Tag(ty) => self.tags.push(ty.clone()),
                EntityType::Func(idx, ty) => self.funcs.push((*idx, ty.clone())),
                EntityType::Global(ty) => self.globals.push(*ty),
                EntityType::Table(ty) => self.tables.push(*ty),
                EntityType::Memory(ty) => self.memories.push(*ty),
            }

            self.num_imports += 1;
            self.imports.push(Import {
                module,
                field,
                entity_type,
            });
            Ok(true)
        })?;

        Ok(())
    }
1602
1603 fn arbitrary_imports_from_available(&mut self, u: &mut Unstructured) -> Result<bool> {
1609 let example_module = if let Some(wasm) = self.config.available_imports.take() {
1610 wasm
1611 } else {
1612 return Ok(false);
1613 };
1614
1615 #[cfg(feature = "wasmparser")]
1616 {
1617 self._arbitrary_imports_from_available(u, &example_module)?;
1618 Ok(true)
1619 }
1620 #[cfg(not(feature = "wasmparser"))]
1621 {
1622 let _ = (example_module, u);
1623 panic!("support for `available_imports` was disabled at compile time");
1624 }
1625 }
1626
    /// Parse `example_module` and import an arbitrary subset of its imports.
    ///
    /// The example module's types are appended to this module's type section,
    /// and each chosen import is mirrored into the matching index space.
    #[cfg(feature = "wasmparser")]
    fn _arbitrary_imports_from_available(
        &mut self,
        u: &mut Unstructured,
        example_module: &[u8],
    ) -> Result<()> {
        // Sizes of the rec groups found in the example module, in order.
        let mut new_recgrps = Vec::<usize>::new();
        // All types declared by the example module, flattened in index order.
        let mut available_types = Vec::<SubType>::new();
        // The subset of the example module's imports chosen (below) for use.
        let mut available_imports = Vec::<wasmparser::Import>::new();
        let mut validator = wasmparser::Validator::new();
        validator
            .validate_all(example_module)
            .expect("Failed to validate `module_shape` module");
        for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
            match payload.expect("could not parse the available import payload") {
                wasmparser::Payload::TypeSection(type_reader) => {
                    for recgrp in type_reader {
                        let recgrp = recgrp.expect("could not read recursive group");
                        new_recgrps.push(recgrp.types().len());
                        for subtype in recgrp.into_types() {
                            let mut subtype: SubType = subtype.try_into().unwrap();
                            // Supertypes precede subtypes in index order, so
                            // the parent's depth is already known here.
                            if let Some(supertype_idx) = subtype.supertype {
                                subtype.depth = available_types[supertype_idx as usize].depth + 1;
                            }
                            available_types.push(subtype);
                        }
                    }
                }
                wasmparser::Payload::ImportSection(import_reader) => {
                    for im in import_reader {
                        let im = im.expect("could not read import");
                        // Each import is taken or skipped based on a fuzzer
                        // coin flip (skipped if input is exhausted).
                        let use_import = u.arbitrary().unwrap_or(false);
                        if !use_import {
                            continue;
                        }
                        available_imports.push(im);
                    }
                }
                _ => {}
            }
        }

        // Materialize each chosen import, skipping any that would exceed a
        // config limit or the remaining type-size budget.
        let mut new_imports = Vec::with_capacity(available_imports.len());
        for import in available_imports {
            let type_size_budget = self.config.max_type_size - self.type_size;
            let entity_type = match &import.ty {
                wasmparser::TypeRef::Func(sig_idx) => {
                    if self.funcs.len() >= self.config.max_funcs {
                        continue;
                    } else {
                        match available_types.get(*sig_idx as usize) {
                            None => panic!("signature index refers to a type out of bounds"),
                            Some(ty) => match &ty.composite_type.inner {
                                CompositeInnerType::Func(func_type) => {
                                    let entity = EntityType::Func(*sig_idx, Rc::clone(func_type));
                                    if type_size_budget < entity.size() {
                                        continue;
                                    }
                                    self.funcs.push((*sig_idx, Rc::clone(func_type)));
                                    entity
                                }
                                _ => panic!("a function type is required for function import"),
                            },
                        }
                    }
                }

                wasmparser::TypeRef::Tag(wasmparser::TagType { func_type_idx, .. }) => {
                    let can_add_tag = self.tags.len() < self.config.max_tags;
                    if !self.config.exceptions_enabled || !can_add_tag {
                        continue;
                    } else {
                        match available_types.get(*func_type_idx as usize) {
                            None => {
                                panic!("function type index for tag refers to a type out of bounds")
                            }
                            Some(ty) => match &ty.composite_type.inner {
                                CompositeInnerType::Func(func_type) => {
                                    let tag_type = TagType {
                                        func_type_idx: *func_type_idx,
                                        func_type: Rc::clone(func_type),
                                    };
                                    let entity = EntityType::Tag(tag_type.clone());
                                    if type_size_budget < entity.size() {
                                        continue;
                                    }
                                    self.tags.push(tag_type);
                                    entity
                                }
                                _ => panic!("a function type is required for tag import"),
                            },
                        }
                    }
                }

                wasmparser::TypeRef::Table(table_ty) => {
                    let table_ty = TableType::try_from(*table_ty).unwrap();
                    let entity = EntityType::Table(table_ty);
                    let type_size = entity.size();
                    if type_size_budget < type_size || !self.can_add_local_or_import_table() {
                        continue;
                    }
                    self.type_size += type_size;
                    self.tables.push(table_ty);
                    entity
                }

                wasmparser::TypeRef::Memory(memory_ty) => {
                    let memory_ty = MemoryType::from(*memory_ty);
                    let entity = EntityType::Memory(memory_ty);
                    let type_size = entity.size();
                    if type_size_budget < type_size || !self.can_add_local_or_import_memory() {
                        continue;
                    }
                    self.type_size += type_size;
                    self.memories.push(memory_ty);
                    entity
                }

                wasmparser::TypeRef::Global(global_ty) => {
                    let global_ty = GlobalType::try_from(*global_ty).unwrap();
                    let entity = EntityType::Global(global_ty);
                    let type_size = entity.size();
                    if type_size_budget < type_size || !self.can_add_local_or_import_global() {
                        continue;
                    }
                    self.type_size += type_size;
                    self.globals.push(global_ty);
                    entity
                }
            };
            new_imports.push(Import {
                module: import.module.to_string(),
                field: import.name.to_string(),
                entity_type,
            });
            self.num_imports += 1;
        }

        // Register the example module's rec groups and types after this
        // module's existing ones.
        let mut recgrp_start_idx = self.types.len();
        for size in new_recgrps {
            self.rec_groups
                .push(recgrp_start_idx..recgrp_start_idx + size);
            recgrp_start_idx += size;
        }
        for ty in available_types {
            self.add_type(ty);
        }
        self.imports.extend(new_imports);

        Ok(())
    }
1792
1793 fn type_of(&self, kind: ExportKind, index: u32) -> EntityType {
1794 match kind {
1795 ExportKind::Global => EntityType::Global(self.globals[index as usize]),
1796 ExportKind::Memory => EntityType::Memory(self.memories[index as usize]),
1797 ExportKind::Table => EntityType::Table(self.tables[index as usize]),
1798 ExportKind::Func => {
1799 let (_idx, ty) = &self.funcs[index as usize];
1800 EntityType::Func(u32::max_value(), ty.clone())
1801 }
1802 ExportKind::Tag => EntityType::Tag(self.tags[index as usize].clone()),
1803 }
1804 }
1805
1806 fn ty(&self, idx: u32) -> &SubType {
1807 &self.types[idx as usize]
1808 }
1809
1810 fn func_types(&self) -> impl Iterator<Item = (u32, &FuncType)> + '_ {
1811 self.func_types
1812 .iter()
1813 .copied()
1814 .map(move |type_i| (type_i, &**self.func_type(type_i)))
1815 }
1816
1817 fn func_type(&self, idx: u32) -> &Rc<FuncType> {
1818 match &self.ty(idx).composite_type.inner {
1819 CompositeInnerType::Func(f) => f,
1820 _ => panic!("types[{idx}] is not a func type"),
1821 }
1822 }
1823
1824 fn tags(&self) -> impl Iterator<Item = (u32, &TagType)> + '_ {
1825 self.tags
1826 .iter()
1827 .enumerate()
1828 .map(move |(i, ty)| (i as u32, ty))
1829 }
1830
1831 fn funcs(&self) -> impl Iterator<Item = (u32, &Rc<FuncType>)> + '_ {
1832 self.funcs
1833 .iter()
1834 .enumerate()
1835 .map(move |(i, (_, ty))| (i as u32, ty))
1836 }
1837
1838 fn has_tag_func_types(&self) -> bool {
1839 self.tag_func_types().next().is_some()
1840 }
1841
1842 fn tag_func_types(&self) -> impl Iterator<Item = u32> + '_ {
1843 self.func_types
1844 .iter()
1845 .copied()
1846 .filter(move |i| self.func_type(*i).results.is_empty())
1847 }
1848
1849 fn arbitrary_valtype(&self, u: &mut Unstructured) -> Result<ValType> {
1850 #[derive(PartialEq, Eq, PartialOrd, Ord)]
1851 enum ValTypeClass {
1852 I32,
1853 I64,
1854 F32,
1855 F64,
1856 V128,
1857 Ref,
1858 }
1859
1860 let mut val_classes: Vec<_> = self
1861 .valtypes
1862 .iter()
1863 .map(|vt| match vt {
1864 ValType::I32 => ValTypeClass::I32,
1865 ValType::I64 => ValTypeClass::I64,
1866 ValType::F32 => ValTypeClass::F32,
1867 ValType::F64 => ValTypeClass::F64,
1868 ValType::V128 => ValTypeClass::V128,
1869 ValType::Ref(_) => ValTypeClass::Ref,
1870 })
1871 .collect();
1872 val_classes.sort_unstable();
1873 val_classes.dedup();
1874
1875 match u.choose(&val_classes)? {
1876 ValTypeClass::I32 => Ok(ValType::I32),
1877 ValTypeClass::I64 => Ok(ValType::I64),
1878 ValTypeClass::F32 => Ok(ValType::F32),
1879 ValTypeClass::F64 => Ok(ValType::F64),
1880 ValTypeClass::V128 => Ok(ValType::V128),
1881 ValTypeClass::Ref => Ok(ValType::Ref(self.arbitrary_ref_type(u)?)),
1882 }
1883 }
1884
1885 fn arbitrary_global_type(&self, u: &mut Unstructured) -> Result<GlobalType> {
1886 let val_type = self.arbitrary_valtype(u)?;
1887 let shared = match val_type {
1889 ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {
1890 self.arbitrary_shared(u)?
1891 }
1892 ValType::Ref(r) => self.is_shared_ref_type(r),
1893 };
1894 Ok(GlobalType {
1895 val_type,
1896 mutable: u.arbitrary()?,
1897 shared,
1898 })
1899 }
1900
1901 fn arbitrary_tag_type(&self, u: &mut Unstructured) -> Result<TagType> {
1902 let candidate_func_types: Vec<_> = self.tag_func_types().collect();
1903 arbitrary_tag_type(u, &candidate_func_types, |ty_idx| {
1904 self.func_type(ty_idx).clone()
1905 })
1906 }
1907
1908 fn arbitrary_tags(&mut self, u: &mut Unstructured) -> Result<()> {
1909 if !self.config.exceptions_enabled || !self.has_tag_func_types() {
1910 return Ok(());
1911 }
1912
1913 arbitrary_loop(u, self.config.min_tags, self.config.max_tags, |u| {
1914 if !self.can_add_local_or_import_tag() {
1915 return Ok(false);
1916 }
1917 self.tags.push(self.arbitrary_tag_type(u)?);
1918 self.num_defined_tags += 1;
1919 Ok(true)
1920 })
1921 }
1922
1923 fn arbitrary_funcs(&mut self, u: &mut Unstructured) -> Result<()> {
1924 if self.func_types.is_empty() {
1925 return Ok(());
1926 }
1927
1928 let unshared_func_types: Vec<_> = self
1933 .func_types
1934 .iter()
1935 .copied()
1936 .filter(|&i| !self.is_shared_type(i))
1937 .collect();
1938 if unshared_func_types.is_empty() {
1939 return Ok(());
1940 }
1941
1942 arbitrary_loop(u, self.config.min_funcs, self.config.max_funcs, |u| {
1943 if !self.can_add_local_or_import_func() {
1944 return Ok(false);
1945 }
1946 let max = unshared_func_types.len() - 1;
1947 let ty = unshared_func_types[u.int_in_range(0..=max)?];
1948 self.funcs.push((ty, self.func_type(ty).clone()));
1949 self.num_defined_funcs += 1;
1950 Ok(true)
1951 })
1952 }
1953
1954 fn arbitrary_tables(&mut self, u: &mut Unstructured) -> Result<()> {
1955 arbitrary_loop(
1956 u,
1957 self.config.min_tables as usize,
1958 self.config.max_tables,
1959 |u| {
1960 if !self.can_add_local_or_import_table() {
1961 return Ok(false);
1962 }
1963 let ty = arbitrary_table_type(u, self.config(), Some(self))?;
1964 self.add_arbitrary_table_of_type(ty, u)?;
1965 Ok(true)
1966 },
1967 )
1968 }
1969
1970 fn arbitrary_table_init(
1976 &mut self,
1977 u: &mut Unstructured,
1978 ty: RefType,
1979 ) -> Result<Option<ConstExpr>> {
1980 if !self.config.gc_enabled {
1981 assert!(ty.nullable);
1982 return Ok(None);
1983 }
1984 if ty.nullable && u.arbitrary()? {
1987 return Ok(None);
1988 }
1989 let expr = self.arbitrary_const_expr(ValType::Ref(ty), u, false)?;
1992 Ok(Some(expr))
1993 }
1994
1995 fn arbitrary_memories(&mut self, u: &mut Unstructured) -> Result<()> {
1996 arbitrary_loop(
1997 u,
1998 self.config.min_memories as usize,
1999 self.config.max_memories,
2000 |u| {
2001 if !self.can_add_local_or_import_memory() {
2002 return Ok(false);
2003 }
2004 let ty = arbitrary_memtype(u, self.config())?;
2005 self.add_arbitrary_memory_of_type(ty)?;
2006 Ok(true)
2007 },
2008 )
2009 }
2010
2011 fn add_arbitrary_global_of_type(
2013 &mut self,
2014 ty: GlobalType,
2015 u: &mut Unstructured,
2016 ) -> Result<u32> {
2017 let expr = self.arbitrary_const_expr(ty.val_type, u, true)?;
2018 let global_idx = self.globals.len() as u32;
2019 self.globals.push(ty);
2020 self.defined_globals.push((global_idx, expr));
2021 Ok(global_idx)
2022 }
2023
2024 fn add_arbitrary_memory_of_type(&mut self, ty: MemoryType) -> Result<u32> {
2026 let memory_idx = self.memories.len() as u32;
2027 self.num_defined_memories += 1;
2028 self.memories.push(ty);
2029 Ok(memory_idx)
2030 }
2031
2032 fn add_arbitrary_table_of_type(&mut self, ty: TableType, u: &mut Unstructured) -> Result<u32> {
2034 let expr = self.arbitrary_table_init(u, ty.element_type)?;
2035 let table_idx = self.tables.len() as u32;
2036 self.tables.push(ty);
2037 self.defined_tables.push(expr);
2038 Ok(table_idx)
2039 }
2040
    /// Generate an arbitrary constant expression producing a value of type
    /// `ty`.
    ///
    /// Candidates include `global.get` of a matching (importable or, when
    /// `allow_defined_globals`, defined) global, a literal constant,
    /// extended-const arithmetic for integers (if enabled), and
    /// `ref.null`/`ref.func` for reference types.
    fn arbitrary_const_expr(
        &mut self,
        ty: ValType,
        u: &mut Unstructured,
        allow_defined_globals: bool,
    ) -> Result<ConstExpr> {
        // The choices vector is cached on `self` to avoid reallocating the
        // boxed closures on every call; it is returned below.
        let mut choices = mem::take(&mut self.const_expr_choices);
        choices.clear();

        // `global.get` of any in-scope global whose type matches.
        for i in self.globals_for_const_expr(ty, allow_defined_globals) {
            choices.push(Box::new(move |_, _| Ok(ConstExpr::global_get(i))));
        }

        // Possibly narrow `ty` to a matching subtype before generating.
        let ty = self.arbitrary_matching_val_type(u, ty)?;
        match ty {
            ValType::I32 => {
                choices.push(Box::new(|u, _| Ok(ConstExpr::i32_const(u.arbitrary()?))));
                if self.config.extended_const_enabled {
                    choices.push(Box::new(arbitrary_extended_const));
                }
            }
            ValType::I64 => {
                choices.push(Box::new(|u, _| Ok(ConstExpr::i64_const(u.arbitrary()?))));
                if self.config.extended_const_enabled {
                    choices.push(Box::new(arbitrary_extended_const));
                }
            }
            ValType::F32 => choices.push(Box::new(|u, _| {
                Ok(ConstExpr::f32_const(u.arbitrary::<f32>()?.into()))
            })),
            ValType::F64 => choices.push(Box::new(|u, _| {
                Ok(ConstExpr::f64_const(u.arbitrary::<f64>()?.into()))
            })),
            ValType::V128 => {
                choices.push(Box::new(|u, _| Ok(ConstExpr::v128_const(u.arbitrary()?))))
            }

            ValType::Ref(ty) => {
                if ty.nullable {
                    choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_null(ty.heap_type))));
                }

                match ty.heap_type {
                    // Abstract `func`: `ref.func` of any function whose
                    // sharedness matches.
                    HeapType::Abstract {
                        ty: AbstractHeapType::Func,
                        shared,
                    } => {
                        let num_funcs = self
                            .funcs
                            .iter()
                            .filter(|(t, _)| shared == self.is_shared_type(*t))
                            .count();
                        if num_funcs > 0 {
                            // Pick among matching functions only, then map
                            // back to the full function index space.
                            let pick = u.int_in_range(0..=num_funcs - 1)?;
                            let (i, _) = self
                                .funcs
                                .iter()
                                .map(|(t, _)| *t)
                                .enumerate()
                                .filter(|(_, t)| shared == self.is_shared_type(*t))
                                .nth(pick)
                                .unwrap();
                            choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
                        }
                    }

                    // Concrete type: `ref.func` of any function declared with
                    // exactly that type index.
                    HeapType::Concrete(ty) => {
                        for (i, fty) in self.funcs.iter().map(|(t, _)| *t).enumerate() {
                            if ty != fty {
                                continue;
                            }
                            choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
                        }
                    }

                    // Other heap types: only `ref.null` (pushed above) if
                    // nullable.
                    _ => {}
                }
            }
        }

        let f = u.choose(&choices)?;
        let ret = f(u, ty);
        // Hand the (cleared-next-call) choices vector back to the cache.
        self.const_expr_choices = choices;
        return ret;

        /// Build an extended-const expression: a random arithmetic tree of
        /// adds/subs/muls over integer constants, emitted in reverse so the
        /// instruction sequence is in stack order.
        fn arbitrary_extended_const(u: &mut Unstructured<'_>, ty: ValType) -> Result<ConstExpr> {
            use wasm_encoder::Instruction::*;

            // Extended const is only generated for integer types.
            assert!(ty == ValType::I32 || ty == ValType::I64);
            let add = if ty == ValType::I32 { I32Add } else { I64Add };
            let sub = if ty == ValType::I32 { I32Sub } else { I64Sub };
            let mul = if ty == ValType::I32 { I32Mul } else { I64Mul };
            let const_: fn(&mut Unstructured<'_>) -> Result<wasm_encoder::Instruction<'static>> =
                if ty == ValType::I32 {
                    |u| u.arbitrary().map(I32Const)
                } else {
                    |u| u.arbitrary().map(I64Const)
                };

            // `needed` counts operands still required to close the tree;
            // constants satisfy one, binary ops demand one more.
            let mut instrs = Vec::new();
            let mut needed = 1;
            while needed > 0 {
                // Force constants (choice 0) once input runs dry or the
                // expression gets long, guaranteeing termination.
                let choice = if u.is_empty() || instrs.len() > 10 {
                    0
                } else {
                    u.int_in_range(0..=3)?
                };
                match choice {
                    0 => {
                        instrs.push(const_(u)?);
                        needed -= 1;
                    }
                    1 => {
                        instrs.push(add.clone());
                        needed += 1;
                    }
                    2 => {
                        instrs.push(sub.clone());
                        needed += 1;
                    }
                    3 => {
                        instrs.push(mul.clone());
                        needed += 1;
                    }
                    _ => unreachable!(),
                }
            }
            Ok(ConstExpr::extended(instrs.into_iter().rev()))
        }
    }
2196
2197 fn arbitrary_globals(&mut self, u: &mut Unstructured) -> Result<()> {
2198 arbitrary_loop(u, self.config.min_globals, self.config.max_globals, |u| {
2199 if !self.can_add_local_or_import_global() {
2200 return Ok(false);
2201 }
2202
2203 let ty = self.arbitrary_global_type(u)?;
2204 self.add_arbitrary_global_of_type(ty, u)?;
2205
2206 Ok(true)
2207 })
2208 }
2209
2210 fn required_exports(&mut self, u: &mut Unstructured) -> Result<bool> {
2211 let example_module = if let Some(wasm) = self.config.exports.clone() {
2212 wasm
2213 } else {
2214 return Ok(false);
2215 };
2216
2217 #[cfg(feature = "wasmparser")]
2218 {
2219 self._required_exports(u, &example_module)?;
2220 Ok(true)
2221 }
2222 #[cfg(not(feature = "wasmparser"))]
2223 {
2224 let _ = (example_module, u);
2225 panic!("support for `exports` was disabled at compile time");
2226 }
2227 }
2228
2229 #[cfg(feature = "wasmparser")]
2230 fn _required_exports(&mut self, u: &mut Unstructured, example_module: &[u8]) -> Result<()> {
2231 let mut required_exports: Vec<wasmparser::Export> = vec![];
2232 let mut validator = wasmparser::Validator::new();
2233 let exports_types = validator
2234 .validate_all(&example_module)
2235 .expect("Failed to validate `exports` Wasm");
2236 for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
2237 match payload.expect("Failed to read `exports` Wasm") {
2238 wasmparser::Payload::ExportSection(export_reader) => {
2239 required_exports = export_reader
2240 .into_iter()
2241 .collect::<Result<_, _>>()
2242 .expect("Failed to read `exports` export section");
2243 }
2244 _ => {}
2245 }
2246 }
2247
2248 let exports_types = exports_types.as_ref();
2250 let check_and_get_func_type =
2251 |id: wasmparser::types::CoreTypeId| -> (Rc<FuncType>, SubType) {
2252 let subtype = exports_types.get(id).unwrap_or_else(|| {
2253 panic!("Unable to get subtype for {id:?} in `exports` Wasm")
2254 });
2255 match &subtype.composite_type.inner {
2256 wasmparser::CompositeInnerType::Func(func_type) => {
2257 assert!(
2258 subtype.is_final,
2259 "Subtype {subtype:?} from `exports` Wasm is not final"
2260 );
2261 assert!(
2262 subtype.supertype_idx.is_none(),
2263 "Subtype {subtype:?} from `exports` Wasm has non-empty supertype"
2264 );
2265 let func_type = Rc::new(FuncType {
2266 params: func_type
2267 .params()
2268 .iter()
2269 .copied()
2270 .map(|t| t.try_into().unwrap())
2271 .collect(),
2272 results: func_type
2273 .results()
2274 .iter()
2275 .copied()
2276 .map(|t| t.try_into().unwrap())
2277 .collect(),
2278 });
2279 let subtype = SubType {
2280 is_final: true,
2281 supertype: None,
2282 depth: 1,
2283 composite_type: CompositeType::new_func(
2284 Rc::clone(&func_type),
2285 subtype.composite_type.shared,
2286 ),
2287 };
2288 (func_type, subtype)
2289 }
2290 _ => panic!(
2291 "Unable to handle type {:?} from `exports` Wasm",
2292 subtype.composite_type
2293 ),
2294 }
2295 };
2296 for export in required_exports {
2297 let new_index = match exports_types
2298 .entity_type_from_export(&export)
2299 .unwrap_or_else(|| {
2300 panic!("Unable to get type from export {export:?} in `exports` Wasm",)
2301 }) {
2302 wasmparser::types::EntityType::Func(id) => {
2304 let (func_type, subtype) = check_and_get_func_type(id);
2305 self.rec_groups.push(self.types.len()..self.types.len() + 1);
2306 let type_index = self.add_type(subtype);
2307 let func_index = self.funcs.len() as u32;
2308 self.funcs.push((type_index, func_type));
2309 self.num_defined_funcs += 1;
2310 func_index
2311 }
2312 wasmparser::types::EntityType::Global(global_type) => {
2314 self.add_arbitrary_global_of_type(global_type.try_into().unwrap(), u)?
2315 }
2316 wasmparser::types::EntityType::Memory(memory_type) => {
2318 self.add_arbitrary_memory_of_type(memory_type.into())?
2319 }
2320 wasmparser::types::EntityType::Table(table_type) => {
2322 self.add_arbitrary_table_of_type(table_type.try_into().unwrap(), u)?
2323 }
2324 wasmparser::types::EntityType::Tag(id) => {
2326 let (func_type, subtype) = check_and_get_func_type(id);
2327 self.rec_groups.push(self.types.len()..self.types.len() + 1);
2328 let type_index = self.add_type(subtype);
2329 let tag_index = self.tags.len() as u32;
2330 self.tags.push(TagType {
2331 func_type_idx: type_index,
2332 func_type: func_type,
2333 });
2334 self.num_defined_tags += 1;
2335 tag_index
2336 }
2337 };
2338 self.exports
2339 .push((export.name.to_string(), export.kind.into(), new_index));
2340 self.export_names.insert(export.name.to_string());
2341 }
2342
2343 Ok(())
2344 }
2345
    /// Generate an arbitrary set of exports over already-defined entities.
    ///
    /// With `config.export_everything`, every function, table, memory, and
    /// global is exported; otherwise a bounded random subset is chosen,
    /// constrained by the type-size budget.
    fn arbitrary_exports(&mut self, u: &mut Unstructured) -> Result<()> {
        if self.config.max_type_size < self.type_size && !self.config.export_everything {
            return Ok(());
        }

        // One candidate list per exportable kind (tags are not exported here).
        let mut choices: Vec<Vec<(ExportKind, u32)>> = Vec::with_capacity(6);
        choices.push(
            (0..self.funcs.len())
                .map(|i| (ExportKind::Func, i as u32))
                .collect(),
        );
        choices.push(
            (0..self.tables.len())
                .map(|i| (ExportKind::Table, i as u32))
                .collect(),
        );
        choices.push(
            (0..self.memories.len())
                .map(|i| (ExportKind::Memory, i as u32))
                .collect(),
        );
        choices.push(
            (0..self.globals.len())
                .map(|i| (ExportKind::Global, i as u32))
                .collect(),
        );

        if self.config.export_everything {
            // Export every candidate under a fresh unique name, ignoring the
            // type-size budget.
            for choices_by_kind in choices {
                for (kind, idx) in choices_by_kind {
                    let name = unique_string(1_000, &mut self.export_names, u)?;
                    self.add_arbitrary_export(name, kind, idx)?;
                }
            }
            return Ok(());
        }

        arbitrary_loop(u, self.config.min_exports, self.config.max_exports, |u| {
            // Drop candidates that no longer fit in the remaining budget
            // (`+ 1` accounts for the export entry itself).
            let max_size = self.config.max_type_size - self.type_size;
            for list in choices.iter_mut() {
                list.retain(|(kind, idx)| self.type_of(*kind, *idx).size() + 1 < max_size);
            }
            choices.retain(|list| !list.is_empty());
            if choices.is_empty() {
                return Ok(false);
            }

            // Pick a kind first, then an entity of that kind, so small index
            // spaces aren't drowned out by large ones.
            let name = unique_string(1_000, &mut self.export_names, u)?;
            let list = u.choose(&choices)?;
            let (kind, idx) = *u.choose(list)?;
            self.add_arbitrary_export(name, kind, idx)?;
            Ok(true)
        })
    }
2410
2411 fn add_arbitrary_export(&mut self, name: String, kind: ExportKind, idx: u32) -> Result<()> {
2412 let ty = self.type_of(kind, idx);
2413 self.type_size += 1 + ty.size();
2414 if self.type_size <= self.config.max_type_size {
2415 self.exports.push((name, kind, idx));
2416 Ok(())
2417 } else {
2418 Err(arbitrary::Error::IncorrectFormat)
2422 }
2423 }
2424
2425 fn arbitrary_start(&mut self, u: &mut Unstructured) -> Result<()> {
2426 if !self.config.allow_start_export {
2427 return Ok(());
2428 }
2429
2430 let mut choices = Vec::with_capacity(self.funcs.len());
2431
2432 for (func_idx, ty) in self.funcs() {
2433 if ty.params.is_empty() && ty.results.is_empty() {
2434 choices.push(func_idx);
2435 }
2436 }
2437
2438 if !choices.is_empty() && u.arbitrary().unwrap_or(false) {
2439 let f = *u.choose(&choices)?;
2440 self.start = Some(f);
2441 }
2442
2443 Ok(())
2444 }
2445
    /// Generate the module's element segments.
    ///
    /// Builds a menu of segment-kind generators (passive, declared, and
    /// active-per-table), then repeatedly picks one and fills the segment
    /// with either function indices or const expressions.
    fn arbitrary_elems(&mut self, u: &mut Unstructured) -> Result<()> {
        // Collect immutable i32/i64 globals usable as active-segment offsets.
        // Skipped under `disallow_traps` since a global offset's value can't
        // be bounded ahead of time.
        let mut global_i32 = vec![];
        let mut global_i64 = vec![];
        if !self.config.disallow_traps {
            for i in self.globals_for_const_expr(ValType::I32, true) {
                global_i32.push(i);
            }
            for i in self.globals_for_const_expr(ValType::I64, true) {
                global_i64.push(i);
            }
        }
        let disallow_traps = self.config.disallow_traps;
        // Generates the (kind, max-size-hint) pair for one active segment.
        let arbitrary_active_elem =
            |u: &mut Unstructured, min_mem_size: u64, table: Option<u32>, table_ty: &TableType| {
                // 64-bit tables need i64 offsets; 32-bit tables need i32.
                let global_choices = if table_ty.table64 {
                    &global_i64
                } else {
                    &global_i32
                };
                let (offset, max_size_hint) = if !global_choices.is_empty() && u.arbitrary()? {
                    // Use a global as the offset; its value is unknown, so no
                    // size hint can be given.
                    let g = u.choose(&global_choices)?;
                    (Offset::Global(*g), None)
                } else {
                    // Constant offset: bound it by the table's minimum when
                    // traps are disallowed, otherwise by the index-type max.
                    let max_mem_size = if disallow_traps {
                        table_ty.minimum
                    } else if table_ty.table64 {
                        u64::MAX
                    } else {
                        u64::from(u32::MAX)
                    };
                    let offset = arbitrary_offset(u, min_mem_size, max_mem_size, 0)?;
                    // Usually hint a size that keeps the segment in-bounds.
                    let max_size_hint = if disallow_traps
                        || (offset <= min_mem_size
                            && u.int_in_range(0..=CHANCE_OFFSET_INBOUNDS)? != 0)
                    {
                        Some(min_mem_size - offset)
                    } else {
                        None
                    };

                    let offset = if table_ty.table64 {
                        Offset::Const64(offset as i64)
                    } else {
                        Offset::Const32(offset as i32)
                    };
                    (offset, max_size_hint)
                };
                Ok((ElementKind::Active { table, offset }, max_size_hint))
            };

        type GenElemSegment<'a> =
            dyn Fn(&mut Unstructured) -> Result<(ElementKind, Option<u64>)> + 'a;
        let mut choices: Vec<Box<GenElemSegment>> = Vec::new();

        // Passive and declared segments require bulk memory.
        if self.config.bulk_memory_enabled {
            choices.push(Box::new(|_| Ok((ElementKind::Passive, None))));
            choices.push(Box::new(|_| Ok((ElementKind::Declared, None))));
        }

        for (i, ty) in self.tables.iter().enumerate() {
            // Usually skip empty tables: any nonempty segment would trap.
            if ty.minimum == 0 && u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? != 0 {
                continue;
            }

            let minimum = ty.minimum;
            let ty = *ty;
            // MVP-style active segment (implicit table 0, funcref only).
            if i == 0 && ty.element_type == RefType::FUNCREF {
                choices.push(Box::new(move |u| {
                    arbitrary_active_elem(u, minimum, None, &ty)
                }));
            }
            // Explicitly-indexed active segments require bulk memory.
            if self.config.bulk_memory_enabled {
                let idx = Some(i as u32);
                choices.push(Box::new(move |u| {
                    arbitrary_active_elem(u, minimum, idx, &ty)
                }));
            }
        }

        if choices.is_empty() {
            return Ok(());
        }

        arbitrary_loop(
            u,
            self.config.min_element_segments,
            self.config.max_element_segments,
            |u| {
                let (kind, max_size_hint) = u.choose(&choices)?(u)?;
                let max = max_size_hint
                    .map(|i| usize::try_from(i).unwrap())
                    .unwrap_or_else(|| self.config.max_elements);

                // Element type: free choice for passive/declared, must match
                // the target table's element type for active segments.
                let ty = match kind {
                    ElementKind::Passive | ElementKind::Declared => self.arbitrary_ref_type(u)?,
                    ElementKind::Active { table, .. } => {
                        let idx = table.unwrap_or(0);
                        self.arbitrary_matching_ref_type(u, self.tables[idx as usize].element_type)?
                    }
                };

                // Plain function-index lists are only valid for funcref; and
                // without reference types that's the only possibility.
                let can_use_function_list = ty == RefType::FUNCREF;
                if !self.config.reference_types_enabled {
                    assert!(can_use_function_list);
                }

                // Which function indices are compatible with `ty`?
                let mut func_candidates = Vec::new();
                if can_use_function_list {
                    match ty.heap_type {
                        HeapType::Abstract {
                            ty: AbstractHeapType::Func,
                            ..
                        } => {
                            func_candidates.extend(0..self.funcs.len() as u32);
                        }
                        HeapType::Concrete(ty) => {
                            for (i, (fty, _)) in self.funcs.iter().enumerate() {
                                if *fty == ty {
                                    func_candidates.push(i as u32);
                                }
                            }
                        }
                        _ => {}
                    }
                }

                let items = if !self.config.reference_types_enabled
                    || (can_use_function_list && u.arbitrary()?)
                {
                    // Function-index form.
                    let mut init = vec![];
                    if func_candidates.len() > 0 {
                        arbitrary_loop(u, self.config.min_elements, max, |u| {
                            let func_idx = *u.choose(&func_candidates)?;
                            init.push(func_idx);
                            Ok(true)
                        })?;
                    }
                    Elements::Functions(init)
                } else {
                    // Const-expression form (requires reference types).
                    let mut init = vec![];
                    arbitrary_loop(u, self.config.min_elements, max, |u| {
                        init.push(self.arbitrary_const_expr(ValType::Ref(ty), u, true)?);
                        Ok(true)
                    })?;
                    Elements::Expressions(init)
                };

                self.elems.push(ElementSegment { kind, ty, items });
                Ok(true)
            },
        )
    }
2624
    /// Generate bodies for every locally-defined function.
    ///
    /// Defined functions occupy the tail of `self.funcs` (after imports), so
    /// only the last `num_defined_funcs` entries receive code.
    fn arbitrary_code(&mut self, u: &mut Unstructured) -> Result<()> {
        // Seed the pools of "interesting" constants used by const generation.
        self.compute_interesting_values();

        self.code.reserve(self.num_defined_funcs);
        // Allocations are shared across all function bodies to avoid
        // re-allocating per function.
        let mut allocs = CodeBuilderAllocations::new(
            self,
            self.config.exports.is_some() || self.config.module_shape.is_some(),
        );
        for (idx, ty) in self.funcs[self.funcs.len() - self.num_defined_funcs..].iter() {
            let shared = self.is_shared_type(*idx);
            let body = self.arbitrary_func_body(u, ty, &mut allocs, shared)?;
            self.code.push(body);
        }
        // Give the allocations a chance to emit any deferred state.
        allocs.finish(u, self)?;
        Ok(())
    }
2641
2642 fn arbitrary_func_body(
2643 &self,
2644 u: &mut Unstructured,
2645 ty: &FuncType,
2646 allocs: &mut CodeBuilderAllocations,
2647 shared: bool,
2648 ) -> Result<Code> {
2649 let mut locals = self.arbitrary_locals(u)?;
2650 let builder = allocs.builder(ty, &mut locals, shared);
2651 let instructions = if self.config.allow_invalid_funcs && u.arbitrary().unwrap_or(false) {
2652 Instructions::Arbitrary(arbitrary_vec_u8(u)?)
2653 } else {
2654 Instructions::Generated(builder.arbitrary(u, self)?)
2655 };
2656
2657 Ok(Code {
2658 locals,
2659 instructions,
2660 })
2661 }
2662
2663 fn arbitrary_locals(&self, u: &mut Unstructured) -> Result<Vec<ValType>> {
2664 let mut ret = Vec::new();
2665 arbitrary_loop(u, 0, 100, |u| {
2666 ret.push(self.arbitrary_valtype(u)?);
2667 Ok(true)
2668 })?;
2669 Ok(ret)
2670 }
2671
    /// Generate the module's data segments.
    fn arbitrary_data(&mut self, u: &mut Unstructured) -> Result<()> {
        // Without memories, only passive segments (bulk memory) make sense.
        let memories = self.memories.len() as u32;
        if memories == 0 && !self.config.bulk_memory_enabled {
            return Ok(());
        }
        let disallow_traps = self.config.disallow_traps;
        // Offset generators for 32-bit memories: always a constant offset
        // (bounded by the memory's minimum size when traps are disallowed),
        // plus optionally any suitable immutable global.
        let mut choices32: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
            vec![];
        choices32.push(Box::new(|u, min_size, data_len| {
            // `min_size` is in pages; convert to bytes, saturating at u32.
            let min = u32::try_from(min_size.saturating_mul(64 * 1024))
                .unwrap_or(u32::MAX)
                .into();
            let max = if disallow_traps { min } else { u32::MAX.into() };
            Ok(Offset::Const32(
                arbitrary_offset(u, min, max, data_len)? as i32
            ))
        }));
        // Same for 64-bit memories.
        let mut choices64: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
            vec![];
        choices64.push(Box::new(|u, min_size, data_len| {
            let min = min_size.saturating_mul(64 * 1024);
            let max = if disallow_traps { min } else { u64::MAX };
            Ok(Offset::Const64(
                arbitrary_offset(u, min, max, data_len)? as i64
            ))
        }));
        // Global offsets are unbounded, so they're only usable when traps
        // are allowed.
        if !self.config.disallow_traps {
            for i in self.globals_for_const_expr(ValType::I32, true) {
                choices32.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
            }
            for i in self.globals_for_const_expr(ValType::I64, true) {
                choices64.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
            }
        }

        // Candidate target memories: usually skip zero-minimum memories,
        // where any nonempty active segment would trap.
        let mut memories = Vec::new();
        for (i, mem) in self.memories.iter().enumerate() {
            if mem.minimum > 0 || u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? == 0 {
                memories.push(i as u32);
            }
        }

        if memories.is_empty() && !self.config.bulk_memory_enabled {
            return Ok(());
        }

        arbitrary_loop(
            u,
            self.config.min_data_segments,
            self.config.max_data_segments,
            |u| {
                let mut init: Vec<u8> = u.arbitrary()?;

                // Passive when bulk memory allows it (and always when there
                // is no candidate memory); otherwise active.
                let kind =
                    if self.config.bulk_memory_enabled && (memories.is_empty() || u.arbitrary()?) {
                        DataSegmentKind::Passive
                    } else {
                        let memory_index = *u.choose(&memories)?;
                        let mem = &self.memories[memory_index as usize];
                        let f = if mem.memory64 {
                            u.choose(&choices64)?
                        } else {
                            u.choose(&choices32)?
                        };
                        let mut offset = f(u, mem.minimum, init.len())?;

                        // Under `disallow_traps`, clamp both the data length
                        // and the constant offset so the segment is provably
                        // in-bounds of the memory's minimum size.
                        if self.config.disallow_traps {
                            let max_size = (u64::MAX / 64 / 1024).min(mem.minimum) * 64 * 1024;
                            init.truncate(max_size as usize);
                            let max_offset = max_size - init.len() as u64;
                            match &mut offset {
                                Offset::Const32(x) => {
                                    *x = (*x as u64).min(max_offset) as i32;
                                }
                                Offset::Const64(x) => {
                                    *x = (*x as u64).min(max_offset) as i64;
                                }
                                // Global offsets aren't generated when traps
                                // are disallowed (see above).
                                Offset::Global(_) => unreachable!(),
                            }
                        }
                        DataSegmentKind::Active {
                            offset,
                            memory_index,
                        }
                    };
                self.data.push(DataSegment { kind, init });
                Ok(true)
            },
        )
    }
2780
2781 fn params_results(&self, ty: &BlockType) -> (Vec<ValType>, Vec<ValType>) {
2782 match ty {
2783 BlockType::Empty => (vec![], vec![]),
2784 BlockType::Result(t) => (vec![], vec![*t]),
2785 BlockType::FunctionType(ty) => {
2786 let ty = self.func_type(*ty);
2787 (ty.params.to_vec(), ty.results.to_vec())
2788 }
2789 }
2790 }
2791
2792 fn globals_for_const_expr(
2795 &self,
2796 ty: ValType,
2797 allow_defined_globals: bool,
2798 ) -> impl Iterator<Item = u32> + '_ {
2799 let num_imported_globals = self.globals.len() - self.defined_globals.len();
2802 let max_global = if self.config.gc_enabled && allow_defined_globals {
2803 self.globals.len()
2804 } else {
2805 num_imported_globals
2806 };
2807
2808 self.globals[..max_global]
2809 .iter()
2810 .enumerate()
2811 .filter_map(move |(i, g)| {
2812 if !g.mutable && self.val_type_is_sub_type(g.val_type, ty) {
2816 Some(i as u32)
2817 } else {
2818 None
2819 }
2820 })
2821 }
2822
    /// Populate `interesting_values32`/`interesting_values64` with constants
    /// that tend to exercise edge cases: integer type extrema, single-bit
    /// patterns, repeated byte patterns, special floats, and the boundaries
    /// of this module's tables and memories.
    fn compute_interesting_values(&mut self) {
        // Must only run once per module.
        debug_assert!(self.interesting_values32.is_empty());
        debug_assert!(self.interesting_values64.is_empty());

        let mut interesting_values32 = HashSet::new();
        let mut interesting_values64 = HashSet::new();

        // Record a value in both pools (the 32-bit pool takes the low half).
        let mut interesting = |val: u64| {
            interesting_values32.insert(val as u32);
            interesting_values64.insert(val);
        };

        interesting(0);

        // Unsigned maxima of each width.
        interesting(u8::MAX as _);
        interesting(u16::MAX as _);
        interesting(u32::MAX as _);
        interesting(u64::MAX);

        // Signed minima of each width (sign-extended).
        interesting(i8::MIN as _);
        interesting(i16::MIN as _);
        interesting(i32::MIN as _);
        interesting(i64::MIN as _);

        for i in 0..64 {
            // Single bit set.
            interesting(1 << i);

            // Single bit cleared.
            interesting(!(1 << i));

            // Low `i` bits set.
            interesting((1 << i) - 1);

            // High `i + 1` bits set (arithmetic shift smears the sign bit).
            interesting(((1_i64 << 63) >> i) as _);
        }

        // Repeated byte patterns and their complements.
        for pattern in [0b01010101, 0b00010001, 0b00010001, 0b00000001] {
            for b in [pattern, !pattern] {
                interesting(u64::from_ne_bytes([b, b, b, b, b, b, b, b]));
            }
        }

        // Special float bit patterns, f64 then f32.
        let mut interesting_f64 = |x: f64| interesting(x.to_bits());
        interesting_f64(0.0);
        interesting_f64(-0.0);
        interesting_f64(f64::INFINITY);
        interesting_f64(f64::NEG_INFINITY);
        interesting_f64(f64::EPSILON);
        interesting_f64(-f64::EPSILON);
        interesting_f64(f64::MIN);
        interesting_f64(f64::MIN_POSITIVE);
        interesting_f64(f64::MAX);
        interesting_f64(f64::NAN);
        let mut interesting_f32 = |x: f32| interesting(x.to_bits() as _);
        interesting_f32(0.0);
        interesting_f32(-0.0);
        interesting_f32(f32::INFINITY);
        interesting_f32(f32::NEG_INFINITY);
        interesting_f32(f32::EPSILON);
        interesting_f32(-f32::EPSILON);
        interesting_f32(f32::MIN);
        interesting_f32(f32::MIN_POSITIVE);
        interesting_f32(f32::MAX);
        interesting_f32(f32::NAN);

        // Table size boundaries (and one past them).
        for t in self.tables.iter() {
            interesting(t.minimum as _);
            if let Some(x) = t.minimum.checked_add(1) {
                interesting(x as _);
            }

            if let Some(x) = t.maximum {
                interesting(x as _);
                if let Some(y) = x.checked_add(1) {
                    interesting(y as _);
                }
            }
        }

        // Memory size boundaries in bytes, plus small offsets around them.
        for m in self.memories.iter() {
            let min = m.minimum.saturating_mul(crate::page_size(m).into());
            interesting(min);
            for i in 0..5 {
                if let Some(x) = min.checked_add(1 << i) {
                    interesting(x);
                }
                if let Some(x) = min.checked_sub(1 << i) {
                    interesting(x);
                }
            }

            if let Some(max) = m.maximum {
                let max = max.saturating_mul(crate::page_size(m).into());
                interesting(max);
                for i in 0..5 {
                    if let Some(x) = max.checked_add(1 << i) {
                        interesting(x);
                    }
                    if let Some(x) = max.checked_sub(1 << i) {
                        interesting(x);
                    }
                }
            }
        }

        self.interesting_values32.extend(interesting_values32);
        self.interesting_values64.extend(interesting_values64);

        // Sort for deterministic ordering regardless of hash iteration order.
        self.interesting_values32.sort();
        self.interesting_values64.sort();
    }
2944
    /// Generate a single constant instruction producing a value of type `ty`.
    ///
    /// For numeric types, a coin flip decides between one of the
    /// precomputed "interesting" values and a fully arbitrary value.
    /// Requires `compute_interesting_values` to have run first.
    fn arbitrary_const_instruction(
        &self,
        ty: ValType,
        u: &mut Unstructured<'_>,
    ) -> Result<Instruction> {
        debug_assert!(self.interesting_values32.len() > 0);
        debug_assert!(self.interesting_values64.len() > 0);
        match ty {
            ValType::I32 => Ok(Instruction::I32Const(if u.arbitrary()? {
                *u.choose(&self.interesting_values32)? as i32
            } else {
                u.arbitrary()?
            })),
            ValType::I64 => Ok(Instruction::I64Const(if u.arbitrary()? {
                *u.choose(&self.interesting_values64)? as i64
            } else {
                u.arbitrary()?
            })),
            ValType::F32 => Ok(Instruction::F32Const(if u.arbitrary()? {
                f32::from_bits(*u.choose(&self.interesting_values32)?).into()
            } else {
                u.arbitrary::<f32>()?.into()
            })),
            ValType::F64 => Ok(Instruction::F64Const(if u.arbitrary()? {
                f64::from_bits(*u.choose(&self.interesting_values64)?).into()
            } else {
                u.arbitrary::<f64>()?.into()
            })),
            ValType::V128 => Ok(Instruction::V128Const(if u.arbitrary()? {
                // Combine two interesting 64-bit halves into one v128.
                let upper = (*u.choose(&self.interesting_values64)? as i128) << 64;
                let lower = *u.choose(&self.interesting_values64)? as i128;
                upper | lower
            } else {
                u.arbitrary()?
            })),
            ValType::Ref(ty) => {
                // Only nullable references have a constant (null) value here.
                assert!(ty.nullable);
                Ok(Instruction::RefNull(ty.heap_type))
            }
        }
    }
2986
2987 fn propagate_shared<T>(&mut self, must_share: bool, mut f: impl FnMut(&mut Self) -> T) -> T {
2988 let tmp = mem::replace(&mut self.must_share, must_share);
2989 let result = f(self);
2990 self.must_share = tmp;
2991 result
2992 }
2993
2994 fn arbitrary_shared(&self, u: &mut Unstructured) -> Result<bool> {
2995 if self.must_share {
2996 Ok(true)
2997 } else {
2998 Ok(self.config.shared_everything_threads_enabled && u.ratio(1, 4)?)
2999 }
3000 }
3001
3002 fn is_shared_ref_type(&self, ty: RefType) -> bool {
3003 match ty.heap_type {
3004 HeapType::Abstract { shared, .. } => shared,
3005 HeapType::Concrete(i) | HeapType::Exact(i) => {
3006 self.types[i as usize].composite_type.shared
3007 }
3008 }
3009 }
3010
3011 fn is_shared_type(&self, index: u32) -> bool {
3012 let index = usize::try_from(index).unwrap();
3013 let ty = self.types.get(index).unwrap();
3014 ty.composite_type.shared
3015 }
3016}
3017
/// Generate a (minimum, maximum) limits pair for a table or memory.
///
/// The minimum is biased toward `max_inbounds` via `gradually_grow`; the
/// maximum, when present (always when `max_required`), lies in
/// `min..=max_minimum`.
pub(crate) fn arbitrary_limits64(
    u: &mut Unstructured,
    min_minimum: Option<u64>,
    max_minimum: u64,
    max_required: bool,
    max_inbounds: u64,
) -> Result<(u64, Option<u64>)> {
    // Callers must supply a coherent window.
    assert!(
        min_minimum.unwrap_or(0) <= max_minimum,
        "{} <= {max_minimum}",
        min_minimum.unwrap_or(0),
    );
    assert!(
        min_minimum.unwrap_or(0) <= max_inbounds,
        "{} <= {max_inbounds}",
        min_minimum.unwrap_or(0),
    );

    let min = gradually_grow(u, min_minimum.unwrap_or(0), max_inbounds, max_minimum)?;
    assert!(min <= max_minimum, "{min} <= {max_minimum}");

    // A maximum is emitted when required, otherwise on a coin flip.
    let max = if max_required || u.arbitrary().unwrap_or(false) {
        Some(u.int_in_range(min..=max_minimum)?)
    } else {
        None
    };
    assert!(min <= max.unwrap_or(min), "{min} <= {}", max.unwrap_or(min));

    Ok((min, max))
}
3048
3049pub(crate) fn configured_valtypes(config: &Config) -> Vec<ValType> {
3050 let mut valtypes = Vec::with_capacity(25);
3051 valtypes.push(ValType::I32);
3052 valtypes.push(ValType::I64);
3053 if config.allow_floats {
3054 valtypes.push(ValType::F32);
3055 valtypes.push(ValType::F64);
3056 }
3057 if config.simd_enabled {
3058 valtypes.push(ValType::V128);
3059 }
3060 if config.gc_enabled && config.reference_types_enabled {
3061 for nullable in [
3062 true,
3069 ] {
3070 use AbstractHeapType::*;
3071 let abs_ref_types = [
3072 Any, Eq, I31, Array, Struct, None, Func, NoFunc, Extern, NoExtern,
3073 ];
3074 valtypes.extend(
3075 abs_ref_types
3076 .iter()
3077 .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, false))),
3078 );
3079 if config.shared_everything_threads_enabled {
3080 valtypes.extend(
3081 abs_ref_types
3082 .iter()
3083 .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, true))),
3084 );
3085 }
3086 }
3087 } else if config.reference_types_enabled {
3088 valtypes.push(ValType::EXTERNREF);
3089 valtypes.push(ValType::FUNCREF);
3090 }
3091 valtypes
3092}
3093
/// Generate an arbitrary table type.
///
/// When a `Module` is provided, its type space informs the element type and
/// shared-ness; otherwise a plain `funcref` table is produced.
pub(crate) fn arbitrary_table_type(
    u: &mut Unstructured,
    config: &Config,
    module: Option<&Module>,
) -> Result<TableType> {
    let table64 = config.memory64_enabled && u.arbitrary()?;
    // Bias toward small tables to keep generated modules manageable.
    let max_inbounds = 10_000;
    // `disallow_traps` requires a nonempty table so indexing can stay valid.
    let min_elements = if config.disallow_traps { Some(1) } else { None };
    let max_elements = min_elements.unwrap_or(0).max(config.max_table_elements);
    let (minimum, maximum) = arbitrary_limits64(
        u,
        min_elements,
        max_elements,
        config.table_max_size_required,
        max_inbounds.min(max_elements),
    )?;
    if config.disallow_traps {
        assert!(minimum > 0);
    }
    let element_type = match module {
        Some(module) => module.arbitrary_ref_type(u)?,
        None => RefType::FUNCREF,
    };

    // A table is shared iff its element type is shared.
    let shared = match module {
        Some(module) => module.is_shared_ref_type(element_type),
        None => false,
    };

    Ok(TableType {
        element_type,
        minimum,
        maximum,
        table64,
        shared,
    })
}
3134
/// Generate an arbitrary memory type respecting the configured proposals
/// (threads, memory64, custom page sizes) and size budgets.
pub(crate) fn arbitrary_memtype(u: &mut Unstructured, config: &Config) -> Result<MemoryType> {
    // Shared memories (threads proposal) are kept relatively rare.
    let shared = config.threads_enabled && u.ratio(1, 4)?;

    let memory64 = config.memory64_enabled && u.arbitrary()?;
    // Custom page sizes: either 1-byte (log2 = 0) or standard 64 KiB pages.
    let page_size_log2 = if config.custom_page_sizes_enabled && u.arbitrary()? {
        Some(if u.arbitrary()? { 0 } else { 16 })
    } else {
        None
    };

    // `disallow_traps` needs at least one page so bounded accesses exist.
    let min_pages = if config.disallow_traps { Some(1) } else { None };
    // Convert the configured byte budget into pages for this page size.
    let max_pages = min_pages.unwrap_or(0).max(if memory64 {
        u64::try_from(config.max_memory64_bytes >> page_size_log2.unwrap_or(16))
            .unwrap_or(u64::MAX)
    } else {
        u32::try_from(config.max_memory32_bytes >> page_size_log2.unwrap_or(16))
            .unwrap_or(u32::MAX)
            .into()
    });

    // Cap the in-bounds bias so all memories together stay within ~1 GiB.
    let max_all_mems_in_bytes = 1 << 30;
    let max_this_mem_in_bytes = max_all_mems_in_bytes / u64::try_from(config.max_memories).unwrap();
    let max_inbounds = max_this_mem_in_bytes >> page_size_log2.unwrap_or(16);
    let max_inbounds = max_inbounds.clamp(min_pages.unwrap_or(0), max_pages);

    // Shared memories must declare a maximum.
    let (minimum, maximum) = arbitrary_limits64(
        u,
        min_pages,
        max_pages,
        config.memory_max_size_required || shared,
        max_inbounds,
    )?;

    Ok(MemoryType {
        minimum,
        maximum,
        memory64,
        shared,
        page_size_log2,
    })
}
3184
/// Generate a tag type referring to one of `candidate_func_types`.
///
/// # Panics
///
/// Panics (index underflow) if `candidate_func_types` is empty; callers
/// must guarantee at least one candidate.
pub(crate) fn arbitrary_tag_type(
    u: &mut Unstructured,
    candidate_func_types: &[u32],
    get_func_type: impl FnOnce(u32) -> Rc<FuncType>,
) -> Result<TagType> {
    // Precondition: non-empty slice (`len() - 1` underflows otherwise).
    let max = candidate_func_types.len() - 1;
    let ty = candidate_func_types[u.int_in_range(0..=max)?];
    Ok(TagType {
        func_type_idx: ty,
        func_type: get_func_type(ty),
    })
}
3197
/// Pick a value in `min..=max`, heavily biased (≈99.5% of the time) toward
/// the "in-bounds" subrange `min..=max_inbounds`, and within that subrange
/// biased toward small values via a sixth-power curve.
fn gradually_grow(u: &mut Unstructured, min: u64, max_inbounds: u64, max: u64) -> Result<u64> {
    if min == max {
        return Ok(min);
    }
    let x = {
        let min = min as f64;
        let max = max as f64;
        let max_inbounds = max_inbounds as f64;
        // Draw a uniform u32 and map it through the custom distribution.
        let x = u.arbitrary::<u32>()?;
        let x = f64::from(x);
        let x = map_custom(
            x,
            f64::from(u32::MIN)..f64::from(u32::MAX),
            min..max_inbounds,
            min..max,
        );
        assert!(min <= x, "{min} <= {x}");
        assert!(x <= max, "{x} <= {max}");
        x.round() as u64
    };

    // Guard against float rounding pushing the result out of range.
    return Ok(x.clamp(min, max));

    // Map `value` from `input` into `output`: values below the PCT_INBOUNDS
    // quantile land in `output_inbounds` (skewed small by x^6), the rest are
    // mapped linearly over the full `output` range.
    fn map_custom(
        value: f64,
        input: Range<f64>,
        output_inbounds: Range<f64>,
        output: Range<f64>,
    ) -> f64 {
        assert!(!value.is_nan(), "{}", value);
        assert!(value.is_finite(), "{}", value);
        assert!(input.start < input.end, "{} < {}", input.start, input.end);
        assert!(
            output.start < output.end,
            "{} < {}",
            output.start,
            output.end
        );
        assert!(value >= input.start, "{} >= {}", value, input.start);
        assert!(value <= input.end, "{} <= {}", value, input.end);
        // The in-bounds range must be nested within the full output range.
        assert!(
            output.start <= output_inbounds.start,
            "{} <= {}",
            output.start,
            output_inbounds.start
        );
        assert!(
            output_inbounds.end <= output.end,
            "{} <= {}",
            output_inbounds.end,
            output.end
        );

        // Normalize to [0, 1] first.
        let x = map_linear(value, input, 0.0..1.0);
        let result = if x < PCT_INBOUNDS {
            if output_inbounds.start == output_inbounds.end {
                output_inbounds.start
            } else {
                // x^6 skews the distribution strongly toward the low end.
                let unscaled = x * x * x * x * x * x;
                map_linear(unscaled, 0.0..1.0, output_inbounds)
            }
        } else {
            map_linear(x, 0.0..1.0, output.clone())
        };

        assert!(result >= output.start, "{} >= {}", result, output.start);
        assert!(result <= output.end, "{} <= {}", result, output.end);
        result
    }

    // Straight linear interpolation of `value` from one range to another.
    fn map_linear(
        value: f64,
        Range {
            start: in_low,
            end: in_high,
        }: Range<f64>,
        Range {
            start: out_low,
            end: out_high,
        }: Range<f64>,
    ) -> f64 {
        assert!(!value.is_nan(), "{}", value);
        assert!(value.is_finite(), "{}", value);
        assert!(in_low < in_high, "{in_low} < {in_high}");
        assert!(out_low < out_high, "{out_low} < {out_high}");
        assert!(value >= in_low, "{value} >= {in_low}");
        assert!(value <= in_high, "{value} <= {in_high}");

        let dividend = out_high - out_low;
        let divisor = in_high - in_low;
        let slope = dividend / divisor;
        let result = out_low + (slope * (value - in_low));

        assert!(result >= out_low, "{result} >= {out_low}");
        assert!(result <= out_high, "{result} <= {out_high}");
        result
    }
}
3315
/// Pick an offset for a data/element segment of `segment_size` items into a
/// region whose guaranteed size is `limit_min` (absolute cap `limit_max`).
///
/// When the segment can't fit below `limit_min` anyway, the offset is
/// uniform over the full range; otherwise it's biased toward offsets that
/// keep the whole segment in bounds.
fn arbitrary_offset(
    u: &mut Unstructured,
    limit_min: u64,
    limit_max: u64,
    segment_size: usize,
) -> Result<u64> {
    let size = u64::try_from(segment_size).unwrap();

    if size > limit_min {
        u.int_in_range(0..=limit_max)
    } else {
        // Bias toward `0..=limit_min - size` so the segment stays in bounds.
        gradually_grow(u, 0, limit_min - size, limit_max)
    }
}
3335
3336fn unique_import_strings(max_size: usize, u: &mut Unstructured) -> Result<(String, String)> {
3337 let module = limited_string(max_size, u)?;
3338 let field = limited_string(max_size, u)?;
3339 Ok((module, field))
3340}
3341
3342fn arbitrary_vec_u8(u: &mut Unstructured) -> Result<Vec<u8>> {
3343 let size = u.arbitrary_len::<u8>()?;
3344 Ok(u.bytes(size)?.to_vec())
3345}
3346
3347impl EntityType {
3348 fn size(&self) -> u32 {
3349 match self {
3350 EntityType::Tag(_)
3351 | EntityType::Global(_)
3352 | EntityType::Table(_)
3353 | EntityType::Memory(_) => 1,
3354 EntityType::Func(_, ty) => 1 + (ty.params.len() + ty.results.len()) as u32,
3355 }
3356 }
3357}
3358
/// A set of [`InstructionKind`]s restricting which categories of
/// instructions may appear in generated function bodies.
#[derive(Clone, Copy, Debug, Default)]
#[cfg_attr(
    feature = "serde",
    derive(serde_derive::Deserialize, serde_derive::Serialize)
)]
pub struct InstructionKinds(pub(crate) FlagSet<InstructionKind>);
3375
3376impl InstructionKinds {
3377 pub fn new(kinds: &[InstructionKind]) -> Self {
3379 Self(kinds.iter().fold(FlagSet::default(), |ks, k| ks | *k))
3380 }
3381
3382 pub fn all() -> Self {
3384 Self(FlagSet::full())
3385 }
3386
3387 pub fn none() -> Self {
3389 Self(FlagSet::default())
3390 }
3391
3392 #[inline]
3394 pub fn contains(&self, kind: InstructionKind) -> bool {
3395 self.0.contains(kind)
3396 }
3397
3398 pub fn without_floats(&self) -> Self {
3400 let mut floatless = self.0;
3401 if floatless.contains(InstructionKind::Numeric) {
3402 floatless -= InstructionKind::Numeric;
3403 floatless |= InstructionKind::NumericInt;
3404 }
3405 if floatless.contains(InstructionKind::Vector) {
3406 floatless -= InstructionKind::Vector;
3407 floatless |= InstructionKind::VectorInt;
3408 }
3409 if floatless.contains(InstructionKind::Memory) {
3410 floatless -= InstructionKind::Memory;
3411 floatless |= InstructionKind::MemoryInt;
3412 }
3413 Self(floatless)
3414 }
3415}
3416
flags! {
    // Categories of wasm instructions. Each compound kind (`Numeric`,
    // `Vector`, `Memory`) includes the bit of its integer-only subset, so
    // `contains(XxxInt)` also holds for a set containing the full kind.
    #[allow(missing_docs)]
    #[cfg_attr(feature = "_internal_cli", derive(serde_derive::Deserialize))]
    pub enum InstructionKind: u16 {
        NumericInt = 1 << 0,
        Numeric = (1 << 1) | (1 << 0),
        VectorInt = 1 << 2,
        Vector = (1 << 3) | (1 << 2),
        Reference = 1 << 4,
        Parametric = 1 << 5,
        Variable = 1 << 6,
        Table = 1 << 7,
        MemoryInt = 1 << 8,
        Memory = (1 << 9) | (1 << 8),
        Control = 1 << 10,
        Aggregate = 1 << 11,
    }
}
3437
3438impl FromStr for InstructionKinds {
3439 type Err = String;
3440 fn from_str(s: &str) -> std::prelude::v1::Result<Self, Self::Err> {
3441 let mut kinds = vec![];
3442 for part in s.split(",") {
3443 let kind = InstructionKind::from_str(part)?;
3444 kinds.push(kind);
3445 }
3446 Ok(InstructionKinds::new(&kinds))
3447 }
3448}
3449
3450impl FromStr for InstructionKind {
3451 type Err = String;
3452 fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
3453 match s.to_lowercase().as_str() {
3454 "numeric_non_float" => Ok(InstructionKind::NumericInt),
3455 "numeric" => Ok(InstructionKind::Numeric),
3456 "vector_non_float" => Ok(InstructionKind::VectorInt),
3457 "vector" => Ok(InstructionKind::Vector),
3458 "reference" => Ok(InstructionKind::Reference),
3459 "parametric" => Ok(InstructionKind::Parametric),
3460 "variable" => Ok(InstructionKind::Variable),
3461 "table" => Ok(InstructionKind::Table),
3462 "memory_non_float" => Ok(InstructionKind::MemoryInt),
3463 "memory" => Ok(InstructionKind::Memory),
3464 "control" => Ok(InstructionKind::Control),
3465 _ => Err(format!("unknown instruction kind: {s}")),
3466 }
3467 }
3468}
3469
#[cfg(feature = "wasmparser")]
/// Convert a parsed `wasmparser` function type into wasm-smith's `FuncType`.
///
/// Fails (with `()`) if any parameter or result type has no wasm-smith
/// equivalent.
impl TryFrom<wasmparser::FuncType> for FuncType {
    type Error = ();

    fn try_from(value: wasmparser::FuncType) -> Result<Self, Self::Error> {
        Ok(FuncType {
            params: value
                .params()
                .iter()
                .copied()
                .map(|ty| ty.try_into().map_err(|_| ()))
                .collect::<Result<Vec<_>, _>>()?,
            results: value
                .results()
                .iter()
                .copied()
                .map(|ty| ty.try_into().map_err(|_| ()))
                .collect::<Result<Vec<_>, _>>()?,
        })
    }
}
3493
#[cfg(feature = "wasmparser")]
/// Convert a parsed `wasmparser` composite type (func/array/struct) into
/// wasm-smith's representation.
///
/// # Panics
///
/// Panics on continuation types, which wasm-smith does not support.
impl TryFrom<wasmparser::CompositeType> for CompositeType {
    type Error = ();

    fn try_from(value: wasmparser::CompositeType) -> Result<Self, Self::Error> {
        let inner_type = match value.inner {
            wasmparser::CompositeInnerType::Func(func_type) => {
                CompositeInnerType::Func(Rc::new(func_type.try_into()?))
            }
            wasmparser::CompositeInnerType::Array(array_type) => {
                CompositeInnerType::Array(array_type.try_into().map_err(|_| ())?)
            }
            wasmparser::CompositeInnerType::Struct(struct_type) => {
                CompositeInnerType::Struct(struct_type.try_into().map_err(|_| ())?)
            }
            wasmparser::CompositeInnerType::Cont(_) => {
                panic!("continuation type is not supported by wasm-smith currently.")
            }
        };

        Ok(CompositeType {
            inner: inner_type,
            shared: value.shared,
            // Descriptor/describes indices must be module-level indices;
            // anything else is rejected with `()`.
            descriptor: value
                .descriptor_idx
                .map(|idx| idx.as_module_index().ok_or(()))
                .transpose()?,
            describes: value
                .describes_idx
                .map(|idx| idx.as_module_index().ok_or(()))
                .transpose()?,
        })
    }
}
3528
#[cfg(feature = "wasmparser")]
/// Convert a parsed `wasmparser` subtype declaration into wasm-smith's
/// `SubType`, rejecting non-module-level supertype indices.
impl TryFrom<wasmparser::SubType> for SubType {
    type Error = ();

    fn try_from(value: wasmparser::SubType) -> Result<Self, Self::Error> {
        Ok(SubType {
            is_final: value.is_final,
            supertype: value
                .supertype_idx
                .map(|idx| idx.as_module_index().ok_or(()))
                .transpose()?,
            composite_type: value.composite_type.try_into()?,
            // NOTE(review): depth is fixed at 1 here; presumably recomputed
            // by the caller when the full type hierarchy is known — confirm.
            depth: 1,
        })
    }
}
3546}