1use crate::constant_hash::Table;
10use alloc::vec::Vec;
11use core::fmt::{self, Display, Formatter};
12use core::ops::{Deref, DerefMut};
13use core::str::FromStr;
14
15#[cfg(feature = "enable-serde")]
16use serde_derive::{Deserialize, Serialize};
17
18use crate::bitset::ScalarBitSet;
19use crate::entity;
20use crate::ir::{
21 self,
22 condcodes::{FloatCC, IntCC},
23 trapcode::TrapCode,
24 types, Block, FuncRef, MemFlags, SigRef, StackSlot, Type, Value,
25};
26
/// A list of `Value`s stored out-of-line in a `ValueListPool`.
pub type ValueList = entity::EntityList<Value>;

/// Memory pool holding the storage for every `ValueList`.
pub type ValueListPool = entity::ListPool<Value>;
34
/// A block together with the argument values passed to it by a branch.
///
/// The target block is stored as the first element of `values`, re-encoded
/// as a `Value` (see `BlockCall::block_to_value`); the real arguments follow.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    // Element 0 is the encoded target block; elements 1.. are the arguments.
    values: entity::EntityList<Value>,
}
57
58impl BlockCall {
59 fn value_to_block(val: Value) -> Block {
62 Block::from_u32(val.as_u32())
63 }
64
65 fn block_to_value(block: Block) -> Value {
68 Value::from_u32(block.as_u32())
69 }
70
71 pub fn new(block: Block, args: &[Value], pool: &mut ValueListPool) -> Self {
73 let mut values = ValueList::default();
74 values.push(Self::block_to_value(block), pool);
75 values.extend(args.iter().copied(), pool);
76 Self { values }
77 }
78
79 pub fn block(&self, pool: &ValueListPool) -> Block {
81 let val = self.values.first(pool).unwrap();
82 Self::value_to_block(val)
83 }
84
85 pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
87 *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
88 }
89
90 pub fn append_argument(&mut self, arg: Value, pool: &mut ValueListPool) {
92 self.values.push(arg, pool);
93 }
94
95 pub fn args_slice<'a>(&self, pool: &'a ValueListPool) -> &'a [Value] {
97 &self.values.as_slice(pool)[1..]
98 }
99
100 pub fn args_slice_mut<'a>(&'a mut self, pool: &'a mut ValueListPool) -> &'a mut [Value] {
102 &mut self.values.as_mut_slice(pool)[1..]
103 }
104
105 pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
107 self.values.remove(1 + ix, pool)
108 }
109
110 pub fn clear(&mut self, pool: &mut ValueListPool) {
112 self.values.truncate(1, pool)
113 }
114
115 pub fn extend<I>(&mut self, elements: I, pool: &mut ValueListPool)
117 where
118 I: IntoIterator<Item = Value>,
119 {
120 self.values.extend(elements, pool)
121 }
122
123 pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
125 DisplayBlockCall { block: *self, pool }
126 }
127
128 pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
132 Self {
133 values: self.values.deep_clone(pool),
134 }
135 }
136}
137
/// Context needed to `Display` a [`BlockCall`] (the call plus its value pool).
pub struct DisplayBlockCall<'a> {
    // The block call being displayed.
    block: BlockCall,
    // Pool that stores the call's value list.
    pool: &'a ValueListPool,
}
143
144impl<'a> Display for DisplayBlockCall<'a> {
145 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
146 write!(f, "{}", self.block.block(&self.pool))?;
147 let args = self.block.args_slice(&self.pool);
148 if !args.is_empty() {
149 write!(f, "(")?;
150 for (ix, arg) in args.iter().enumerate() {
151 if ix > 0 {
152 write!(f, ", ")?;
153 }
154 write!(f, "{}", arg)?;
155 }
156 write!(f, ")")?;
157 }
158 Ok(())
159 }
160}
161
// Pull in the build-script-generated definitions used below (e.g. `Opcode`,
// `InstructionFormat`, `InstructionData`, `opcode_name`, `OPCODE_FORMAT`,
// `OPCODE_CONSTRAINTS`, `OPCODE_HASH_TABLE`, `TYPE_SETS`, `OPERAND_CONSTRAINTS`).
include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));
178
179impl Display for Opcode {
180 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
181 write!(f, "{}", opcode_name(*self))
182 }
183}
184
185impl Opcode {
186 pub fn format(self) -> InstructionFormat {
188 OPCODE_FORMAT[self as usize - 1]
189 }
190
191 pub fn constraints(self) -> OpcodeConstraints {
194 OPCODE_CONSTRAINTS[self as usize - 1]
195 }
196
197 #[inline]
201 pub fn is_safepoint(self) -> bool {
202 self.is_call() && !self.is_return()
203 }
204}
205
206impl FromStr for Opcode {
211 type Err = &'static str;
212
213 fn from_str(s: &str) -> Result<Self, &'static str> {
215 use crate::constant_hash::{probe, simple_hash};
216
217 match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
218 Err(_) => Err("Unknown opcode"),
219 Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
222 }
223 }
224}
225
// Hash-table adapter so `constant_hash::probe` can search the generated
// `OPCODE_HASH_TABLE` by opcode name.
impl<'a> Table<&'a str> for [Option<Opcode>] {
    fn len(&self) -> usize {
        // Resolves to the slice's *inherent* `len`, not this trait method:
        // inherent methods take precedence, so this does not recurse.
        self.len()
    }

    fn key(&self, idx: usize) -> Option<&'a str> {
        // Empty hash-table slots are `None`; occupied slots yield the name.
        self[idx].map(opcode_name)
    }
}
235
/// A growable list of value operands, convertible into a pooled `ValueList`
/// via [`VariableArgs::into_value_list`].
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);
240
241impl VariableArgs {
242 pub fn new() -> Self {
244 Self(Vec::new())
245 }
246
247 pub fn push(&mut self, v: Value) {
249 self.0.push(v)
250 }
251
252 pub fn is_empty(&self) -> bool {
254 self.0.is_empty()
255 }
256
257 pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
259 let mut vlist = ValueList::default();
260 vlist.extend(fixed.iter().cloned(), pool);
261 vlist.extend(self.0, pool);
262 vlist
263 }
264}
265
266impl Deref for VariableArgs {
268 type Target = [Value];
269
270 fn deref(&self) -> &[Value] {
271 &self.0
272 }
273}
274
275impl DerefMut for VariableArgs {
276 fn deref_mut(&mut self) -> &mut [Value] {
277 &mut self.0
278 }
279}
280
281impl Display for VariableArgs {
282 fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
283 for (i, val) in self.0.iter().enumerate() {
284 if i == 0 {
285 write!(fmt, "{}", val)?;
286 } else {
287 write!(fmt, ", {}", val)?;
288 }
289 }
290 Ok(())
291 }
292}
293
294impl Default for VariableArgs {
295 fn default() -> Self {
296 Self::new()
297 }
298}
299
impl InstructionData {
    /// All branch destinations of this instruction, or an empty slice if it
    /// is not a branch. `jump_tables` supplies the `BranchTable` targets.
    pub fn branch_destination<'a>(&'a self, jump_tables: &'a ir::JumpTables) -> &[BlockCall] {
        match self {
            Self::Jump {
                ref destination, ..
            } => std::slice::from_ref(destination),
            Self::Brif { blocks, .. } => blocks.as_slice(),
            Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
            _ => {
                // Catch a new branch format that was not added to this match.
                debug_assert!(!self.opcode().is_branch());
                &[]
            }
        }
    }

    /// Mutable view of the same destinations as [`Self::branch_destination`].
    pub fn branch_destination_mut<'a>(
        &'a mut self,
        jump_tables: &'a mut ir::JumpTables,
    ) -> &mut [BlockCall] {
        match self {
            Self::Jump {
                ref mut destination,
                ..
            } => std::slice::from_mut(destination),
            Self::Brif { blocks, .. } => blocks.as_mut_slice(),
            Self::BranchTable { table, .. } => {
                jump_tables.get_mut(*table).unwrap().all_branches_mut()
            }
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &mut []
            }
        }
    }

    /// Rewrite every `Value` referenced by this instruction — direct
    /// arguments and branch-destination arguments alike — using `f`.
    pub fn map_values(
        &mut self,
        pool: &mut ValueListPool,
        jump_tables: &mut ir::JumpTables,
        mut f: impl FnMut(Value) -> Value,
    ) {
        for arg in self.arguments_mut(pool) {
            *arg = f(*arg);
        }

        for block in self.branch_destination_mut(jump_tables) {
            for arg in block.args_slice_mut(pool) {
                *arg = f(*arg);
            }
        }
    }

    /// The trap code carried by trap/conditional-trap formats, if any.
    pub fn trap_code(&self) -> Option<TrapCode> {
        match *self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// The integer condition code of integer-compare formats, if any.
    pub fn cond_code(&self) -> Option<IntCC> {
        match self {
            &InstructionData::IntCompare { cond, .. }
            | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// The floating-point condition code of the float-compare format, if any.
    pub fn fp_cond_code(&self) -> Option<FloatCC> {
        match self {
            &InstructionData::FloatCompare { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// Mutable access to the trap code, if this format carries one.
    pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
        match self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// The atomic read-modify-write operation of the `AtomicRmw` format.
    pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
        match self {
            &InstructionData::AtomicRmw { op, .. } => Some(op),
            _ => None,
        }
    }

    /// The immediate offset of load/store and stack load/store formats.
    pub fn load_store_offset(&self) -> Option<i32> {
        match self {
            &InstructionData::Load { offset, .. }
            | &InstructionData::StackLoad { offset, .. }
            | &InstructionData::Store { offset, .. }
            | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
            _ => None,
        }
    }

    /// The memory flags of memory-access formats, if any.
    pub fn memflags(&self) -> Option<MemFlags> {
        match self {
            &InstructionData::Load { flags, .. }
            | &InstructionData::LoadNoOffset { flags, .. }
            | &InstructionData::Store { flags, .. }
            | &InstructionData::StoreNoOffset { flags, .. }
            | &InstructionData::AtomicCas { flags, .. }
            | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
            _ => None,
        }
    }

    /// The stack slot referenced by stack load/store formats, if any.
    pub fn stack_slot(&self) -> Option<StackSlot> {
        match self {
            &InstructionData::StackStore { stack_slot, .. }
            | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
            _ => None,
        }
    }

    /// Classify this instruction as a direct call, an indirect call, or not
    /// a call, exposing the callee reference and argument values.
    pub fn analyze_call<'a>(&'a self, pool: &'a ValueListPool) -> CallInfo<'a> {
        match *self {
            Self::Call {
                func_ref, ref args, ..
            } => CallInfo::Direct(func_ref, args.as_slice(pool)),
            Self::CallIndirect {
                sig_ref, ref args, ..
            // args[0] is excluded here — presumably it holds the callee
            // pointer rather than a real argument; TODO confirm against the
            // CallIndirect format definition.
            } => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
            _ => {
                debug_assert!(!self.opcode().is_call());
                CallInfo::NotACall
            }
        }
    }

    /// Mask the instruction's immediate down to `ctrl_typevar`'s bit width
    /// for the formats/opcodes handled below; other data is left untouched.
    #[inline]
    pub(crate) fn mask_immediates(&mut self, ctrl_typevar: Type) {
        if ctrl_typevar.is_invalid() {
            return;
        }

        let bit_width = ctrl_typevar.bits();

        match self {
            Self::UnaryImm { opcode: _, imm } => {
                *imm = imm.mask_to_width(bit_width);
            }
            Self::BinaryImm64 {
                opcode,
                arg: _,
                imm,
            } => {
                // Only the signed div/rem immediates are masked here.
                if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            Self::IntCompareImm {
                opcode,
                arg: _,
                cond,
                imm,
            } => {
                debug_assert_eq!(*opcode, Opcode::IcmpImm);
                // Mask only when the condition differs from its unsigned
                // counterpart, i.e. when `cond` is a signed condition.
                if cond.unsigned() != *cond {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            _ => {}
        }
    }
}
496
/// Analyzed call information returned by `InstructionData::analyze_call`.
pub enum CallInfo<'a> {
    /// The instruction is not a call.
    NotACall,

    /// Direct call to the given function reference, with argument values.
    Direct(FuncRef, &'a [Value]),

    /// Indirect call with the given signature reference and argument values.
    Indirect(SigRef, &'a [Value]),
}
509
/// Value-type constraints for an opcode, packed into a compact form suitable
/// for the generated `OPCODE_CONSTRAINTS` table; decoded by the accessors in
/// `impl OpcodeConstraints`.
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    // Bit-packed flags, decoded by the accessor methods:
    // bits 0-2 = number of fixed results, bit 3 = uses typevar operand,
    // bit 4 = requires typevar operand, bits 5-7 = fixed value arguments.
    flags: u8,

    // Index into `TYPE_SETS` for the controlling type variable; values at or
    // beyond `TYPE_SETS.len()` mean "not polymorphic" (see `typeset_offset()`).
    typeset_offset: u8,

    // Starting index of this opcode's entries in `OPERAND_CONSTRAINTS`.
    constraint_offset: u16,
}
545
546impl OpcodeConstraints {
547 pub fn use_typevar_operand(self) -> bool {
551 (self.flags & 0x8) != 0
552 }
553
554 pub fn requires_typevar_operand(self) -> bool {
561 (self.flags & 0x10) != 0
562 }
563
564 pub fn num_fixed_results(self) -> usize {
567 (self.flags & 0x7) as usize
568 }
569
570 pub fn num_fixed_value_arguments(self) -> usize {
578 ((self.flags >> 5) & 0x7) as usize
579 }
580
581 fn typeset_offset(self) -> Option<usize> {
584 let offset = usize::from(self.typeset_offset);
585 if offset < TYPE_SETS.len() {
586 Some(offset)
587 } else {
588 None
589 }
590 }
591
592 fn constraint_offset(self) -> usize {
594 self.constraint_offset as usize
595 }
596
597 pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
600 debug_assert!(n < self.num_fixed_results(), "Invalid result index");
601 match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
602 ResolvedConstraint::Bound(t) => t,
603 ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {:?}", ts),
604 }
605 }
606
607 pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
613 debug_assert!(
614 n < self.num_fixed_value_arguments(),
615 "Invalid value argument index"
616 );
617 let offset = self.constraint_offset() + self.num_fixed_results();
618 OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
619 }
620
621 pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
624 self.typeset_offset().map(|offset| TYPE_SETS[offset])
625 }
626
627 pub fn is_polymorphic(self) -> bool {
629 self.ctrl_typeset().is_some()
630 }
631}
632
// 8-bit scalar bitset used for the log2 bit-width sets below.
type BitSet8 = ScalarBitSet<u8>;
// 16-bit scalar bitset used for the log2 lane-count sets below.
type BitSet16 = ScalarBitSet<u16>;
635
/// A set of permitted value types for a type variable.
///
/// Members are log2-encoded: `lanes`/`dynamic_lanes` store log2(lane count)
/// and `ints`/`floats`/`refs` store log2(lane bit-width), matching the
/// `log2_lane_count`/`log2_lane_bits` queries in `contains`/`is_base_type`.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed lane counts, as log2 values.
    pub lanes: BitSet16,
    /// Allowed integer lane widths, as log2 bit counts.
    pub ints: BitSet8,
    /// Allowed floating-point lane widths, as log2 bit counts.
    pub floats: BitSet8,
    /// Allowed reference-type widths, as log2 bit counts.
    pub refs: BitSet8,
    /// Allowed minimum lane counts for dynamic vectors, as log2 values.
    pub dynamic_lanes: BitSet16,
}
650
651impl ValueTypeSet {
652 fn is_base_type(self, scalar: Type) -> bool {
656 let l2b = u8::try_from(scalar.log2_lane_bits()).unwrap();
657 if scalar.is_int() {
658 self.ints.contains(l2b)
659 } else if scalar.is_float() {
660 self.floats.contains(l2b)
661 } else if scalar.is_ref() {
662 self.refs.contains(l2b)
663 } else {
664 false
665 }
666 }
667
668 pub fn contains(self, typ: Type) -> bool {
670 if typ.is_dynamic_vector() {
671 let l2l = u8::try_from(typ.log2_min_lane_count()).unwrap();
672 self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
673 } else {
674 let l2l = u8::try_from(typ.log2_lane_count()).unwrap();
675 self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
676 }
677 }
678
679 pub fn example(self) -> Type {
683 let t = if self.ints.max().unwrap_or(0) > 5 {
684 types::I32
685 } else if self.floats.max().unwrap_or(0) > 5 {
686 types::F32
687 } else {
688 types::I8
689 };
690 t.by(1 << self.lanes.min().unwrap()).unwrap()
691 }
692}
693
/// A constraint on an operand's type relative to the controlling type
/// variable, resolved by [`OperandConstraint::resolve`].
enum OperandConstraint {
    /// The operand has this concrete value type.
    Concrete(Type),

    /// The operand may take any type in the `TYPE_SETS` entry at this index.
    Free(u8),

    /// Same type as the controlling type variable.
    Same,

    /// Resolves via `Type::lane_of` of the controlling type.
    LaneOf,

    /// Resolves via `Type::as_truthy` of the controlling type.
    AsTruthy,

    /// Resolves via `Type::half_width` of the controlling type.
    HalfWidth,

    /// Resolves via `Type::double_width` of the controlling type.
    DoubleWidth,

    /// Resolves via `Type::split_lanes` (round-tripped through the fixed
    /// vector form for dynamic controlling types).
    SplitLanes,

    /// Resolves via `Type::merge_lanes` (round-tripped through the fixed
    /// vector form for dynamic controlling types).
    MergeLanes,

    /// Resolves via `Type::dynamic_to_vector` of the controlling type.
    DynamicToVector,

    /// A free set of scalar int/float types narrower than the controlling type.
    Narrower,

    /// A free set of scalar int/float types wider than the controlling type.
    Wider,
}
733
impl OperandConstraint {
    /// Resolve this constraint against the controlling type `ctrl_type`,
    /// yielding either a single bound type or a free typeset.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                if ctrl_type.is_dynamic_vector() {
                    // Dynamic vectors round-trip through the fixed-vector
                    // form to apply `split_lanes`.
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                if ctrl_type.is_dynamic_vector() {
                    // Same fixed-form round-trip as `SplitLanes`.
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Scalars only: the lane-count set contains just log2 = 0.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    // Integer widths from 8 bits (log2 = 3) up to, but not
                    // including, the controlling type's width.
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // Float widths from 16 bits (log2 = 4) up to, but not
                    // including, the controlling type's width.
                    tys.floats = BitSet8::from_range(4, ctrl_type_bits as u8);
                } else {
                    panic!("The Narrower constraint only operates on floats or ints");
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Scalars only: the lane-count set contains just log2 = 0.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // Guard the range start against running past the 8-bit
                    // set's capacity; otherwise the set stays empty.
                    if lower_bound < BitSet8::capacity() {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    if lower_bound < BitSet8::capacity() {
                        tys.floats = BitSet8::from_range(lower_bound, 8);
                    }
                } else {
                    panic!("The Wider constraint only operates on floats or ints");
                }

                ResolvedConstraint::Free(tys)
            }
        }
    }
}
850
/// The result of resolving an `OperandConstraint` against a controlling type.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand is bound to exactly this type.
    Bound(Type),
    /// The operand may take any type in the given set.
    Free(ValueTypeSet),
}
859
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;

    // `InstructionData` must stay `Copy` so instructions are cheap to move.
    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

    // Guard the compact 16-byte layout of `InstructionData`.
    #[test]
    fn inst_data_size() {
        assert_eq!(std::mem::size_of::<InstructionData>(), 16);
    }

    // Exercise opcode equality, format lookup, Debug/Display names, parsing,
    // and the niche optimization of `Option<Opcode>`.
    #[test]
    fn opcodes() {
        use core::mem;

        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;

        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);

        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");

        // Round-trip parsing, including rejection of malformed names.
        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));

        // `Opcode` has a niche, so `Option<Opcode>` costs no extra space.
        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    // Same 16-byte layout guarantee, checked via `core::mem`.
    #[test]
    fn instruction_data() {
        use core::mem;
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    // Decode the packed `OpcodeConstraints` flags for representative opcodes.
    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );

        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }

        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);

        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);

        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
        assert_eq!(cmp.result_type(0, types::I64), types::I8);
    }

    // Exercise `ValueTypeSet` membership and `example()` selection over
    // various combinations of lane/int/float/ref bitsets.
    #[test]
    fn value_set() {
        use crate::ir::types::*;

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            refs: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F16));
        assert!(!vts.contains(F32));
        assert!(!vts.contains(F128));
        assert!(vts.contains(R32));
        assert!(vts.contains(R64));
        assert_eq!(vts.example().to_string(), "i32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
        assert!(!vts.contains(R32));
        assert!(!vts.contains(R64));
    }
}