use crate::constant_hash::Table;
use alloc::vec::Vec;
use core::fmt::{self, Display, Formatter};
use core::ops::{Deref, DerefMut};
use core::str::FromStr;
#[cfg(feature = "enable-serde")]
use serde_derive::{Deserialize, Serialize};
use crate::bitset::ScalarBitSet;
use crate::entity;
use crate::ir::{
self, Block, ExceptionTable, ExceptionTables, FuncRef, MemFlags, SigRef, StackSlot, Type,
Value,
condcodes::{FloatCC, IntCC},
trapcode::TrapCode,
types,
};
/// A list of SSA values, stored out-of-line in a `ValueListPool`.
pub type ValueList = entity::EntityList<Value>;
/// Memory pool providing storage for `ValueList`s.
pub type ValueListPool = entity::ListPool<Value>;
/// A block being branched to, together with its arguments.
///
/// Stored as a single value list: element 0 is the target `Block`
/// reinterpreted as a `Value` index, and the remaining elements are
/// `BlockArg`s packed via `BlockArg::encode_as_value`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    // Invariant: never empty; `values[0]` always holds the target block.
    values: entity::EntityList<Value>,
}
impl BlockCall {
    // The target block is stored in `values[0]` by reinterpreting the
    // `Block` index as a `Value` index; these two helpers convert
    // between the two representations.
    fn value_to_block(val: Value) -> Block {
        Block::from_u32(val.as_u32())
    }
    fn block_to_value(block: Block) -> Value {
        Value::from_u32(block.as_u32())
    }
    /// Creates a block call to `block` with the given arguments,
    /// allocating storage from `pool`.
    pub fn new(
        block: Block,
        args: impl IntoIterator<Item = BlockArg>,
        pool: &mut ValueListPool,
    ) -> Self {
        let mut values = ValueList::default();
        values.push(Self::block_to_value(block), pool);
        values.extend(args.into_iter().map(|arg| arg.encode_as_value()), pool);
        Self { values }
    }
    /// Returns the block this call targets.
    pub fn block(&self, pool: &ValueListPool) -> Block {
        // `values` is never empty: element 0 always holds the block.
        let val = self.values.first(pool).unwrap();
        Self::value_to_block(val)
    }
    /// Replaces the target block, leaving the arguments untouched.
    pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
        *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
    }
    /// Appends a single argument to this block call.
    pub fn append_argument(&mut self, arg: impl Into<BlockArg>, pool: &mut ValueListPool) {
        self.values.push(arg.into().encode_as_value(), pool);
    }
    /// Number of arguments (the stored target block is excluded).
    pub fn len(&self, pool: &ValueListPool) -> usize {
        self.values.len(pool) - 1
    }
    /// Iterates over the arguments, decoding each from its packed form.
    pub fn args<'a>(
        &self,
        pool: &'a ValueListPool,
    ) -> impl ExactSizeIterator<Item = BlockArg> + DoubleEndedIterator<Item = BlockArg> + use<'a>
    {
        // Skip element 0, which is the target block, not an argument.
        self.values.as_slice(pool)[1..]
            .iter()
            .map(|value| BlockArg::decode_from_value(*value))
    }
    /// Rewrites every argument in place with `f`.
    pub fn update_args<F: FnMut(BlockArg) -> BlockArg>(
        &mut self,
        pool: &mut ValueListPool,
        mut f: F,
    ) {
        for raw in self.values.as_mut_slice(pool)[1..].iter_mut() {
            let new = f(BlockArg::decode_from_value(*raw));
            *raw = new.encode_as_value();
        }
    }
    /// Removes the argument at index `ix` (0-based among arguments;
    /// the +1 skips the stored block).
    pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
        self.values.remove(1 + ix, pool)
    }
    /// Removes all arguments, keeping only the target block.
    pub fn clear(&mut self, pool: &mut ValueListPool) {
        self.values.truncate(1, pool)
    }
    /// Appends all `elements` as arguments to this block call.
    pub fn extend<I, T>(&mut self, elements: I, pool: &mut ValueListPool)
    where
        I: IntoIterator<Item = T>,
        T: Into<BlockArg>,
    {
        self.values.extend(
            elements
                .into_iter()
                .map(|elem| elem.into().encode_as_value()),
            pool,
        )
    }
    /// Returns a `Display`able wrapper that borrows `pool` for
    /// argument lookup.
    pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
        DisplayBlockCall { block: *self, pool }
    }
    /// Clones the underlying list storage so the copy can be mutated
    /// independently of `self`.
    pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
        Self {
            values: self.values.deep_clone(pool),
        }
    }
}
/// Wrapper allowing a [`BlockCall`] to be displayed; carries the pool
/// needed to look up the target block and arguments.
pub struct DisplayBlockCall<'a> {
    block: BlockCall,
    pool: &'a ValueListPool,
}
impl<'a> Display for DisplayBlockCall<'a> {
    /// Formats the call as `blockN` or `blockN(arg0, arg1, ...)`;
    /// the parentheses are omitted when there are no arguments.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // `self.pool` is already a `&ValueListPool`; the previous
        // `&self.pool` created a needless double reference.
        write!(f, "{}", self.block.block(self.pool))?;
        if self.block.len(self.pool) > 0 {
            write!(f, "(")?;
            for (ix, arg) in self.block.args(self.pool).enumerate() {
                // Separate arguments after the first with ", ".
                if ix > 0 {
                    write!(f, ", ")?;
                }
                write!(f, "{arg}")?;
            }
            write!(f, ")")?;
        }
        Ok(())
    }
}
/// A single argument to a [`BlockCall`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BlockArg {
    /// An ordinary SSA value.
    Value(Value),
    /// Printed as `retN`; presumably the N-th return value of a
    /// `try_call` — TODO confirm against the try_call documentation.
    TryCallRet(u32),
    /// Printed as `exnN`; presumably the N-th exception payload of a
    /// `try_call` — TODO confirm.
    TryCallExn(u32),
}
impl BlockArg {
    /// Packs this argument into a single `Value`-sized index: the top
    /// two bits hold a tag (0 = Value, 1 = TryCallRet, 2 = TryCallExn)
    /// and the low 30 bits hold the payload.
    fn encode_as_value(&self) -> Value {
        let (tag, payload) = match *self {
            BlockArg::Value(v) => (0, v.as_bits()),
            BlockArg::TryCallRet(i) => (1, i),
            BlockArg::TryCallExn(i) => (2, i),
        };
        // The payload must fit below the tag bits.
        assert!(payload < (1 << 30));
        Value::from_bits((tag << 30) | payload)
    }
    /// Inverse of `encode_as_value`.
    fn decode_from_value(v: Value) -> Self {
        let bits = v.as_u32();
        let payload = bits & ((1 << 30) - 1);
        match bits >> 30 {
            0 => Self::Value(Value::from_bits(payload)),
            1 => Self::TryCallRet(payload),
            2 => Self::TryCallExn(payload),
            _ => unreachable!(),
        }
    }
    /// Returns the wrapped SSA value, if this argument is one.
    pub fn as_value(&self) -> Option<Value> {
        if let BlockArg::Value(v) = *self {
            Some(v)
        } else {
            None
        }
    }
    /// Applies `f` to the wrapped SSA value; other variants are
    /// returned unchanged.
    pub fn map_value<F: FnMut(Value) -> Value>(&self, mut f: F) -> Self {
        match *self {
            BlockArg::Value(v) => BlockArg::Value(f(v)),
            other => other,
        }
    }
}
impl Display for BlockArg {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self {
BlockArg::Value(v) => write!(f, "{v}"),
BlockArg::TryCallRet(i) => write!(f, "ret{i}"),
BlockArg::TryCallExn(i) => write!(f, "exn{i}"),
}
}
}
impl From<Value> for BlockArg {
fn from(value: Value) -> BlockArg {
BlockArg::Value(value)
}
}
include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));
impl Display for Opcode {
    /// Writes the opcode's textual name (e.g. `iadd`).
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.write_str(opcode_name(*self))
    }
}
impl Opcode {
    /// Returns this opcode's instruction format.
    ///
    /// The generated tables are indexed by `opcode - 1`; discriminant 0
    /// appears to be reserved — TODO confirm against the generator.
    pub fn format(self) -> InstructionFormat {
        OPCODE_FORMAT[self as usize - 1]
    }
    /// Returns the operand/result type constraints for this opcode.
    pub fn constraints(self) -> OpcodeConstraints {
        OPCODE_CONSTRAINTS[self as usize - 1]
    }
    /// True when this opcode is a call but not a return.
    #[inline]
    pub fn is_safepoint(self) -> bool {
        self.is_call() && !self.is_return()
    }
}
impl FromStr for Opcode {
    type Err = &'static str;
    /// Parses a textual opcode name (e.g. `"iadd"`) by probing the
    /// generated constant hash table.
    fn from_str(s: &str) -> Result<Self, &'static str> {
        use crate::constant_hash::{probe, simple_hash};
        probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s))
            .map(|i| OPCODE_HASH_TABLE[i].unwrap())
            .map_err(|_| "Unknown opcode")
    }
}
/// Lets the generated `OPCODE_HASH_TABLE` slice be probed by the
/// shared constant-hash lookup code.
impl<'a> Table<&'a str> for [Option<Opcode>] {
    fn len(&self) -> usize {
        // Calls the inherent slice `len`, not this trait method:
        // inherent methods take precedence, so this does not recurse.
        self.len()
    }
    fn key(&self, idx: usize) -> Option<&'a str> {
        // Empty hash-table slots are `None` and yield no key.
        self[idx].map(opcode_name)
    }
}
/// A growable list of SSA values, used while assembling instruction
/// argument lists; dereferences to a slice of `Value`s.
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);
impl VariableArgs {
pub fn new() -> Self {
Self(Vec::new())
}
pub fn push(&mut self, v: Value) {
self.0.push(v)
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
let mut vlist = ValueList::default();
vlist.extend(fixed.iter().cloned(), pool);
vlist.extend(self.0, pool);
vlist
}
}
impl Deref for VariableArgs {
    type Target = [Value];
    /// Borrows the arguments as a slice.
    fn deref(&self) -> &[Value] {
        self.0.as_slice()
    }
}
impl DerefMut for VariableArgs {
    /// Mutably borrows the arguments as a slice.
    fn deref_mut(&mut self) -> &mut [Value] {
        self.0.as_mut_slice()
    }
}
impl Display for VariableArgs {
    /// Prints the values as a comma-separated list: `v1, v2, v3`.
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        let mut sep = "";
        for val in self.0.iter() {
            write!(fmt, "{sep}{val}")?;
            sep = ", ";
        }
        Ok(())
    }
}
impl Default for VariableArgs {
fn default() -> Self {
Self::new()
}
}
impl InstructionData {
    /// Returns this instruction's branch targets as a slice of block
    /// calls, or an empty slice if it is not a branch.
    ///
    /// `br_table` targets live out-of-line in `jump_tables`;
    /// `try_call`/`try_call_indirect` targets live in
    /// `exception_tables`.
    pub fn branch_destination<'a>(
        &'a self,
        jump_tables: &'a ir::JumpTables,
        exception_tables: &'a ir::ExceptionTables,
    ) -> &'a [BlockCall] {
        match self {
            Self::Jump { destination, .. } => core::slice::from_ref(destination),
            Self::Brif { blocks, .. } => blocks.as_slice(),
            Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                exception_tables.get(*exception).unwrap().all_branches()
            }
            _ => {
                // Only non-branch instructions may reach this arm.
                debug_assert!(!self.opcode().is_branch());
                &[]
            }
        }
    }
    /// Mutable variant of [`Self::branch_destination`].
    pub fn branch_destination_mut<'a>(
        &'a mut self,
        jump_tables: &'a mut ir::JumpTables,
        exception_tables: &'a mut ir::ExceptionTables,
    ) -> &'a mut [BlockCall] {
        match self {
            Self::Jump { destination, .. } => core::slice::from_mut(destination),
            Self::Brif { blocks, .. } => blocks.as_mut_slice(),
            Self::BranchTable { table, .. } => {
                jump_tables.get_mut(*table).unwrap().all_branches_mut()
            }
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                exception_tables
                    .get_mut(*exception)
                    .unwrap()
                    .all_branches_mut()
            }
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &mut []
            }
        }
    }
    /// Applies `f` to every `Value` referenced by this instruction:
    /// direct arguments, branch-destination arguments, and exception
    /// table contexts.
    pub fn map_values(
        &mut self,
        pool: &mut ValueListPool,
        jump_tables: &mut ir::JumpTables,
        exception_tables: &mut ir::ExceptionTables,
        mut f: impl FnMut(Value) -> Value,
    ) {
        for arg in self.arguments_mut(pool) {
            *arg = f(*arg);
        }
        for block in self.branch_destination_mut(jump_tables, exception_tables) {
            // Only `Value` arguments are rewritten; ret/exn indices
            // pass through `map_value` untouched.
            block.update_args(pool, |arg| arg.map_value(|val| f(val)));
        }
        if let Some(et) = self.exception_table() {
            for ctx in exception_tables[et].contexts_mut() {
                *ctx = f(*ctx);
            }
        }
    }
    /// Returns the trap code of a trapping instruction, if any.
    pub fn trap_code(&self) -> Option<TrapCode> {
        match *self {
            Self::CondTrap { code, .. }
            | Self::IntAddTrap { code, .. }
            | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }
    /// Returns the condition code of an integer comparison, if any.
    pub fn cond_code(&self) -> Option<IntCC> {
        match self {
            &InstructionData::IntCompare { cond, .. }
            | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
            _ => None,
        }
    }
    /// Returns the condition code of a float comparison, if any.
    pub fn fp_cond_code(&self) -> Option<FloatCC> {
        match self {
            &InstructionData::FloatCompare { cond, .. } => Some(cond),
            _ => None,
        }
    }
    /// Mutable access to the trap code, if this instruction traps.
    pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
        match self {
            Self::CondTrap { code, .. }
            | Self::IntAddTrap { code, .. }
            | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }
    /// Returns the atomic read-modify-write operation, if applicable.
    pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
        match self {
            &InstructionData::AtomicRmw { op, .. } => Some(op),
            _ => None,
        }
    }
    /// Returns the offset of a load/store-style instruction, if any.
    pub fn load_store_offset(&self) -> Option<i32> {
        match self {
            &InstructionData::Load { offset, .. }
            | &InstructionData::StackLoad { offset, .. }
            | &InstructionData::Store { offset, .. }
            | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
            _ => None,
        }
    }
    /// Returns the memory flags of a memory-accessing instruction.
    pub fn memflags(&self) -> Option<MemFlags> {
        match self {
            &InstructionData::Load { flags, .. }
            | &InstructionData::LoadNoOffset { flags, .. }
            | &InstructionData::Store { flags, .. }
            | &InstructionData::StoreNoOffset { flags, .. }
            | &InstructionData::AtomicCas { flags, .. }
            | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
            _ => None,
        }
    }
    /// Returns the stack slot referenced by a stack load/store.
    pub fn stack_slot(&self) -> Option<StackSlot> {
        match self {
            &InstructionData::StackStore { stack_slot, .. }
            | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
            _ => None,
        }
    }
    /// Classifies this instruction as a call, exposing the callee and
    /// argument list.
    pub fn analyze_call<'a>(
        &'a self,
        pool: &'a ValueListPool,
        exception_tables: &ExceptionTables,
    ) -> CallInfo<'a> {
        match *self {
            Self::Call {
                func_ref, ref args, ..
            } => CallInfo::Direct(func_ref, args.as_slice(pool)),
            // For indirect calls, args[0] is the callee pointer and is
            // excluded from the reported argument slice.
            Self::CallIndirect {
                sig_ref, ref args, ..
            } => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
            Self::TryCall {
                func_ref,
                ref args,
                exception,
                ..
            } => {
                // The signature comes from the exception table entry.
                let exdata = &exception_tables[exception];
                CallInfo::DirectWithSig(func_ref, exdata.signature(), args.as_slice(pool))
            }
            Self::TryCallIndirect {
                exception,
                ref args,
                ..
            } => {
                let exdata = &exception_tables[exception];
                CallInfo::Indirect(exdata.signature(), &args.as_slice(pool)[1..])
            }
            // `stack_switch` is explicitly treated as not-a-call here.
            Self::Ternary {
                opcode: Opcode::StackSwitch,
                ..
            } => {
                CallInfo::NotACall
            }
            _ => {
                debug_assert!(!self.opcode().is_call());
                CallInfo::NotACall
            }
        }
    }
    /// Masks certain immediates to the width of the controlling type.
    /// No-op when `ctrl_typevar` is invalid.
    #[inline]
    pub(crate) fn mask_immediates(&mut self, ctrl_typevar: Type) {
        if ctrl_typevar.is_invalid() {
            return;
        }
        let bit_width = ctrl_typevar.bits();
        match self {
            Self::UnaryImm { opcode: _, imm } => {
                *imm = imm.mask_to_width(bit_width);
            }
            Self::BinaryImm64 {
                opcode,
                arg: _,
                imm,
            } => {
                // Only the signed div/rem immediates are masked here;
                // other BinaryImm64 opcodes keep their immediate as-is.
                if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            Self::IntCompareImm {
                opcode,
                arg: _,
                cond,
                imm,
            } => {
                debug_assert_eq!(*opcode, Opcode::IcmpImm);
                // Mask only when the condition is signed (its unsigned
                // counterpart differs from it).
                if cond.unsigned() != *cond {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            _ => {}
        }
    }
    /// Returns the exception table of a `try_call`-style instruction.
    pub fn exception_table(&self) -> Option<ExceptionTable> {
        match self {
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                Some(*exception)
            }
            _ => None,
        }
    }
}
/// Classification of an instruction as a call, produced by
/// [`InstructionData::analyze_call`].
pub enum CallInfo<'a> {
    /// The instruction is not a call.
    NotACall,
    /// Direct call to a known function, with its arguments.
    Direct(FuncRef, &'a [Value]),
    /// Indirect call through a signature; the callee pointer is
    /// excluded from the argument slice.
    Indirect(SigRef, &'a [Value]),
    /// Direct call that also carries a signature (the `try_call` case).
    DirectWithSig(FuncRef, SigRef, &'a [Value]),
}
/// Value-type constraints for an opcode, packed into a compact
/// table-driven form.
///
/// `flags` layout (derived from the accessors below):
/// - bits 0..=2: number of fixed results
/// - bit 3: uses the typevar operand
/// - bit 4: requires the typevar operand
/// - bits 5..=7: number of fixed value arguments
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    flags: u8,
    /// Index into `TYPE_SETS`; an out-of-range value marks the opcode
    /// as non-polymorphic.
    typeset_offset: u8,
    /// Index of this opcode's first entry in `OPERAND_CONSTRAINTS`.
    constraint_offset: u16,
}
impl OpcodeConstraints {
    /// Bit 3: does this opcode infer its controlling type from a
    /// designated typevar operand?
    pub fn use_typevar_operand(self) -> bool {
        (self.flags & 0x8) != 0
    }
    /// Bit 4: must the typevar operand be present?
    pub fn requires_typevar_operand(self) -> bool {
        (self.flags & 0x10) != 0
    }
    /// Bits 0..=2: number of results with fixed constraints.
    pub fn num_fixed_results(self) -> usize {
        (self.flags & 0x7) as usize
    }
    /// Bits 5..=7: number of value arguments with fixed constraints.
    pub fn num_fixed_value_arguments(self) -> usize {
        ((self.flags >> 5) & 0x7) as usize
    }
    /// Index into `TYPE_SETS`, or `None` when the stored offset is out
    /// of range (the encoding for non-polymorphic opcodes).
    fn typeset_offset(self) -> Option<usize> {
        let offset = usize::from(self.typeset_offset);
        if offset < TYPE_SETS.len() {
            Some(offset)
        } else {
            None
        }
    }
    fn constraint_offset(self) -> usize {
        self.constraint_offset as usize
    }
    /// Resolves the type of result `n` given the controlling type.
    ///
    /// Panics if the result constraint resolves to a free type set.
    pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
        debug_assert!(n < self.num_fixed_results(), "Invalid result index");
        match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
            ResolvedConstraint::Bound(t) => t,
            ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {ts:?}"),
        }
    }
    /// Resolves the constraint of value argument `n`; argument
    /// constraints are stored after the result constraints.
    pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
        debug_assert!(
            n < self.num_fixed_value_arguments(),
            "Invalid value argument index"
        );
        let offset = self.constraint_offset() + self.num_fixed_results();
        OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
    }
    /// The set of types allowed for the controlling type variable, or
    /// `None` for non-polymorphic opcodes.
    pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
        self.typeset_offset().map(|offset| TYPE_SETS[offset])
    }
    /// True when this opcode has a controlling type variable.
    pub fn is_polymorphic(self) -> bool {
        self.ctrl_typeset().is_some()
    }
}
/// 8-bit set used for log2-encoded scalar-width membership.
type BitSet8 = ScalarBitSet<u8>;
/// 16-bit set used for log2-encoded lane-count membership.
type BitSet16 = ScalarBitSet<u16>;
/// A set of value types, encoded compactly: every bit set holds log2
/// values (lane counts or scalar bit widths), as used by `contains`.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed fixed-vector lane counts, as log2(lane count).
    pub lanes: BitSet16,
    /// Allowed integer widths, as log2(bits).
    pub ints: BitSet8,
    /// Allowed float widths, as log2(bits).
    pub floats: BitSet8,
    /// Allowed dynamic-vector minimum lane counts, as log2.
    pub dynamic_lanes: BitSet16,
}
impl ValueTypeSet {
    /// Is `scalar` a member as a lane (base) type? Membership is
    /// indexed by log2 of the scalar's bit width; non-int, non-float
    /// types are never members.
    fn is_base_type(self, scalar: Type) -> bool {
        let l2b = u8::try_from(scalar.log2_lane_bits()).unwrap();
        if scalar.is_int() {
            self.ints.contains(l2b)
        } else if scalar.is_float() {
            self.floats.contains(l2b)
        } else {
            false
        }
    }
    /// Does `typ` belong to this set? The lane count (fixed or
    /// dynamic) and the lane type are checked independently.
    pub fn contains(self, typ: Type) -> bool {
        if typ.is_dynamic_vector() {
            let l2l = u8::try_from(typ.log2_min_lane_count()).unwrap();
            self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        } else {
            let l2l = u8::try_from(typ.log2_lane_count()).unwrap();
            self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        }
    }
    /// Picks a representative type from the set.
    ///
    /// Heuristic: `> 5` tests whether the set reaches past 32-bit
    /// scalars (log2(32) = 5). Panics if `lanes` is empty.
    pub fn example(self) -> Type {
        let t = if self.ints.max().unwrap_or(0) > 5 {
            types::I32
        } else if self.floats.max().unwrap_or(0) > 5 {
            types::F32
        } else {
            types::I8
        };
        // Scale the scalar up to the smallest allowed lane count.
        t.by(1 << self.lanes.min().unwrap()).unwrap()
    }
}
/// How an operand's type relates to the instruction's controlling
/// type; resolved by [`OperandConstraint::resolve`].
enum OperandConstraint {
    /// Exactly this type, regardless of the controlling type.
    Concrete(Type),
    /// Any type from the `TYPE_SETS` entry at this index.
    Free(u8),
    /// The controlling type itself.
    Same,
    /// `ctrl_type.lane_of()`.
    LaneOf,
    /// `ctrl_type.as_truthy()`.
    AsTruthy,
    /// `ctrl_type.half_width()`.
    HalfWidth,
    /// `ctrl_type.double_width()`.
    DoubleWidth,
    /// `ctrl_type.split_lanes()` (via a fixed vector for dynamic types).
    SplitLanes,
    /// `ctrl_type.merge_lanes()` (via a fixed vector for dynamic types).
    MergeLanes,
    /// `ctrl_type.dynamic_to_vector()`.
    DynamicToVector,
    /// Any int/float scalar strictly narrower than the controlling type.
    Narrower,
    /// Any int/float scalar strictly wider than the controlling type.
    Wider,
}
impl OperandConstraint {
    /// Resolves this constraint against the controlling type,
    /// producing either a single bound type or a free type set.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                // Dynamic vectors round-trip through their fixed-vector
                // equivalent to apply the lane transformation.
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();
                // Scalars only: allowed lane count is 2^0 = 1.
                tys.lanes = ScalarBitSet::from_range(0, 1);
                if ctrl_type.is_int() {
                    // Ints narrower than the control type, down to
                    // 2^3 = 8 bits.
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // Floats narrower than the control type, down to
                    // 2^4 = 16 bits.
                    tys.floats = BitSet8::from_range(4, ctrl_type_bits as u8);
                } else {
                    panic!(
                        "The Narrower constraint only operates on floats or ints, got {ctrl_type:?}"
                    );
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();
                tys.lanes = ScalarBitSet::from_range(0, 1);
                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // When no wider width exists, leave the set empty
                    // rather than building an invalid range.
                    if lower_bound < BitSet8::capacity() {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    if lower_bound < BitSet8::capacity() {
                        tys.floats = BitSet8::from_range(lower_bound, 8);
                    }
                } else {
                    panic!(
                        "The Wider constraint only operates on floats or ints, got {ctrl_type:?}"
                    );
                }
                ResolvedConstraint::Free(tys)
            }
        }
    }
}
/// The result of resolving an `OperandConstraint` against a
/// controlling type.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand must have exactly this type.
    Bound(Type),
    /// The operand may take any type in this set.
    Free(ValueTypeSet),
}
/// Maps each kind of entity reference an `InstructionData` can
/// contain, allowing an instruction to be rewritten wholesale.
pub trait InstructionMapper {
    /// Maps a single SSA value.
    fn map_value(&mut self, value: Value) -> Value;
    /// Maps an out-of-line value list.
    fn map_value_list(&mut self, value_list: ValueList) -> ValueList;
    /// Maps a global value reference.
    fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue;
    /// Maps a jump table reference.
    fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable;
    /// Maps an exception table reference.
    fn map_exception_table(&mut self, exception_table: ExceptionTable) -> ExceptionTable;
    /// Maps a block call (target block plus arguments).
    fn map_block_call(&mut self, block_call: BlockCall) -> BlockCall;
    /// Maps a plain block reference.
    fn map_block(&mut self, block: Block) -> Block;
    /// Maps a function reference.
    fn map_func_ref(&mut self, func_ref: FuncRef) -> FuncRef;
    /// Maps a signature reference.
    fn map_sig_ref(&mut self, sig_ref: SigRef) -> SigRef;
    /// Maps a stack slot reference.
    fn map_stack_slot(&mut self, stack_slot: StackSlot) -> StackSlot;
    /// Maps a dynamic stack slot reference.
    fn map_dynamic_stack_slot(
        &mut self,
        dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot;
    /// Maps a constant-pool reference.
    fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant;
    /// Maps an immediate-pool reference.
    fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate;
}
/// Blanket forwarding impl so a `&mut M` can be used wherever an
/// `InstructionMapper` is expected; every method delegates to the
/// underlying mapper.
impl<'a, T> InstructionMapper for &'a mut T
where
    T: InstructionMapper,
{
    fn map_value(&mut self, value: Value) -> Value {
        (**self).map_value(value)
    }
    fn map_value_list(&mut self, value_list: ValueList) -> ValueList {
        (**self).map_value_list(value_list)
    }
    fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue {
        (**self).map_global_value(global_value)
    }
    fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable {
        (**self).map_jump_table(jump_table)
    }
    fn map_exception_table(&mut self, exception_table: ExceptionTable) -> ExceptionTable {
        (**self).map_exception_table(exception_table)
    }
    fn map_block_call(&mut self, block_call: BlockCall) -> BlockCall {
        (**self).map_block_call(block_call)
    }
    fn map_block(&mut self, block: Block) -> Block {
        (**self).map_block(block)
    }
    fn map_func_ref(&mut self, func_ref: FuncRef) -> FuncRef {
        (**self).map_func_ref(func_ref)
    }
    fn map_sig_ref(&mut self, sig_ref: SigRef) -> SigRef {
        (**self).map_sig_ref(sig_ref)
    }
    fn map_stack_slot(&mut self, stack_slot: StackSlot) -> StackSlot {
        (**self).map_stack_slot(stack_slot)
    }
    fn map_dynamic_stack_slot(
        &mut self,
        dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot {
        (**self).map_dynamic_stack_slot(dynamic_stack_slot)
    }
    fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant {
        (**self).map_constant(constant)
    }
    fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate {
        (**self).map_immediate(immediate)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;
    use ir::{DynamicStackSlot, GlobalValue, JumpTable};

    // `InstructionData` must remain `Copy` so instructions can be
    // moved around cheaply.
    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

    // Guard against accidental growth of the instruction encoding.
    #[test]
    fn inst_data_size() {
        assert_eq!(core::mem::size_of::<InstructionData>(), 16);
    }

    // Basic properties of the generated `Opcode` enum: equality,
    // Debug/Display formatting, parsing, and the `Option<Opcode>`
    // niche optimization.
    #[test]
    fn opcodes() {
        use core::mem;
        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;
        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);
        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");
        // Parsing is exact: embedded NULs and empty strings must fail.
        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    #[test]
    fn instruction_data() {
        use core::mem;
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    // Spot-check the generated operand constraints for a few opcodes.
    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        // Bitcast's input is a free constraint, not bound to the
        // controlling type.
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }
        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);
        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);
        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
        assert_eq!(cmp.result_type(0, types::I64), types::I8);
    }

    // Membership and `example()` behavior of `ValueTypeSet`.
    #[test]
    fn value_set() {
        use crate::ir::types::*;
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F16));
        assert!(!vts.contains(F32));
        assert!(!vts.contains(F128));
        assert_eq!(vts.example().to_string(), "i32");
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
    }

    // `InstructionData::map` must route every entity reference through
    // the supplied `InstructionMapper`; the mapper below bumps each
    // index by one (or replaces lists/block calls wholesale) so the
    // effect is observable.
    #[test]
    fn instruction_data_map() {
        struct TestMapper;
        impl InstructionMapper for TestMapper {
            fn map_value(&mut self, value: Value) -> Value {
                Value::from_u32(value.as_u32() + 1)
            }
            fn map_value_list(&mut self, _value_list: ValueList) -> ValueList {
                ValueList::new()
            }
            fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue {
                GlobalValue::from_u32(global_value.as_u32() + 1)
            }
            fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable {
                JumpTable::from_u32(jump_table.as_u32() + 1)
            }
            fn map_exception_table(&mut self, exception_table: ExceptionTable) -> ExceptionTable {
                ExceptionTable::from_u32(exception_table.as_u32() + 1)
            }
            fn map_block_call(&mut self, _block_call: BlockCall) -> BlockCall {
                let block = Block::from_u32(42);
                let mut pool = ValueListPool::new();
                BlockCall::new(block, [], &mut pool)
            }
            fn map_block(&mut self, block: Block) -> Block {
                Block::from_u32(block.as_u32() + 1)
            }
            fn map_func_ref(&mut self, func_ref: FuncRef) -> FuncRef {
                FuncRef::from_u32(func_ref.as_u32() + 1)
            }
            fn map_sig_ref(&mut self, sig_ref: SigRef) -> SigRef {
                SigRef::from_u32(sig_ref.as_u32() + 1)
            }
            fn map_stack_slot(&mut self, stack_slot: StackSlot) -> StackSlot {
                StackSlot::from_u32(stack_slot.as_u32() + 1)
            }
            fn map_dynamic_stack_slot(
                &mut self,
                dynamic_stack_slot: ir::DynamicStackSlot,
            ) -> ir::DynamicStackSlot {
                DynamicStackSlot::from_u32(dynamic_stack_slot.as_u32() + 1)
            }
            fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant {
                ir::Constant::from_u32(constant.as_u32() + 1)
            }
            fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate {
                ir::Immediate::from_u32(immediate.as_u32() + 1)
            }
        }
        let mut pool = ValueListPool::new();
        let map = |inst: InstructionData| inst.map(TestMapper);
        assert_eq!(
            map(InstructionData::Binary {
                opcode: Opcode::Iadd,
                args: [Value::from_u32(10), Value::from_u32(20)]
            }),
            InstructionData::Binary {
                opcode: Opcode::Iadd,
                args: [Value::from_u32(11), Value::from_u32(21)]
            }
        );
        let mut args = ValueList::new();
        args.push(Value::from_u32(42), &mut pool);
        let func_ref = FuncRef::from_u32(99);
        let inst = map(InstructionData::Call {
            opcode: Opcode::Call,
            args,
            func_ref,
        });
        let InstructionData::Call {
            opcode: Opcode::Call,
            args,
            func_ref,
        } = inst
        else {
            panic!()
        };
        // The test mapper replaces value lists with an empty list.
        assert!(args.is_empty());
        assert_eq!(func_ref, FuncRef::from_u32(100));
        assert_eq!(
            map(InstructionData::UnaryGlobalValue {
                opcode: Opcode::GlobalValue,
                global_value: GlobalValue::from_u32(4),
            }),
            InstructionData::UnaryGlobalValue {
                opcode: Opcode::GlobalValue,
                global_value: GlobalValue::from_u32(5),
            }
        );
        assert_eq!(
            map(InstructionData::BranchTable {
                opcode: Opcode::BrTable,
                arg: Value::from_u32(0),
                table: JumpTable::from_u32(1),
            }),
            InstructionData::BranchTable {
                opcode: Opcode::BrTable,
                arg: Value::from_u32(1),
                table: JumpTable::from_u32(2),
            }
        );
        assert_eq!(
            map(InstructionData::TryCall {
                opcode: Opcode::TryCall,
                args,
                func_ref: FuncRef::from_u32(0),
                exception: ExceptionTable::from_u32(1),
            }),
            InstructionData::TryCall {
                opcode: Opcode::TryCall,
                args,
                func_ref: FuncRef::from_u32(1),
                exception: ExceptionTable::from_u32(2),
            }
        );
        // Both jumps map to the fixed block 42 call, so the mapped
        // results compare equal regardless of the input destination.
        assert_eq!(
            map(InstructionData::Jump {
                opcode: Opcode::Jump,
                destination: BlockCall::new(Block::from_u32(99), [], &mut pool),
            }),
            map(InstructionData::Jump {
                opcode: Opcode::Jump,
                destination: BlockCall::new(Block::from_u32(42), [], &mut pool),
            })
        );
        assert_eq!(
            map(InstructionData::ExceptionHandlerAddress {
                opcode: Opcode::GetExceptionHandlerAddress,
                block: Block::from_u32(1),
                imm: 0.into(),
            }),
            InstructionData::ExceptionHandlerAddress {
                opcode: Opcode::GetExceptionHandlerAddress,
                block: Block::from_u32(2),
                imm: 0.into(),
            },
        );
        assert_eq!(
            map(InstructionData::CallIndirect {
                opcode: Opcode::CallIndirect,
                args,
                sig_ref: SigRef::from_u32(11)
            }),
            InstructionData::CallIndirect {
                opcode: Opcode::CallIndirect,
                args: ValueList::new(),
                sig_ref: SigRef::from_u32(12)
            }
        );
        assert_eq!(
            map(InstructionData::StackLoad {
                opcode: Opcode::StackLoad,
                stack_slot: StackSlot::from_u32(0),
                offset: 0.into()
            }),
            InstructionData::StackLoad {
                opcode: Opcode::StackLoad,
                stack_slot: StackSlot::from_u32(1),
                offset: 0.into()
            },
        );
        assert_eq!(
            map(InstructionData::DynamicStackLoad {
                opcode: Opcode::DynamicStackLoad,
                dynamic_stack_slot: DynamicStackSlot::from_u32(0),
            }),
            InstructionData::DynamicStackLoad {
                opcode: Opcode::DynamicStackLoad,
                dynamic_stack_slot: DynamicStackSlot::from_u32(1),
            },
        );
        assert_eq!(
            map(InstructionData::UnaryConst {
                opcode: ir::Opcode::Vconst,
                constant_handle: ir::Constant::from_u32(2)
            }),
            InstructionData::UnaryConst {
                opcode: ir::Opcode::Vconst,
                constant_handle: ir::Constant::from_u32(3)
            },
        );
        assert_eq!(
            map(InstructionData::Shuffle {
                opcode: ir::Opcode::Shuffle,
                args: [Value::from_u32(0), Value::from_u32(1)],
                imm: ir::Immediate::from_u32(41),
            }),
            InstructionData::Shuffle {
                opcode: ir::Opcode::Shuffle,
                args: [Value::from_u32(1), Value::from_u32(2)],
                imm: ir::Immediate::from_u32(42),
            },
        );
    }
}