use crate::abi::{self, align_to, scratch, LocalSlot};
use crate::codegen::{CodeGenContext, FuncEnv};
use crate::isa::reg::Reg;
use cranelift_codegen::{
binemit::CodeOffset,
ir::{Endianness, LibCall, MemFlags, RelSourceLoc, SourceLoc, UserExternalNameRef},
Final, MachBufferFinalized, MachLabel,
};
use std::{fmt::Debug, ops::Range};
use wasmtime_environ::PtrSize;
pub(crate) use cranelift_codegen::ir::TrapCode;
/// Kinds of integer division in WebAssembly.
#[derive(Eq, PartialEq)]
pub(crate) enum DivKind {
    /// Signed division.
    Signed,
    /// Unsigned division.
    Unsigned,
}
/// Kinds of integer remainder in WebAssembly.
///
/// Derives `Eq`/`PartialEq` for consistency with the parallel
/// [`DivKind`], so both operation kinds can be compared directly by
/// instruction-selection code.
#[derive(Eq, PartialEq)]
pub(crate) enum RemKind {
    /// Signed remainder.
    Signed,
    /// Unsigned remainder.
    Unsigned,
}
/// The direction of a stack memory move performed by
/// `MacroAssembler::memmove`.
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) enum MemMoveDirection {
    /// Move from a higher stack-pointer offset to a lower one
    /// (`memmove` asserts `dst > src` offsets for this variant).
    HighToLow,
    /// Move from a lower stack-pointer offset to a higher one
    /// (`memmove` asserts `dst < src` offsets for this variant).
    LowToHigh,
}
/// Kinds of behavior for float-to-integer truncation instructions.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) enum TruncKind {
    /// The input is checked before truncating.
    Checked,
    /// The input is not checked before truncating.
    Unchecked,
}

impl TruncKind {
    /// Returns `true` when this is the [`TruncKind::Checked`] variant.
    pub(crate) fn is_checked(&self) -> bool {
        matches!(self, TruncKind::Checked)
    }
}
/// An offset from the stack pointer, in bytes.
#[derive(Copy, Clone, Eq, PartialEq, Debug, PartialOrd, Ord, Default)]
pub struct SPOffset(u32);

impl SPOffset {
    /// Creates an [`SPOffset`] from a raw byte count.
    pub fn from_u32(offs: u32) -> Self {
        SPOffset(offs)
    }

    /// Returns the offset as a raw byte count.
    pub fn as_u32(&self) -> u32 {
        let SPOffset(raw) = self;
        *raw
    }
}

/// A region of the machine stack, identified by its stack-pointer
/// offset and its size in bytes.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct StackSlot {
    /// Offset of the slot relative to the stack pointer.
    pub offset: SPOffset,
    /// Size of the slot, in bytes.
    pub size: u32,
}

impl StackSlot {
    /// Creates a new [`StackSlot`] from an offset and a size.
    pub fn new(offs: SPOffset, size: u32) -> Self {
        StackSlot {
            offset: offs,
            size,
        }
    }
}
/// Kinds of integer comparison, with explicit signed/unsigned variants.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub(crate) enum IntCmpKind {
    /// Equal.
    Eq,
    /// Not equal.
    Ne,
    /// Signed less-than.
    LtS,
    /// Unsigned less-than.
    LtU,
    /// Signed greater-than.
    GtS,
    /// Unsigned greater-than.
    GtU,
    /// Signed less-than-or-equal.
    LeS,
    /// Unsigned less-than-or-equal.
    LeU,
    /// Signed greater-than-or-equal.
    GeS,
    /// Unsigned greater-than-or-equal.
    GeU,
}
/// Kinds of float comparison.
///
/// Derives `Clone`/`Copy`/`Eq`/`PartialEq` in addition to `Debug`,
/// matching the parallel [`IntCmpKind`], so comparison kinds can be
/// copied and compared uniformly.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub(crate) enum FloatCmpKind {
    /// Equal.
    Eq,
    /// Not equal.
    Ne,
    /// Less-than.
    Lt,
    /// Greater-than.
    Gt,
    /// Less-than-or-equal.
    Le,
    /// Greater-than-or-equal.
    Ge,
}
/// Kinds of shift and rotate operations.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub(crate) enum ShiftKind {
    /// Shift left.
    Shl,
    /// Signed (arithmetic) shift right.
    ShrS,
    /// Unsigned (logical) shift right.
    ShrU,
    /// Rotate left.
    Rotl,
    /// Rotate right.
    Rotr,
}
/// Kinds of integer extension; variants are named after the Wasm
/// instructions they correspond to.
pub(crate) enum ExtendKind {
    /// Sign-extend a 32-bit value to 64 bits (`i64.extend_i32_s`).
    I64ExtendI32S,
    /// Zero-extend a 32-bit value to 64 bits (`i64.extend_i32_u`).
    I64ExtendI32U,
    /// Sign-extend the low 8 bits of a 32-bit value (`i32.extend8_s`).
    I32Extend8S,
    /// Sign-extend the low 16 bits of a 32-bit value (`i32.extend16_s`).
    I32Extend16S,
    /// Sign-extend the low 8 bits of a 64-bit value (`i64.extend8_s`).
    I64Extend8S,
    /// Sign-extend the low 16 bits of a 64-bit value (`i64.extend16_s`).
    I64Extend16S,
    /// Sign-extend the low 32 bits of a 64-bit value (`i64.extend32_s`).
    I64Extend32S,
}
/// The width of a machine operand.
#[derive(Copy, Debug, Clone, Eq, PartialEq)]
pub(crate) enum OperandSize {
    /// 8 bits.
    S8,
    /// 16 bits.
    S16,
    /// 32 bits.
    S32,
    /// 64 bits.
    S64,
    /// 128 bits.
    S128,
}

impl OperandSize {
    /// Returns the number of bits in this operand size.
    pub fn num_bits(&self) -> u8 {
        match self {
            Self::S8 => 8,
            Self::S16 => 16,
            Self::S32 => 32,
            Self::S64 => 64,
            Self::S128 => 128,
        }
    }

    /// Returns the number of bytes in this operand size.
    pub fn bytes(&self) -> u32 {
        // Every variant is a whole number of bytes.
        u32::from(self.num_bits()) / 8
    }

    /// Returns the binary logarithm of the number of bits
    /// (S8 => 3, S16 => 4, ..., S128 => 7).
    pub fn log2(&self) -> u8 {
        // Bit counts are exact powers of two, so the number of
        // trailing zeros is the base-2 logarithm.
        self.num_bits().trailing_zeros() as u8
    }

    /// Builds an [`OperandSize`] from a byte count.
    ///
    /// # Panics
    ///
    /// Panics for byte counts other than 4, 8, or 16.
    pub fn from_bytes(bytes: u8) -> Self {
        use OperandSize::*;
        match bytes {
            4 => S32,
            8 => S64,
            16 => S128,
            _ => panic!("Invalid bytes {} for OperandSize", bytes),
        }
    }
}
/// An operand that is either a register or an immediate.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum RegImm {
    /// A register operand.
    Reg(Reg),
    /// An immediate operand.
    Imm(Imm),
}
/// An immediate operand, stored as raw bits per type.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Imm {
    /// A 32-bit integer immediate.
    I32(u32),
    /// A 64-bit integer immediate.
    I64(u64),
    /// A 32-bit float immediate (raw bit pattern).
    F32(u32),
    /// A 64-bit float immediate (raw bit pattern).
    F64(u64),
    /// A 128-bit vector immediate (raw bit pattern).
    V128(i128),
}

impl Imm {
    /// Creates a 64-bit integer immediate from a signed value.
    pub fn i64(val: i64) -> Self {
        Imm::I64(val as u64)
    }

    /// Creates a 32-bit integer immediate from a signed value.
    pub fn i32(val: i32) -> Self {
        Imm::I32(val as u32)
    }

    /// Creates a 32-bit float immediate from its raw bit pattern.
    pub fn f32(bits: u32) -> Self {
        Imm::F32(bits)
    }

    /// Creates a 64-bit float immediate from its raw bit pattern.
    pub fn f64(bits: u64) -> Self {
        Imm::F64(bits)
    }

    /// Creates a 128-bit vector immediate from its raw bit pattern.
    pub fn v128(bits: i128) -> Self {
        Imm::V128(bits)
    }

    /// Returns the immediate as a signed 32-bit value when it is an
    /// integer immediate that fits in 32 bits; `None` otherwise.
    pub fn to_i32(&self) -> Option<i32> {
        match *self {
            Imm::I32(v) => Some(v as i32),
            Imm::I64(v) => i32::try_from(v as i64).ok(),
            Imm::F32(_) | Imm::F64(_) | Imm::V128(_) => None,
        }
    }
}
/// Where a VM context value lives when passed as a call argument.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub(crate) enum VMContextLoc {
    /// The VM context is held in the given register.
    Reg(Reg),
    /// The VM context is held in the pinned register.
    Pinned,
}

/// The maximum number of VM context arguments a call can carry.
pub(crate) const MAX_CONTEXT_ARGS: usize = 2;

/// The VM context arguments prepended to a call's regular arguments.
#[derive(Clone, Debug)]
pub(crate) enum ContextArgs {
    /// No context arguments are passed.
    None,
    /// A single VM context argument.
    VMContext([VMContextLoc; 1]),
    /// A callee VM context followed by a caller VM context.
    CalleeAndCallerVMContext([VMContextLoc; MAX_CONTEXT_ARGS]),
}

impl ContextArgs {
    /// Creates an empty set of context arguments.
    pub fn none() -> Self {
        ContextArgs::None
    }

    /// Creates context arguments where both the callee and the caller
    /// VM contexts come from the pinned register.
    pub fn pinned_callee_and_caller_vmctx() -> Self {
        ContextArgs::CalleeAndCallerVMContext([VMContextLoc::Pinned; MAX_CONTEXT_ARGS])
    }

    /// Creates context arguments with a single VM context taken from
    /// the pinned register.
    pub fn pinned_vmctx() -> Self {
        ContextArgs::VMContext([VMContextLoc::Pinned])
    }

    /// Creates context arguments with an explicit callee VM context
    /// register and the caller VM context in the pinned register.
    pub fn with_callee_and_pinned_caller(callee_vmctx: Reg) -> Self {
        ContextArgs::CalleeAndCallerVMContext([
            VMContextLoc::Reg(callee_vmctx),
            VMContextLoc::Pinned,
        ])
    }

    /// Returns how many context arguments will be passed.
    pub fn len(&self) -> usize {
        self.as_slice().len()
    }

    /// Returns the context argument locations as a slice.
    pub fn as_slice(&self) -> &[VMContextLoc] {
        match self {
            ContextArgs::None => &[],
            ContextArgs::VMContext(a) => a,
            ContextArgs::CalleeAndCallerVMContext(a) => a,
        }
    }
}
/// The kinds of callee a call instruction can target.
#[derive(Copy, Clone, Debug)]
pub(crate) enum CalleeKind {
    /// An indirect callee whose address is in a register.
    Indirect(Reg),
    /// A direct callee identified by an external name reference.
    Direct(UserExternalNameRef),
    /// A runtime library call.
    LibCall(LibCall),
}

impl CalleeKind {
    /// Creates an indirect callee from the register holding its address.
    pub fn indirect(reg: Reg) -> Self {
        CalleeKind::Indirect(reg)
    }

    /// Creates a direct callee from its external name reference.
    pub fn direct(name: UserExternalNameRef) -> Self {
        CalleeKind::Direct(name)
    }

    /// Creates a callee targeting a runtime library call.
    pub fn libcall(call: LibCall) -> Self {
        CalleeKind::LibCall(call)
    }
}
impl RegImm {
    /// Wraps a register as a [`RegImm`] operand.
    pub fn reg(r: Reg) -> Self {
        Self::Reg(r)
    }

    /// Builds a 64-bit integer immediate operand.
    pub fn i64(val: i64) -> Self {
        Self::Imm(Imm::i64(val))
    }

    /// Builds a 32-bit integer immediate operand.
    pub fn i32(val: i32) -> Self {
        Self::Imm(Imm::i32(val))
    }

    /// Builds a 32-bit float immediate operand from raw bits.
    #[allow(dead_code)]
    pub fn f32(bits: u32) -> Self {
        Self::Imm(Imm::f32(bits))
    }

    /// Builds a 64-bit float immediate operand from raw bits.
    #[allow(dead_code)]
    pub fn f64(bits: u64) -> Self {
        Self::Imm(Imm::f64(bits))
    }

    /// Builds a 128-bit vector immediate operand from raw bits.
    pub fn v128(bits: i128) -> Self {
        Self::Imm(Imm::v128(bits))
    }
}
impl From<Reg> for RegImm {
fn from(r: Reg) -> Self {
Self::Reg(r)
}
}
/// Kinds of float rounding modes.
#[derive(Debug)]
pub enum RoundingMode {
    /// Round to the nearest representable value.
    Nearest,
    /// Round up (toward positive infinity).
    Up,
    /// Round down (toward negative infinity).
    Down,
    /// Round toward zero (truncate).
    Zero,
}
/// Memory flags for accesses emitted by the compiler itself (e.g. to
/// runtime-managed data), marked as fully trusted.
pub const TRUSTED_FLAGS: MemFlags = MemFlags::trusted();
/// Memory flags for accesses driven by untrusted Wasm code: explicitly
/// little-endian, matching Wasm's memory byte order.
pub const UNTRUSTED_FLAGS: MemFlags = MemFlags::new().with_endianness(Endianness::Little);
/// Generic interface to the machine-code emitter used by code
/// generation. Each target ISA provides an implementation; the default
/// methods implement ISA-independent logic in terms of the required
/// primitives.
pub(crate) trait MacroAssembler {
    /// The addressing mode used by this assembler.
    type Address: Copy + Debug;

    /// The pointer-size abstraction for the target.
    type Ptr: PtrSize;

    /// The ABI details of the target.
    type ABI: abi::ABI;

    /// Emit the function prologue: frame setup followed by the stack
    /// check.
    fn prologue(&mut self, vmctx: Reg) {
        self.frame_setup();
        self.check_stack(vmctx);
    }

    /// Emit the frame setup sequence.
    fn frame_setup(&mut self);

    /// Emit the frame restore sequence.
    fn frame_restore(&mut self);

    /// Emit a stack check. `vmctx` presumably gives access to the
    /// runtime's stack limit — confirm against the ISA implementations.
    fn check_stack(&mut self, vmctx: Reg);

    /// Emit the function epilogue, restoring the frame.
    fn epilogue(&mut self) {
        self.frame_restore();
    }

    /// Reserve `bytes` bytes of stack space.
    fn reserve_stack(&mut self, bytes: u32);

    /// Free `bytes` bytes of stack space.
    fn free_stack(&mut self, bytes: u32);

    /// Reset the stack pointer to the given offset.
    fn reset_stack_pointer(&mut self, offset: SPOffset);

    /// Compute the address of the given local slot.
    fn local_address(&mut self, local: &LocalSlot) -> Self::Address;

    /// Construct an address from a stack-pointer offset.
    /// NOTE(review): the exact distinction between `address_from_sp`
    /// and `address_at_sp` is implementation-defined; confirm against
    /// the ISA backends.
    fn address_from_sp(&self, offset: SPOffset) -> Self::Address;

    /// Construct an address at the given stack-pointer offset.
    fn address_at_sp(&self, offset: SPOffset) -> Self::Address;

    /// Construct an address at `offset` from the VM context pointer.
    fn address_at_vmctx(&self, offset: u32) -> Self::Address;

    /// Construct an address at `offset` from the given register.
    fn address_at_reg(&self, reg: Reg, offset: u32) -> Self::Address;

    /// Emit a call to the callee produced by `f`. `stack_args_size`
    /// is the number of bytes of stack arguments; the `u32` return is
    /// a byte count related to the call's stack usage — confirm
    /// against the implementations.
    fn call(&mut self, stack_args_size: u32, f: impl FnMut(&mut Self) -> CalleeKind) -> u32;

    /// Return the current offset of the stack pointer.
    fn sp_offset(&self) -> SPOffset;

    /// Emit a store of `src` to `dst` with the given operand size.
    fn store(&mut self, src: RegImm, dst: Self::Address, size: OperandSize);

    /// Emit a pointer-sized store of `src` to `dst`.
    fn store_ptr(&mut self, src: Reg, dst: Self::Address);

    /// Emit a WebAssembly store (register sources only, unlike
    /// `store` which also accepts immediates).
    fn wasm_store(&mut self, src: Reg, dst: Self::Address, size: OperandSize);

    /// Emit a load from `src` into `dst` with the given operand size.
    fn load(&mut self, src: Self::Address, dst: Reg, size: OperandSize);

    /// Emit a WebAssembly load, optionally extending the loaded value
    /// according to `kind`.
    fn wasm_load(
        &mut self,
        src: Self::Address,
        dst: Reg,
        size: OperandSize,
        kind: Option<ExtendKind>,
    );

    /// Emit a pointer-sized load from `src` into `dst`.
    fn load_ptr(&mut self, src: Self::Address, dst: Reg);

    /// Compute the effective address of `_src` into `_dst`.
    /// NOTE(review): the underscore-prefixed parameters suggest some
    /// backends may not use all of them — confirm.
    fn load_addr(&mut self, _src: Self::Address, _dst: Reg, _size: OperandSize);

    /// Pop a value of the given size from the machine stack into `dst`.
    fn pop(&mut self, dst: Reg, size: OperandSize);

    /// Emit a register/immediate-to-register move.
    fn mov(&mut self, src: RegImm, dst: Reg, size: OperandSize);

    /// Emit a conditional move of `src` into `dst` when `cc` holds.
    fn cmov(&mut self, src: Reg, dst: Reg, cc: IntCmpKind, size: OperandSize);

    /// Move `bytes` bytes of stack memory from the `src` offset to the
    /// `dst` offset, one machine word at a time, with a final
    /// half-word copy for any remainder. `bytes` must be a multiple
    /// of 4 and the offsets must be consistent with `direction`.
    fn memmove(&mut self, src: SPOffset, dst: SPOffset, bytes: u32, direction: MemMoveDirection) {
        // The direction only validates the relationship between the
        // two offsets; the copy loop below always walks the region in
        // the same order regardless of direction.
        match direction {
            MemMoveDirection::LowToHigh => debug_assert!(dst.as_u32() < src.as_u32()),
            MemMoveDirection::HighToLow => debug_assert!(dst.as_u32() > src.as_u32()),
        }
        debug_assert!(bytes % 4 == 0);
        let mut remaining = bytes;
        let word_bytes = <Self::ABI as abi::ABI>::word_bytes();
        let scratch = scratch!(Self);
        // Start just past the regions and pre-increment inside the
        // loop: the first copied chunk sits at `offset - bytes +
        // word_bytes`.
        let mut dst_offs = dst.as_u32() - bytes;
        let mut src_offs = src.as_u32() - bytes;
        let word_bytes = word_bytes as u32;
        while remaining >= word_bytes {
            remaining -= word_bytes;
            dst_offs += word_bytes;
            src_offs += word_bytes;
            // Copy a full machine word through the scratch register.
            self.load_ptr(self.address_from_sp(SPOffset::from_u32(src_offs)), scratch);
            self.store_ptr(
                scratch.into(),
                self.address_from_sp(SPOffset::from_u32(dst_offs)),
            );
        }
        if remaining > 0 {
            // Given the `bytes % 4 == 0` invariant, any tail must be
            // exactly half a word (e.g. a trailing 4-byte chunk on a
            // 64-bit target); the debug_assert below checks this.
            let half_word = word_bytes / 2;
            let ptr_size = OperandSize::from_bytes(half_word as u8);
            debug_assert!(remaining == half_word);
            dst_offs += half_word;
            src_offs += half_word;
            self.load(
                self.address_from_sp(SPOffset::from_u32(src_offs)),
                scratch,
                ptr_size,
            );
            self.store(
                scratch.into(),
                self.address_from_sp(SPOffset::from_u32(dst_offs)),
                ptr_size,
            );
        }
    }

    /// Emit `dst = lhs + rhs`.
    fn add(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize);

    /// Emit an unsigned addition that traps with `trap` on overflow —
    /// presumably; confirm against the ISA implementations.
    fn checked_uadd(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize, trap: TrapCode);

    /// Emit `dst = lhs - rhs`.
    fn sub(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize);

    /// Emit `dst = lhs * rhs`.
    fn mul(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize);

    /// Emit a float addition.
    fn float_add(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float subtraction.
    fn float_sub(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float multiplication.
    fn float_mul(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float division.
    fn float_div(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float minimum.
    fn float_min(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float maximum.
    fn float_max(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float copysign.
    fn float_copysign(&mut self, dst: Reg, lhs: Reg, rhs: Reg, size: OperandSize);

    /// Emit a float absolute value, in place on `dst`.
    fn float_abs(&mut self, dst: Reg, size: OperandSize);

    /// Emit a float negation, in place on `dst`.
    fn float_neg(&mut self, dst: Reg, size: OperandSize);

    /// Emit a float rounding operation for `mode`; `fallback` is
    /// invoked when the target cannot emit the rounding natively —
    /// presumably to emit a libcall; confirm against implementations.
    fn float_round<F: FnMut(&mut FuncEnv<Self::Ptr>, &mut CodeGenContext, &mut Self)>(
        &mut self,
        mode: RoundingMode,
        env: &mut FuncEnv<Self::Ptr>,
        context: &mut CodeGenContext,
        size: OperandSize,
        fallback: F,
    );

    /// Emit a float square root.
    fn float_sqrt(&mut self, dst: Reg, src: Reg, size: OperandSize);

    /// Emit `dst = lhs & rhs`.
    fn and(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize);

    /// Emit `dst = lhs | rhs`.
    fn or(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize);

    /// Emit `dst = lhs ^ rhs`.
    fn xor(&mut self, dst: Reg, lhs: Reg, rhs: RegImm, size: OperandSize);

    /// Emit a shift of `lhs` by the immediate `imm` into `dst`.
    fn shift_ir(&mut self, dst: Reg, imm: u64, lhs: Reg, kind: ShiftKind, size: OperandSize);

    /// Emit a shift whose operands come from the codegen context.
    fn shift(&mut self, context: &mut CodeGenContext, kind: ShiftKind, size: OperandSize);

    /// Emit an integer division whose operands come from the codegen
    /// context.
    fn div(&mut self, context: &mut CodeGenContext, kind: DivKind, size: OperandSize);

    /// Emit an integer remainder whose operands come from the codegen
    /// context.
    fn rem(&mut self, context: &mut CodeGenContext, kind: RemKind, size: OperandSize);

    /// Emit a comparison of `src1` against `src2`.
    fn cmp(&mut self, src1: Reg, src2: RegImm, size: OperandSize);

    /// Emit an integer comparison that materializes its boolean
    /// result into `dst` — presumably comparing `dst` against `src`;
    /// confirm against the ISA implementations.
    fn cmp_with_set(&mut self, src: RegImm, dst: Reg, kind: IntCmpKind, size: OperandSize);

    /// Emit a float comparison of `src1` and `src2` that materializes
    /// its boolean result into `dst`.
    fn float_cmp_with_set(
        &mut self,
        src1: Reg,
        src2: Reg,
        dst: Reg,
        kind: FloatCmpKind,
        size: OperandSize,
    );

    /// Emit a count-leading-zeros of `src` into `dst`.
    fn clz(&mut self, src: Reg, dst: Reg, size: OperandSize);

    /// Emit a count-trailing-zeros of `src` into `dst`.
    fn ctz(&mut self, src: Reg, dst: Reg, size: OperandSize);

    /// Push the register onto the machine stack, returning the stack
    /// slot created for it.
    fn push(&mut self, src: Reg, size: OperandSize) -> StackSlot;

    /// Finalize the assembler, consuming it and returning the
    /// finalized machine buffer.
    fn finalize(self, base: Option<SourceLoc>) -> MachBufferFinalized<Final>;

    /// Zero the given register.
    fn zero(&mut self, reg: Reg);

    /// Emit a population count whose operand comes from the codegen
    /// context.
    fn popcnt(&mut self, context: &mut CodeGenContext, size: OperandSize);

    /// Emit a wrap of `src` into `dst` — presumably `i32.wrap_i64`;
    /// confirm against the ISA implementations.
    fn wrap(&mut self, src: Reg, dst: Reg);

    /// Emit an integer extension of `src` into `dst` per `kind`.
    fn extend(&mut self, src: Reg, dst: Reg, kind: ExtendKind);

    /// Emit a signed float-to-integer truncation.
    fn signed_truncate(
        &mut self,
        src: Reg,
        dst: Reg,
        src_size: OperandSize,
        dst_size: OperandSize,
        kind: TruncKind,
    );

    /// Emit an unsigned float-to-integer truncation; `tmp_fpr` is a
    /// temporary float register available to the implementation.
    fn unsigned_truncate(
        &mut self,
        src: Reg,
        dst: Reg,
        tmp_fpr: Reg,
        src_size: OperandSize,
        dst_size: OperandSize,
        kind: TruncKind,
    );

    /// Emit a signed integer-to-float conversion.
    fn signed_convert(&mut self, src: Reg, dst: Reg, src_size: OperandSize, dst_size: OperandSize);

    /// Emit an unsigned integer-to-float conversion; `tmp_gpr` is a
    /// temporary general-purpose register available to the
    /// implementation.
    fn unsigned_convert(
        &mut self,
        src: Reg,
        dst: Reg,
        tmp_gpr: Reg,
        src_size: OperandSize,
        dst_size: OperandSize,
    );

    /// Emit a bitwise reinterpretation of a float as an integer.
    fn reinterpret_float_as_int(&mut self, src: Reg, dst: Reg, size: OperandSize);

    /// Emit a bitwise reinterpretation of an integer as a float.
    fn reinterpret_int_as_float(&mut self, src: Reg, dst: Reg, size: OperandSize);

    /// Emit a float demotion (f64 to f32 — per the Wasm `demote`
    /// naming; confirm against implementations).
    fn demote(&mut self, src: Reg, dst: Reg);

    /// Emit a float promotion (f32 to f64 — per the Wasm `promote`
    /// naming; confirm against implementations).
    fn promote(&mut self, src: Reg, dst: Reg);

    /// Zero the local-slot byte range `mem`: handle a non-word-aligned
    /// (but 4-byte-aligned) start with a 32-bit store, then zero whole
    /// word-sized slots — via an immediate for a single slot, or via a
    /// zeroed scratch register when there are several.
    fn zero_mem_range(&mut self, mem: &Range<u32>) {
        let word_size = <Self::ABI as abi::ABI>::word_bytes() as u32;
        if mem.is_empty() {
            return;
        }
        let start = if mem.start % word_size == 0 {
            mem.start
        } else {
            // Start is only 4-byte aligned: zero the leading 4 bytes
            // and continue from the next word boundary.
            assert!(mem.start % 4 == 0);
            let start = align_to(mem.start, word_size);
            let addr: Self::Address = self.local_address(&LocalSlot::i32(start));
            self.store(RegImm::i32(0), addr, OperandSize::S32);
            assert!(start % word_size == 0);
            start
        };
        let end = align_to(mem.end, word_size);
        let slots = (end - start) / word_size;
        if slots == 1 {
            // NOTE(review): the `+ word_size` suggests local slot
            // offsets point past the end of the slot — confirm
            // against `LocalSlot`'s definition.
            let slot = LocalSlot::i64(start + word_size);
            let addr: Self::Address = self.local_address(&slot);
            self.store(RegImm::i64(0), addr, OperandSize::S64);
        } else {
            // Multiple slots: zero a scratch register once and reuse
            // it for every store.
            let zero = scratch!(Self);
            self.zero(zero);
            let zero = RegImm::reg(zero);
            for step in (start..end).into_iter().step_by(word_size as usize) {
                let slot = LocalSlot::i64(step + word_size);
                let addr: Self::Address = self.local_address(&slot);
                self.store(zero, addr, OperandSize::S64);
            }
        }
    }

    /// Request a new machine label.
    fn get_label(&mut self) -> MachLabel;

    /// Bind the given label at the current code position.
    fn bind(&mut self, label: MachLabel);

    /// Emit a conditional branch to `taken` when the comparison of
    /// `lhs` and `rhs` per `kind` holds.
    fn branch(
        &mut self,
        kind: IntCmpKind,
        lhs: Reg,
        rhs: RegImm,
        taken: MachLabel,
        size: OperandSize,
    );

    /// Emit an unconditional jump to `target`.
    fn jmp(&mut self, target: MachLabel);

    /// Emit a jump table dispatching on `index` over `targets`;
    /// `tmp` is a temporary register available to the implementation.
    fn jmp_table(&mut self, targets: &[MachLabel], index: Reg, tmp: Reg);

    /// Emit an unreachable-code marker.
    fn unreachable(&mut self);

    /// Emit an unconditional trap with the given trap code.
    fn trap(&mut self, code: TrapCode);

    /// Emit a trap taken when condition `cc` holds.
    fn trapif(&mut self, cc: IntCmpKind, code: TrapCode);

    /// Emit a trap taken when `src` is zero (per the `trapz` naming;
    /// confirm against the ISA implementations).
    fn trapz(&mut self, src: Reg, code: TrapCode);

    /// Ensure the stack pointer is appropriate for jumping to code
    /// expecting SP at `target`, freeing any excess stack. The
    /// `checked_sub(..).unwrap_or(0)` means nothing is emitted when
    /// the current offset is already at or below the target.
    fn ensure_sp_for_jump(&mut self, target: SPOffset) {
        let bytes = self
            .sp_offset()
            .as_u32()
            .checked_sub(target.as_u32())
            .unwrap_or(0);
        if bytes > 0 {
            self.free_stack(bytes);
        }
    }

    /// Mark the start of a source location, returning the current
    /// code offset and the relative source location.
    fn start_source_loc(&mut self, loc: RelSourceLoc) -> (CodeOffset, RelSourceLoc);

    /// Mark the end of the current source location.
    fn end_source_loc(&mut self);

    /// Return the current offset into the code buffer.
    fn current_code_offset(&self) -> CodeOffset;
}