use core::hint::cold_path;
use crate::{Result, conversion::convert_heaptype, macros::visit::*};
use alloc::string::ToString;
use alloc::vec::Vec;
use tinywasm_types::{Instruction, MemoryArg, WasmFunctionData};
use wasmparser::{
FrameKind, FuncValidator, FuncValidatorAllocations, FunctionBody, OperatorsReader, OperatorsReaderAllocations,
VisitOperator, VisitSimdOperator, WasmModuleResources,
};
/// Kind of wasm structured-control construct being lowered. Determines how
/// the jumps collected in a `LoweringCtx` are patched when the frame ends.
#[derive(Debug, Clone, Copy)]
enum BlockKind {
    Block,
    Loop,
    If,
}
/// Width class a wasm value occupies on the operand stack:
/// 32-bit (i32/f32/references), 64-bit (i64/f64), or 128-bit (v128).
/// Computed from a `wasmparser::ValType` by `operand_size`.
#[derive(Debug, Clone, Copy)]
enum OperandSize {
    S32,
    S64,
    S128,
}
/// Per-width-class operand counts that sit below a control frame
/// (see `stack_base_at_frame`); used to compute drop/keep amounts for branches.
#[derive(Debug, Clone, Copy, Default)]
struct StackBase {
    s32: u16,
    s64: u16,
    s128: u16,
}
/// Lowering state for one open block/loop/if, pushed on `ctx_stack` when the
/// construct starts and popped at its `end`.
struct LoweringCtx {
    kind: BlockKind,
    // `If` frames only: set once an `else` has been seen.
    has_else: bool,
    // Instruction index of the frame's first instruction (loop branch target).
    start_ip: usize,
    // Instruction indices of placeholder jumps to patch when the frame ends.
    // For `If` frames, index 0 is the `JumpIfZero32` emitted by `visit_if`.
    branch_jumps: Vec<usize>,
}
/// Accumulates out-of-band per-function data referenced by index from
/// instructions: v128 constants / shuffle masks and branch-table target ips.
#[derive(Default)]
struct FunctionDataBuilder {
    v128_constants: Vec<[u8; 16]>,
    branch_table_targets: Vec<u32>,
}
impl FunctionDataBuilder {
    /// Consume the builder, freezing the accumulated tables into the
    /// immutable boxed slices stored on `WasmFunctionData`.
    fn finish(self) -> WasmFunctionData {
        let Self { v128_constants, branch_table_targets } = self;
        WasmFunctionData {
            v128_constants: v128_constants.into_boxed_slice(),
            branch_table_targets: branch_table_targets.into_boxed_slice(),
        }
    }
}
// Adapter wrapping the builder so each operator is first validated (through
// the builder's validator) and then lowered; see the `validate_then_visit`
// macro used in the impl below.
struct ValidateThenVisit<'a, R: WasmModuleResources>(&'a mut FunctionBuilder<R>);
fn operand_size(ty: wasmparser::ValType) -> OperandSize {
match ty {
wasmparser::ValType::I32 | wasmparser::ValType::F32 | wasmparser::ValType::Ref(_) => OperandSize::S32,
wasmparser::ValType::I64 | wasmparser::ValType::F64 => OperandSize::S64,
wasmparser::ValType::V128 => OperandSize::S128,
}
}
impl<'a, R: WasmModuleResources> VisitOperator<'a> for ValidateThenVisit<'_, R> {
    type Output = ();
    // Generates one method per operator that validates it and then forwards
    // to the wrapped `FunctionBuilder` (see the `validate_then_visit` macro
    // in `crate::macros::visit`).
    wasmparser::for_each_visit_operator!(validate_then_visit);
    // Route SIMD operators through the same validate-then-visit path.
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }
}
impl<R: WasmModuleResources> VisitSimdOperator<'_> for ValidateThenVisit<'_, R> {
    // Same validate-then-forward expansion for the SIMD operator subset.
    wasmparser::for_each_visit_simd_operator!(validate_then_visit_simd);
}
/// Decode, validate and lower a single function body in one pass.
///
/// Each operator is validated and immediately lowered (via
/// `ValidateThenVisit`), so the lowering code can query the validator for
/// type/frame information. Returns the lowered instructions, the out-of-band
/// function data, and the reusable validator/reader allocations so callers
/// can recycle them for the next function.
pub(crate) fn process_operators_and_validate(
    validator: FuncValidator<impl WasmModuleResources>,
    body: FunctionBody<'_>,
    local_addr_map: Vec<u16>,
    allocs: OperatorsReaderAllocations,
) -> Result<(Vec<Instruction>, WasmFunctionData, FuncValidatorAllocations, OperatorsReaderAllocations)> {
    let reader = body.get_binary_reader_for_operators()?;
    let mut reader = OperatorsReader::new_with_allocs(reader, allocs);
    let mut builder = FunctionBuilder::new(validator, local_addr_map);
    while !reader.eof() {
        // Remember the byte offset of the current operator for error reporting.
        builder.position = reader.original_position();
        if let Err(e) = reader.visit_operator(&mut ValidateThenVisit(&mut builder)) {
            cold_path();
            return Err(crate::ParseError::ParseError { message: e.to_string(), offset: builder.position });
        }
    }
    reader.finish()?;
    // Lowering errors are recorded on the builder (first error wins) rather
    // than returned mid-pass; surface the recorded one here.
    if let Some(error) = builder.error {
        return Err(error);
    }
    Ok((builder.instructions, builder.data.finish(), builder.validator.into_allocations(), reader.into_allocations()))
}
/// Lowers validated wasm operators into the interpreter's instruction set.
pub(crate) struct FunctionBuilder<R> {
    validator: FuncValidator<R>,
    // Byte offset of the operator currently being lowered (for error messages).
    position: usize,
    instructions: Vec<Instruction>,
    // Out-of-band v128 constants and branch-table targets.
    data: FunctionDataBuilder,
    // One entry per open block/loop/if.
    ctx_stack: Vec<LoweringCtx>,
    // Maps a wasm local index to its resolved address (indexed in the local
    // visitors as `local_addr_map[idx]`).
    local_addr_map: Vec<u16>,
    // First lowering error encountered; checked after the pass completes.
    error: Option<crate::ParseError>,
}
impl<'a, R: WasmModuleResources> wasmparser::VisitOperator<'a> for FunctionBuilder<R> {
    type Output = ();
    // SIMD opcodes are dispatched to the `VisitSimdOperator` impl further below.
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }
    // Fallback: operators without an explicit handler in this impl are
    // generated by this macro (see `crate::macros::visit`).
    wasmparser::for_each_visit_operator!(impl_visit_operator);
    // Load/store operators: each visitor takes a `MemArg` and emits the
    // corresponding instruction (see `define_mem_operands` in `macros::visit`).
    define_mem_operands! {
        visit_i32_load(I32Load), visit_i64_load(I64Load), visit_f32_load(F32Load), visit_f64_load(F64Load),
        visit_i32_load8_s(I32Load8S), visit_i32_load8_u(I32Load8U), visit_i32_load16_s(I32Load16S),
        visit_i32_load16_u(I32Load16U), visit_i64_load8_s(I64Load8S), visit_i64_load8_u(I64Load8U),
        visit_i64_load16_s(I64Load16S), visit_i64_load16_u(I64Load16U), visit_i64_load32_s(I64Load32S),
        visit_i64_load32_u(I64Load32U), visit_f32_store(F32Store), visit_f64_store(F64Store), visit_i32_store8(I32Store8),
        visit_i32_store16(I32Store16), visit_i64_store8(I64Store8), visit_i64_store16(I64Store16),
        visit_i64_store32(I64Store32), visit_i32_store(I32Store), visit_i64_store(I64Store)
    }
    // 1:1 operator-to-instruction mappings, with optional immediate operands.
    define_operands! {
        // basic instructions
        visit_global_get(GlobalGet, u32), visit_i32_const(Const32, i32), visit_i64_const(Const64, i64), visit_return(Return),
        visit_call(Call, u32), visit_call_indirect(CallIndirect, u32, u32), visit_return_call_indirect(ReturnCallIndirect, u32, u32),
        visit_return_call(ReturnCall, u32), visit_memory_size(MemorySize, u32), visit_memory_grow(MemoryGrow, u32), visit_unreachable(Unreachable),
        visit_nop(Nop), visit_i32_eqz(I32Eqz), visit_i32_eq(I32Eq), visit_i32_ne(I32Ne), visit_i32_lt_s(I32LtS), visit_i32_lt_u(I32LtU),
        visit_i32_gt_s(I32GtS), visit_i32_gt_u(I32GtU), visit_i32_le_s(I32LeS), visit_i32_le_u(I32LeU), visit_i32_ge_s(I32GeS),
        visit_i32_ge_u(I32GeU), visit_i64_eqz(I64Eqz), visit_i64_eq(I64Eq), visit_i64_ne(I64Ne), visit_i64_lt_s(I64LtS), visit_i64_lt_u(I64LtU),
        visit_i64_gt_s(I64GtS), visit_i64_gt_u(I64GtU), visit_i64_le_s(I64LeS), visit_i64_le_u(I64LeU), visit_i64_ge_s(I64GeS), visit_i64_ge_u(I64GeU),
        visit_f32_eq(F32Eq), visit_f32_ne(F32Ne), visit_f32_lt(F32Lt), visit_f32_gt(F32Gt), visit_f32_le(F32Le), visit_f32_ge(F32Ge), visit_f64_eq(F64Eq),
        visit_f64_ne(F64Ne), visit_f64_lt(F64Lt), visit_f64_gt(F64Gt), visit_f64_le(F64Le), visit_f64_ge(F64Ge), visit_i32_clz(I32Clz), visit_i32_ctz(I32Ctz),
        visit_i32_popcnt(I32Popcnt), visit_i32_sub(I32Sub), visit_i32_mul(I32Mul), visit_i32_div_s(I32DivS), visit_i32_div_u(I32DivU), visit_i32_rem_s(I32RemS),
        visit_i32_rem_u(I32RemU), visit_i32_and(I32And), visit_i32_or(I32Or), visit_i32_xor(I32Xor), visit_i32_shl(I32Shl), visit_i32_shr_s(I32ShrS),
        visit_i32_shr_u(I32ShrU), visit_i32_rotl(I32Rotl), visit_i32_rotr(I32Rotr), visit_i64_clz(I64Clz), visit_i64_ctz(I64Ctz), visit_i64_popcnt(I64Popcnt),
        visit_i64_sub(I64Sub), visit_i64_mul(I64Mul), visit_i64_div_s(I64DivS), visit_i64_div_u(I64DivU), visit_i64_rem_s(I64RemS), visit_i64_rem_u(I64RemU),
        visit_i64_and(I64And), visit_i64_or(I64Or), visit_i64_xor(I64Xor), visit_i64_shl(I64Shl), visit_i64_shr_s(I64ShrS), visit_i64_shr_u(I64ShrU),
        visit_i64_rotr(I64Rotr), visit_f32_abs(F32Abs), visit_f32_neg(F32Neg), visit_f32_ceil(F32Ceil), visit_f32_floor(F32Floor), visit_f32_trunc(F32Trunc),
        visit_f32_nearest(F32Nearest), visit_f32_sqrt(F32Sqrt), visit_f32_add(F32Add), visit_f32_sub(F32Sub), visit_f32_mul(F32Mul), visit_f32_div(F32Div),
        visit_f32_min(F32Min), visit_f32_max(F32Max), visit_f32_copysign(F32Copysign), visit_f64_abs(F64Abs), visit_f64_neg(F64Neg), visit_f64_ceil(F64Ceil),
        visit_f64_floor(F64Floor), visit_f64_trunc(F64Trunc), visit_f64_nearest(F64Nearest), visit_f64_sqrt(F64Sqrt), visit_f64_add(F64Add), visit_f64_sub(F64Sub),
        visit_f64_mul(F64Mul), visit_f64_div(F64Div), visit_f64_min(F64Min), visit_f64_max(F64Max), visit_f64_copysign(F64Copysign), visit_i32_wrap_i64(I32WrapI64),
        visit_i32_trunc_f32_s(I32TruncF32S), visit_i32_trunc_f32_u(I32TruncF32U), visit_i32_trunc_f64_s(I32TruncF64S), visit_i32_trunc_f64_u(I32TruncF64U),
        visit_i64_extend_i32_s(I64ExtendI32S), visit_i64_extend_i32_u(I64ExtendI32U), visit_i64_trunc_f32_s(I64TruncF32S), visit_i64_trunc_f32_u(I64TruncF32U),
        visit_i64_trunc_f64_s(I64TruncF64S), visit_i64_trunc_f64_u(I64TruncF64U), visit_f32_convert_i32_s(F32ConvertI32S), visit_f32_convert_i32_u(F32ConvertI32U),
        visit_f32_convert_i64_s(F32ConvertI64S), visit_f32_convert_i64_u(F32ConvertI64U), visit_f32_demote_f64(F32DemoteF64), visit_f64_convert_i32_s(F64ConvertI32S),
        visit_f64_convert_i32_u(F64ConvertI32U), visit_f64_convert_i64_s(F64ConvertI64S), visit_f64_convert_i64_u(F64ConvertI64U), visit_f64_promote_f32(F64PromoteF32),
        visit_i32_add(I32Add), visit_i64_add(I64Add), visit_i64_rotl(I64Rotl),
        // sign_extension
        visit_i32_extend8_s(I32Extend8S), visit_i32_extend16_s(I32Extend16S), visit_i64_extend8_s(I64Extend8S), visit_i64_extend16_s(I64Extend16S),
        visit_i64_extend32_s(I64Extend32S),
        // Non-trapping Float-to-int Conversions
        visit_i32_trunc_sat_f32_s(I32TruncSatF32S), visit_i32_trunc_sat_f32_u(I32TruncSatF32U), visit_i32_trunc_sat_f64_s(I32TruncSatF64S),
        visit_i32_trunc_sat_f64_u(I32TruncSatF64U), visit_i64_trunc_sat_f32_s(I64TruncSatF32S), visit_i64_trunc_sat_f32_u(I64TruncSatF32U),
        visit_i64_trunc_sat_f64_s(I64TruncSatF64S), visit_i64_trunc_sat_f64_u(I64TruncSatF64U),
        // Reference Types
        visit_ref_func(RefFunc, u32), visit_table_fill(TableFill, u32), visit_table_get(TableGet, u32), visit_table_set(TableSet, u32),
        visit_table_grow(TableGrow, u32), visit_table_size(TableSize, u32), visit_ref_is_null(RefIsNull),
        // Bulk Memory
        visit_memory_init(MemoryInit, u32, u32), visit_memory_fill(MemoryFill, u32), visit_table_init(TableInit, u32, u32),
        visit_data_drop(DataDrop, u32), visit_elem_drop(ElemDrop, u32),
        // Wide Arithmetic
        visit_i64_add128(I64Add128), visit_i64_sub128(I64Sub128), visit_i64_mul_wide_s(I64MulWideS), visit_i64_mul_wide_u(I64MulWideU)
    }
fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
    // The operand on top of the validator's stack decides the width variant.
    // An unknown type (unreachable code) emits nothing.
    let Some(Some(ty)) = self.validator.get_operand_type(0) else { return };
    let instr = match operand_size(ty) {
        OperandSize::S32 => Instruction::GlobalSet32(global_index),
        OperandSize::S64 => Instruction::GlobalSet64(global_index),
        OperandSize::S128 => Instruction::GlobalSet128(global_index),
    };
    self.instructions.push(instr);
}
fn visit_drop(&mut self) -> Self::Output {
    // Width of the dropped value comes from the validator; unknown type
    // (unreachable code) emits nothing.
    let Some(Some(ty)) = self.validator.get_operand_type(0) else { return };
    let instr = match operand_size(ty) {
        OperandSize::S32 => Instruction::Drop32,
        OperandSize::S64 => Instruction::Drop64,
        OperandSize::S128 => Instruction::Drop128,
    };
    self.instructions.push(instr);
}
fn visit_select(&mut self) -> Self::Output {
    // Untyped `select`: infer the width from the second operand (one of the
    // two selected values). An unknown type means unreachable code.
    if let Some(Some(ty)) = self.validator.get_operand_type(1) {
        self.visit_typed_select(ty);
    } else {
        self.visit_unreachable();
    }
}
fn visit_local_get(&mut self, idx: u32) -> Self::Output {
    // Resolve the wasm local index to its flattened address first, then
    // pick the width variant from the local's declared type.
    let addr = self.local_addr_map[idx as usize];
    let Some(ty) = self.validator.get_local_type(idx) else { return };
    self.instructions.push(match operand_size(ty) {
        OperandSize::S32 => Instruction::LocalGet32(addr),
        OperandSize::S64 => Instruction::LocalGet64(addr),
        OperandSize::S128 => Instruction::LocalGet128(addr),
    });
}
fn visit_local_set(&mut self, idx: u32) -> Self::Output {
    // Width is taken from the operand being stored; nothing is emitted in
    // unreachable code (unknown operand type).
    let addr = self.local_addr_map[idx as usize];
    let Some(Some(ty)) = self.validator.get_operand_type(0) else { return };
    self.instructions.push(match operand_size(ty) {
        OperandSize::S32 => Instruction::LocalSet32(addr),
        OperandSize::S64 => Instruction::LocalSet64(addr),
        OperandSize::S128 => Instruction::LocalSet128(addr),
    });
}
// `local.tee` with a peephole: if the previous instruction was a matching
// `local.get`, the get+tee pair is rewritten into a direct local-to-local
// `LocalCopy` followed by a `local.get` of the destination, avoiding a
// stack round-trip. Otherwise a plain `LocalTee` is emitted.
fn visit_local_tee(&mut self, idx: u32) -> Self::Output {
    let resolved_idx = self.local_addr_map[idx as usize];
    if let Some(Some(t)) = self.validator.get_operand_type(0) {
        let size = operand_size(t);
        let last = self.instructions.last();
        // The rewrite only applies when the widths of the get and tee match.
        let src = match (size, last) {
            (OperandSize::S32, Some(Instruction::LocalGet32(src))) => Some(*src),
            (OperandSize::S64, Some(Instruction::LocalGet64(src))) => Some(*src),
            (OperandSize::S128, Some(Instruction::LocalGet128(src))) => Some(*src),
            _ => None,
        };
        if let Some(src) = src {
            // Drop the preceding `local.get`; it is subsumed by the copy.
            self.instructions.pop();
            match size {
                OperandSize::S32 => {
                    self.instructions.push(Instruction::LocalCopy32(src, resolved_idx));
                    self.instructions.push(Instruction::LocalGet32(resolved_idx));
                }
                OperandSize::S64 => {
                    self.instructions.push(Instruction::LocalCopy64(src, resolved_idx));
                    self.instructions.push(Instruction::LocalGet64(resolved_idx));
                }
                OperandSize::S128 => {
                    self.instructions.push(Instruction::LocalCopy128(src, resolved_idx));
                    self.instructions.push(Instruction::LocalGet128(resolved_idx));
                }
            }
        } else {
            self.instructions.push(match size {
                OperandSize::S32 => Instruction::LocalTee32(resolved_idx),
                OperandSize::S64 => Instruction::LocalTee64(resolved_idx),
                OperandSize::S128 => Instruction::LocalTee128(resolved_idx),
            })
        }
    }
}
fn visit_block(&mut self, _blockty: wasmparser::BlockType) -> Self::Output {
    // A plain block emits no code; just remember where it starts so forward
    // branches can be patched to the matching `end`.
    let ctx = LoweringCtx {
        kind: BlockKind::Block,
        has_else: false,
        start_ip: self.instructions.len(),
        branch_jumps: Vec::new(),
    };
    self.ctx_stack.push(ctx);
}
fn visit_loop(&mut self, _ty: wasmparser::BlockType) -> Self::Output {
    // prevent superinstructions from merging across block boundaries
    let already_barriered = matches!(self.instructions.last(), Some(Instruction::Nop | Instruction::MergeBarrier));
    if !already_barriered {
        self.instructions.push(Instruction::MergeBarrier);
    }
    // `start_ip` (after the barrier) is the back-edge target of `br` to this loop.
    self.ctx_stack.push(LoweringCtx {
        kind: BlockKind::Loop,
        has_else: false,
        start_ip: self.instructions.len(),
        branch_jumps: Vec::new(),
    });
}
fn visit_if(&mut self, _ty: wasmparser::BlockType) -> Self::Output {
    // Emit the conditional skip with a placeholder target; it gets patched
    // at `else`/`end`. Its ip is stored as the first entry of `branch_jumps`.
    let cond_ip = self.instructions.len();
    self.instructions.push(Instruction::JumpIfZero32(0));
    self.ctx_stack.push(LoweringCtx {
        kind: BlockKind::If,
        has_else: false,
        start_ip: cond_ip + 1,
        branch_jumps: alloc::vec![cond_ip],
    });
}
// `else`: terminate the then-branch with an unconditional jump (patched at
// `end`) and redirect the if's conditional jump to the start of the
// else-branch.
fn visit_else(&mut self) -> Self::Output {
    let last_if = self.ctx_stack.last().filter(|ctx| matches!(ctx.kind, BlockKind::If));
    // `branch_jumps[0]` always holds the `JumpIfZero32` emitted by `visit_if`.
    if let Some(cond_jump_ip) = last_if.map(|ctx| ctx.branch_jumps[0]) {
        let jump_ip = self.instructions.len();
        self.instructions.push(Instruction::Jump(0));
        if let Some(ctx) = self.ctx_stack.last_mut() {
            ctx.has_else = true;
            ctx.branch_jumps.push(jump_ip);
            self.patch_jump_if_zero(cond_jump_ip, self.instructions.len());
            if !matches!(self.instructions.last(), Some(Instruction::Nop | Instruction::MergeBarrier)) {
                self.instructions.push(Instruction::MergeBarrier); // prevent superinstructions from merging across block boundaries
            }
        };
    };
}
fn visit_end(&mut self) -> Self::Output {
    match self.ctx_stack.pop() {
        // End of the function body itself: emit the implicit return.
        None => self.instructions.push(Instruction::Return),
        Some(ctx) => {
            // Resolve every jump recorded for this frame against its end ip.
            self.patch_end_jumps(ctx, self.instructions.len());
            // prevent superinstructions from merging across block boundaries
            if !matches!(self.instructions.last(), Some(Instruction::Nop | Instruction::MergeBarrier)) {
                self.instructions.push(Instruction::MergeBarrier);
            }
        }
    }
}
// `br`: fix up the stack for the target frame, then jump (or return when
// the branch leaves the function body).
fn visit_br(&mut self, depth: u32) -> Self::Output {
    self.emit_dropkeep_to_label(depth);
    self.emit_branch_jump_or_return(depth);
}
// `br_if`: lowered as a `JumpIfZero32` skipping over the branch code. When
// the stack fix-up is a no-op, the skip is inverted into a single
// `JumpIfNonZero32` that jumps straight to the target.
fn visit_br_if(&mut self, depth: u32) -> Self::Output {
    let cond_jump_ip = self.instructions.len();
    self.instructions.push(Instruction::JumpIfZero32(0));
    let branch_side_start = self.instructions.len();
    self.emit_dropkeep_to_label(depth);
    // Fast path: nothing was emitted for the drop/keep, so the conditional
    // jump itself can become the branch and is patched with the frame's jumps.
    if self.instructions.len() == branch_side_start
        && let Some(ctx_idx) = self.get_ctx_idx(depth)
    {
        self.instructions[cond_jump_ip] = Instruction::JumpIfNonZero32(0);
        self.ctx_stack[ctx_idx].branch_jumps.push(cond_jump_ip);
        return;
    }
    self.emit_branch_jump_or_return(depth);
    // The zero-check falls through to right after the branch code.
    self.patch_jump_if_zero(cond_jump_ip, self.instructions.len());
}
fn visit_br_table(&mut self, targets: wasmparser::BrTable<'_>) -> Self::Output {
let ts = targets
.targets()
.collect::<Result<Vec<_>, wasmparser::BinaryReaderError>>()
.expect("visit_br_table: BrTable targets are invalid");
let default_depth = targets.default();
let len = ts.len() as u32;
let target_depths: Vec<u32> = ts;
let header_ip = self.instructions.len();
let branch_table_start = self.data.branch_table_targets.len() as u32;
self.instructions.push(Instruction::BranchTable(0, branch_table_start, len));
struct PadInfo {
depth: u32,
pad_start: usize,
jump_or_ret_ip: usize,
is_return: bool,
}
let mut seen = Vec::<(u32, usize)>::new();
let mut pads: Vec<PadInfo> = Vec::new();
for &depth in target_depths.iter().chain(core::iter::once(&default_depth)) {
if seen.iter().any(|&(seen_depth, _)| seen_depth == depth) {
continue;
}
seen.push((depth, pads.len()));
let (pad_start, jump_or_ret_ip, is_return) = self.emit_br_table_pad(depth);
pads.push(PadInfo { depth, pad_start, jump_or_ret_ip, is_return });
}
for &depth in &target_depths {
let pad_idx = seen
.iter()
.find_map(|&(seen_depth, idx)| (seen_depth == depth).then_some(idx))
.expect("visit_br_table: missing branch table target");
self.data.branch_table_targets.push(pads[pad_idx].pad_start as u32);
}
let default_pad_idx = seen
.iter()
.find_map(|&(seen_depth, idx)| (seen_depth == default_depth).then_some(idx))
.expect("visit_br_table: missing default branch table target");
if let Instruction::BranchTable(default_ip, _, _) = &mut self.instructions[header_ip] {
*default_ip = pads[default_pad_idx].pad_start as u32;
}
for pad in &pads {
if pad.is_return {
continue;
}
self.patch_branch_jump_or_return(pad.depth, pad.jump_or_ret_ip);
}
}
fn visit_f32_const(&mut self, val: wasmparser::Ieee32) -> Self::Output {
    // f32 constants are stored bit-for-bit in a 32-bit const instruction.
    let bits = val.bits() as i32;
    self.instructions.push(Instruction::Const32(bits));
}
fn visit_f64_const(&mut self, val: wasmparser::Ieee64) -> Self::Output {
    // f64 constants are stored bit-for-bit in a 64-bit const instruction.
    let bits = val.bits() as i64;
    self.instructions.push(Instruction::Const64(bits));
}
// table.copy carries both table indices as instruction immediates.
fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
    self.instructions.push(Instruction::TableCopy { dst_table, src_table });
}
// memory.copy carries both memory indices (multi-memory proposal).
fn visit_memory_copy(&mut self, dst_mem: u32, src_mem: u32) -> Self::Output {
    self.instructions.push(Instruction::MemoryCopy { dst_mem, src_mem });
}
// Reference Types
fn visit_ref_null(&mut self, ty: wasmparser::HeapType) -> Self::Output {
match convert_heaptype(ty) {
Ok(ty) => self.instructions.push(Instruction::RefNull(ty)),
Err(err) => {
self.error.get_or_insert(err);
}
};
}
fn visit_typed_select_multi(&mut self, tys: Vec<wasmparser::ValType>) -> Self::Output {
    // Multi-value select: record how many selected values fall into each
    // width class so the interpreter can move the right amounts.
    let (c32, c64, c128) = Self::label_keep_counts(&tys);
    let counts = tinywasm_types::ValueCounts { c32, c64, c128 };
    self.instructions.push(Instruction::SelectMulti(counts));
}
fn visit_typed_select(&mut self, ty: wasmparser::ValType) -> Self::Output {
    // Single-value select: pick the variant matching the value's width.
    let instr = match operand_size(ty) {
        OperandSize::S32 => Instruction::Select32,
        OperandSize::S64 => Instruction::Select64,
        OperandSize::S128 => Instruction::Select128,
    };
    self.instructions.push(instr);
}
// Reinterpret instructions change only the wasm type, never the bits; no
// instruction is emitted since the runtime representation is unchanged.
fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {}
fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {}
fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {}
fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {}
}
// Fallback generator for SIMD visitor methods: each operator is re-dispatched
// with its proposal tag (`@@proposal`). `simd`/`relaxed_simd` operators expand
// to nothing (their handlers come from the `define_*` macros in the impl
// below); any other proposal expands to a stub that records an
// UnsupportedOperator error at lowering time.
// NOTE(review): the `@@…` forms are forwarded through `impl_visit_operator`
// (from `macros::visit`) — confirm whether the matching `@@` rules used are
// those defined there or the local arms below.
macro_rules! impl_visit_simd_operator {
    ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(impl_visit_operator!(@@$proposal $op $({ $($arg: $argty),* })? => $visit ($($ann:tt)*));)*
    };
    (@@simd $($rest:tt)* ) => {};
    (@@relaxed_simd $($rest:tt)* ) => {};
    (@@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*)) => {
        fn $visit(&mut self $($(,$arg: $argty)*)?) {
            self.unsupported(stringify!($visit))
        }
    };
}
impl<R: WasmModuleResources> wasmparser::VisitSimdOperator<'_> for FunctionBuilder<R> {
    // Stubs for SIMD operators not covered by the explicit lists below.
    wasmparser::for_each_visit_simd_operator!(impl_visit_simd_operator);
    // simd
    // SIMD loads/stores: each visitor takes a `MemArg` like the scalar ones.
    define_mem_operands_simd! {
        visit_v128_load(V128Load), visit_v128_load8x8_s(V128Load8x8S), visit_v128_load8x8_u(V128Load8x8U), visit_v128_load16x4_s(V128Load16x4S), visit_v128_load16x4_u(V128Load16x4U), visit_v128_load32x2_s(V128Load32x2S), visit_v128_load32x2_u(V128Load32x2U), visit_v128_load8_splat(V128Load8Splat), visit_v128_load16_splat(V128Load16Splat), visit_v128_load32_splat(V128Load32Splat), visit_v128_load64_splat(V128Load64Splat), visit_v128_load32_zero(V128Load32Zero), visit_v128_load64_zero(V128Load64Zero), visit_v128_store(V128Store)
    }
    // Per-lane loads/stores additionally carry the lane index.
    define_mem_operands_simd_lane! {
        visit_v128_load8_lane(V128Load8Lane), visit_v128_load16_lane(V128Load16Lane), visit_v128_load32_lane(V128Load32Lane), visit_v128_load64_lane(V128Load64Lane),
        visit_v128_store8_lane(V128Store8Lane), visit_v128_store16_lane(V128Store16Lane), visit_v128_store32_lane(V128Store32Lane), visit_v128_store64_lane(V128Store64Lane)
    }
    // 1:1 SIMD operator-to-instruction mappings (lane ops carry a u8 index).
    define_operands! {
        visit_v128_not(V128Not), visit_v128_and(V128And), visit_v128_andnot(V128AndNot), visit_v128_or(V128Or), visit_v128_xor(V128Xor), visit_v128_bitselect(V128Bitselect), visit_v128_any_true(V128AnyTrue),
        visit_i8x16_splat(I8x16Splat), visit_i8x16_swizzle(I8x16Swizzle), visit_i8x16_eq(I8x16Eq), visit_i8x16_ne(I8x16Ne), visit_i8x16_lt_s(I8x16LtS), visit_i8x16_lt_u(I8x16LtU), visit_i8x16_gt_s(I8x16GtS), visit_i8x16_gt_u(I8x16GtU), visit_i8x16_le_s(I8x16LeS), visit_i8x16_le_u(I8x16LeU), visit_i8x16_ge_s(I8x16GeS), visit_i8x16_ge_u(I8x16GeU),
        visit_i16x8_splat(I16x8Splat), visit_i16x8_eq(I16x8Eq), visit_i16x8_ne(I16x8Ne), visit_i16x8_lt_s(I16x8LtS), visit_i16x8_lt_u(I16x8LtU), visit_i16x8_gt_s(I16x8GtS), visit_i16x8_gt_u(I16x8GtU), visit_i16x8_le_s(I16x8LeS), visit_i16x8_le_u(I16x8LeU), visit_i16x8_ge_s(I16x8GeS), visit_i16x8_ge_u(I16x8GeU),
        visit_i32x4_splat(I32x4Splat), visit_i32x4_eq(I32x4Eq), visit_i32x4_ne(I32x4Ne), visit_i32x4_lt_s(I32x4LtS), visit_i32x4_lt_u(I32x4LtU), visit_i32x4_gt_s(I32x4GtS), visit_i32x4_gt_u(I32x4GtU), visit_i32x4_le_s(I32x4LeS), visit_i32x4_le_u(I32x4LeU), visit_i32x4_ge_s(I32x4GeS), visit_i32x4_ge_u(I32x4GeU),
        visit_i64x2_splat(I64x2Splat), visit_i64x2_eq(I64x2Eq), visit_i64x2_ne(I64x2Ne), visit_i64x2_lt_s(I64x2LtS), visit_i64x2_gt_s(I64x2GtS), visit_i64x2_le_s(I64x2LeS), visit_i64x2_ge_s(I64x2GeS),
        visit_f32x4_splat(F32x4Splat), visit_f32x4_eq(F32x4Eq), visit_f32x4_ne(F32x4Ne), visit_f32x4_lt(F32x4Lt), visit_f32x4_gt(F32x4Gt), visit_f32x4_le(F32x4Le), visit_f32x4_ge(F32x4Ge),
        visit_f64x2_splat(F64x2Splat), visit_f64x2_eq(F64x2Eq), visit_f64x2_ne(F64x2Ne), visit_f64x2_lt(F64x2Lt), visit_f64x2_gt(F64x2Gt), visit_f64x2_le(F64x2Le), visit_f64x2_ge(F64x2Ge),
        visit_i8x16_abs(I8x16Abs), visit_i8x16_neg(I8x16Neg), visit_i8x16_all_true(I8x16AllTrue), visit_i8x16_bitmask(I8x16Bitmask), visit_i8x16_shl(I8x16Shl), visit_i8x16_shr_s(I8x16ShrS), visit_i8x16_shr_u(I8x16ShrU), visit_i8x16_add(I8x16Add), visit_i8x16_sub(I8x16Sub), visit_i8x16_min_s(I8x16MinS), visit_i8x16_min_u(I8x16MinU), visit_i8x16_max_s(I8x16MaxS), visit_i8x16_max_u(I8x16MaxU),
        visit_i16x8_abs(I16x8Abs), visit_i16x8_neg(I16x8Neg), visit_i16x8_all_true(I16x8AllTrue), visit_i16x8_bitmask(I16x8Bitmask), visit_i16x8_shl(I16x8Shl), visit_i16x8_shr_s(I16x8ShrS), visit_i16x8_shr_u(I16x8ShrU), visit_i16x8_add(I16x8Add), visit_i16x8_sub(I16x8Sub), visit_i16x8_min_s(I16x8MinS), visit_i16x8_min_u(I16x8MinU), visit_i16x8_max_s(I16x8MaxS), visit_i16x8_max_u(I16x8MaxU),
        visit_i32x4_abs(I32x4Abs), visit_i32x4_neg(I32x4Neg), visit_i32x4_all_true(I32x4AllTrue), visit_i32x4_bitmask(I32x4Bitmask), visit_i32x4_shl(I32x4Shl), visit_i32x4_shr_s(I32x4ShrS), visit_i32x4_shr_u(I32x4ShrU), visit_i32x4_add(I32x4Add), visit_i32x4_sub(I32x4Sub), visit_i32x4_min_s(I32x4MinS), visit_i32x4_min_u(I32x4MinU), visit_i32x4_max_s(I32x4MaxS), visit_i32x4_max_u(I32x4MaxU),
        visit_i64x2_abs(I64x2Abs), visit_i64x2_neg(I64x2Neg), visit_i64x2_all_true(I64x2AllTrue), visit_i64x2_bitmask(I64x2Bitmask), visit_i64x2_shl(I64x2Shl), visit_i64x2_shr_s(I64x2ShrS), visit_i64x2_shr_u(I64x2ShrU), visit_i64x2_add(I64x2Add), visit_i64x2_sub(I64x2Sub), visit_i64x2_mul(I64x2Mul),
        visit_i8x16_narrow_i16x8_s(I8x16NarrowI16x8S), visit_i8x16_narrow_i16x8_u(I8x16NarrowI16x8U), visit_i8x16_add_sat_s(I8x16AddSatS), visit_i8x16_add_sat_u(I8x16AddSatU), visit_i8x16_sub_sat_s(I8x16SubSatS), visit_i8x16_sub_sat_u(I8x16SubSatU), visit_i8x16_avgr_u(I8x16AvgrU),
        visit_i16x8_narrow_i32x4_s(I16x8NarrowI32x4S), visit_i16x8_narrow_i32x4_u(I16x8NarrowI32x4U), visit_i16x8_add_sat_s(I16x8AddSatS), visit_i16x8_add_sat_u(I16x8AddSatU), visit_i16x8_sub_sat_s(I16x8SubSatS), visit_i16x8_sub_sat_u(I16x8SubSatU), visit_i16x8_avgr_u(I16x8AvgrU),
        visit_i16x8_extadd_pairwise_i8x16_s(I16x8ExtAddPairwiseI8x16S), visit_i16x8_extadd_pairwise_i8x16_u(I16x8ExtAddPairwiseI8x16U), visit_i16x8_mul(I16x8Mul),
        visit_i32x4_extadd_pairwise_i16x8_s(I32x4ExtAddPairwiseI16x8S), visit_i32x4_extadd_pairwise_i16x8_u(I32x4ExtAddPairwiseI16x8U), visit_i32x4_mul(I32x4Mul),
        visit_i16x8_extmul_low_i8x16_s(I16x8ExtMulLowI8x16S), visit_i16x8_extmul_low_i8x16_u(I16x8ExtMulLowI8x16U), visit_i16x8_extmul_high_i8x16_s(I16x8ExtMulHighI8x16S), visit_i16x8_extmul_high_i8x16_u(I16x8ExtMulHighI8x16U),
        visit_i32x4_extmul_low_i16x8_s(I32x4ExtMulLowI16x8S), visit_i32x4_extmul_low_i16x8_u(I32x4ExtMulLowI16x8U), visit_i32x4_extmul_high_i16x8_s(I32x4ExtMulHighI16x8S), visit_i32x4_extmul_high_i16x8_u(I32x4ExtMulHighI16x8U),
        visit_i64x2_extmul_low_i32x4_s(I64x2ExtMulLowI32x4S), visit_i64x2_extmul_low_i32x4_u(I64x2ExtMulLowI32x4U), visit_i64x2_extmul_high_i32x4_s(I64x2ExtMulHighI32x4S), visit_i64x2_extmul_high_i32x4_u(I64x2ExtMulHighI32x4U),
        visit_i16x8_extend_low_i8x16_s(I16x8ExtendLowI8x16S), visit_i16x8_extend_low_i8x16_u(I16x8ExtendLowI8x16U), visit_i16x8_extend_high_i8x16_s(I16x8ExtendHighI8x16S), visit_i16x8_extend_high_i8x16_u(I16x8ExtendHighI8x16U),
        visit_i32x4_extend_low_i16x8_s(I32x4ExtendLowI16x8S), visit_i32x4_extend_low_i16x8_u(I32x4ExtendLowI16x8U), visit_i32x4_extend_high_i16x8_s(I32x4ExtendHighI16x8S), visit_i32x4_extend_high_i16x8_u(I32x4ExtendHighI16x8U),
        visit_i64x2_extend_low_i32x4_s(I64x2ExtendLowI32x4S), visit_i64x2_extend_low_i32x4_u(I64x2ExtendLowI32x4U), visit_i64x2_extend_high_i32x4_s(I64x2ExtendHighI32x4S), visit_i64x2_extend_high_i32x4_u(I64x2ExtendHighI32x4U),
        visit_i8x16_popcnt(I8x16Popcnt), visit_i16x8_q15mulr_sat_s(I16x8Q15MulrSatS), visit_i32x4_dot_i16x8_s(I32x4DotI16x8S),
        visit_f32x4_ceil(F32x4Ceil), visit_f32x4_floor(F32x4Floor), visit_f32x4_trunc(F32x4Trunc), visit_f32x4_nearest(F32x4Nearest), visit_f32x4_abs(F32x4Abs), visit_f32x4_neg(F32x4Neg), visit_f32x4_sqrt(F32x4Sqrt), visit_f32x4_add(F32x4Add), visit_f32x4_sub(F32x4Sub), visit_f32x4_mul(F32x4Mul), visit_f32x4_div(F32x4Div), visit_f32x4_min(F32x4Min), visit_f32x4_max(F32x4Max), visit_f32x4_pmin(F32x4PMin), visit_f32x4_pmax(F32x4PMax),
        visit_f64x2_ceil(F64x2Ceil), visit_f64x2_floor(F64x2Floor), visit_f64x2_trunc(F64x2Trunc), visit_f64x2_nearest(F64x2Nearest), visit_f64x2_abs(F64x2Abs), visit_f64x2_neg(F64x2Neg), visit_f64x2_sqrt(F64x2Sqrt), visit_f64x2_add(F64x2Add), visit_f64x2_sub(F64x2Sub), visit_f64x2_mul(F64x2Mul), visit_f64x2_div(F64x2Div), visit_f64x2_min(F64x2Min), visit_f64x2_max(F64x2Max), visit_f64x2_pmin(F64x2PMin), visit_f64x2_pmax(F64x2PMax),
        visit_i32x4_trunc_sat_f32x4_s(I32x4TruncSatF32x4S), visit_i32x4_trunc_sat_f32x4_u(I32x4TruncSatF32x4U),
        visit_f32x4_convert_i32x4_s(F32x4ConvertI32x4S), visit_f32x4_convert_i32x4_u(F32x4ConvertI32x4U),
        visit_i32x4_trunc_sat_f64x2_s_zero(I32x4TruncSatF64x2SZero), visit_i32x4_trunc_sat_f64x2_u_zero(I32x4TruncSatF64x2UZero),
        visit_f64x2_convert_low_i32x4_s(F64x2ConvertLowI32x4S), visit_f64x2_convert_low_i32x4_u(F64x2ConvertLowI32x4U),
        visit_f32x4_demote_f64x2_zero(F32x4DemoteF64x2Zero), visit_f64x2_promote_low_f32x4(F64x2PromoteLowF32x4),
        visit_i8x16_relaxed_swizzle(I8x16RelaxedSwizzle),
        visit_i32x4_relaxed_trunc_f32x4_s(I32x4RelaxedTruncF32x4S), visit_i32x4_relaxed_trunc_f32x4_u(I32x4RelaxedTruncF32x4U),
        visit_i32x4_relaxed_trunc_f64x2_s_zero(I32x4RelaxedTruncF64x2SZero), visit_i32x4_relaxed_trunc_f64x2_u_zero(I32x4RelaxedTruncF64x2UZero),
        visit_f32x4_relaxed_madd(F32x4RelaxedMadd), visit_f32x4_relaxed_nmadd(F32x4RelaxedNmadd),
        visit_f64x2_relaxed_madd(F64x2RelaxedMadd), visit_f64x2_relaxed_nmadd(F64x2RelaxedNmadd),
        visit_i8x16_relaxed_laneselect(I8x16RelaxedLaneselect), visit_i16x8_relaxed_laneselect(I16x8RelaxedLaneselect),
        visit_i32x4_relaxed_laneselect(I32x4RelaxedLaneselect), visit_i64x2_relaxed_laneselect(I64x2RelaxedLaneselect),
        visit_f32x4_relaxed_min(F32x4RelaxedMin), visit_f32x4_relaxed_max(F32x4RelaxedMax),
        visit_f64x2_relaxed_min(F64x2RelaxedMin), visit_f64x2_relaxed_max(F64x2RelaxedMax),
        visit_i16x8_relaxed_q15mulr_s(I16x8RelaxedQ15mulrS),
        visit_i16x8_relaxed_dot_i8x16_i7x16_s(I16x8RelaxedDotI8x16I7x16S),
        visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(I32x4RelaxedDotI8x16I7x16AddS),
        visit_i8x16_extract_lane_s(I8x16ExtractLaneS, u8), visit_i8x16_extract_lane_u(I8x16ExtractLaneU, u8), visit_i8x16_replace_lane(I8x16ReplaceLane, u8),
        visit_i16x8_extract_lane_s(I16x8ExtractLaneS, u8), visit_i16x8_extract_lane_u(I16x8ExtractLaneU, u8), visit_i16x8_replace_lane(I16x8ReplaceLane, u8),
        visit_i32x4_extract_lane(I32x4ExtractLane, u8), visit_i32x4_replace_lane(I32x4ReplaceLane, u8),
        visit_i64x2_extract_lane(I64x2ExtractLane, u8), visit_i64x2_replace_lane(I64x2ReplaceLane, u8),
        visit_f32x4_extract_lane(F32x4ExtractLane, u8), visit_f32x4_replace_lane(F32x4ReplaceLane, u8),
        visit_f64x2_extract_lane(F64x2ExtractLane, u8), visit_f64x2_replace_lane(F64x2ReplaceLane, u8)
    }
fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
    // The lane permutation is stored out-of-band in the v128 constant pool;
    // the instruction only carries its index.
    let const_idx = self.data.v128_constants.len() as u32;
    self.data.v128_constants.push(lanes);
    self.instructions.push(Instruction::I8x16Shuffle(const_idx));
}
fn visit_v128_const(&mut self, value: wasmparser::V128) -> Self::Output {
    // v128 constants live in the out-of-band constant pool; the instruction
    // references them by index.
    let const_idx = self.data.v128_constants.len() as u32;
    self.data.v128_constants.push(*value.bytes());
    self.instructions.push(Instruction::Const128(const_idx));
}
}
impl<R: WasmModuleResources> FunctionBuilder<R> {
/// Create a builder for one function body. The capacities are rough guesses
/// that avoid early reallocations for typical function bodies.
pub(crate) fn new(validator: FuncValidator<R>, local_addr_map: Vec<u16>) -> Self {
    Self {
        validator,
        position: 0,
        instructions: Vec::with_capacity(1024),
        data: FunctionDataBuilder::default(),
        ctx_stack: Vec::with_capacity(256),
        local_addr_map,
        error: None,
    }
}
/// Record a lowering error; the first recorded error wins, later ones are
/// silently dropped (it is surfaced after the pass completes).
fn record_error(&mut self, error: crate::ParseError) {
    self.error.get_or_insert(error);
}
/// Count, per width class, the operands that sit *below* the control frame
/// `depth` frames up — i.e. the stack base the frame was entered with.
/// Used to compute drop/keep amounts for branches.
fn stack_base_at_frame(&self, depth: usize) -> StackBase {
    let Some(frame) = self.validator.get_control_frame(depth) else {
        return StackBase::default();
    };
    let mut base = StackBase::default();
    let stack_height = self.validator.operand_stack_height() as usize;
    // `frame.height` operands lie at the bottom of the stack, but
    // `get_operand_type` indexes from the top — hence the inversion.
    for i in 0..frame.height {
        let depth_from_top = stack_height - 1 - i;
        if let Some(Some(ty)) = self.validator.get_operand_type(depth_from_top) {
            match operand_size(ty) {
                OperandSize::S32 => base.s32 += 1,
                OperandSize::S64 => base.s64 += 1,
                OperandSize::S128 => base.s128 += 1,
            }
        }
    }
    base
}
/// Record an UnsupportedOperator error; called by the operator macros for
/// proposals this engine does not implement.
fn unsupported(&mut self, name: &str) {
    let err = crate::ParseError::UnsupportedOperator(name.to_string());
    self.record_error(err);
}
/// Whether the current code position is unreachable according to the
/// validator; a missing innermost frame also counts as unreachable.
fn is_unreachable(&self) -> bool {
    match self.validator.get_control_frame(0) {
        Some(frame) => frame.unreachable,
        None => true,
    }
}
/// Translate a relative branch depth (0 = innermost frame) into an index
/// into `ctx_stack`; `None` means the branch targets the function body itself.
fn get_ctx_idx(&self, depth: u32) -> Option<usize> {
    self.ctx_stack.len().checked_sub(depth as usize + 1)
}
/// Emit the stack fix-up for a branch: shrink each value stack down to its
/// `base` while keeping the top `c32`/`c64`/`c128` values (the label results).
fn emit_dropkeep(&mut self, base: StackBase, c32: u16, c64: u16, c128: u16) {
    // Nothing below the frame and nothing to keep: no fix-up needed.
    if base.s32 == 0 && c32 == 0 && base.s64 == 0 && c64 == 0 && base.s128 == 0 && c128 == 0 {
        return;
    }
    // NOTE(review): the bases are range-checked against u8::MAX even though
    // they are passed through unchanged below — confirm whether the packed
    // `DropKeep` encoding actually requires that of the base fields.
    let fits_u8 = base.s32 <= u8::MAX as u16
        && c32 <= u8::MAX as u16
        && base.s64 <= u8::MAX as u16
        && c64 <= u8::MAX as u16
        && base.s128 <= u8::MAX as u16
        && c128 <= u8::MAX as u16;
    if fits_u8 {
        // Compact encoding: one instruction covering all three stacks.
        self.instructions.push(Instruction::DropKeep {
            base32: base.s32,
            keep32: c32 as u8,
            base64: base.s64,
            keep64: c64 as u8,
            base128: base.s128,
            keep128: c128 as u8,
        });
    } else {
        // Wide fallback: one instruction per stack with full u16 operands.
        self.instructions.push(Instruction::DropKeep32(base.s32, c32));
        self.instructions.push(Instruction::DropKeep64(base.s64, c64));
        self.instructions.push(Instruction::DropKeep128(base.s128, c128));
    }
}
/// Point the jump at `jump_ip` to `target`. Only unconditional jumps and
/// `JumpIfNonZero32` are patched; any other instruction is left untouched.
fn patch_jump(&mut self, jump_ip: usize, target: usize) {
    if let Instruction::Jump(ip) | Instruction::JumpIfNonZero32(ip) = &mut self.instructions[jump_ip] {
        *ip = target as u32;
    }
}
/// Point the `JumpIfZero32` at `jump_ip` to `target`; other instructions
/// are left untouched.
fn patch_jump_if_zero(&mut self, jump_ip: usize, target: usize) {
    match &mut self.instructions[jump_ip] {
        Instruction::JumpIfZero32(ip) => *ip = target as u32,
        _ => {}
    }
}
/// Count how many of the given label types fall into each width class,
/// returned as `(c32, c64, c128)`.
fn label_keep_counts(label_types: &[wasmparser::ValType]) -> (u16, u16, u16) {
    label_types.iter().fold((0, 0, 0), |(c32, c64, c128), ty| match operand_size(*ty) {
        OperandSize::S32 => (c32 + 1, c64, c128),
        OperandSize::S64 => (c32, c64 + 1, c128),
        OperandSize::S128 => (c32, c64, c128 + 1),
    })
}
/// Number of values (per width class) a branch to `frame`'s label must keep:
/// a loop's label receives the block's parameters, any other label its results.
fn label_keep_counts_for_frame(&self, frame: &wasmparser::Frame) -> (u16, u16, u16) {
    match &frame.block_type {
        wasmparser::BlockType::Empty => (0, 0, 0),
        wasmparser::BlockType::Type(ty) => match frame.kind {
            // Shorthand block types have no parameters, so a loop label keeps nothing.
            FrameKind::Loop => (0, 0, 0),
            _ => Self::label_keep_counts(core::slice::from_ref(ty)),
        },
        wasmparser::BlockType::FuncType(idx) => {
            let sub_type = self.validator.resources().sub_type_at(*idx);
            let func_ty = match sub_type {
                Some(st) => st.composite_type.unwrap_func(),
                // Unknown type index: validation rejects this; keep nothing.
                None => return (0, 0, 0),
            };
            match frame.kind {
                FrameKind::Loop => Self::label_keep_counts(func_ty.params()),
                _ => Self::label_keep_counts(func_ty.results()),
            }
        }
    }
}
/// Emit the drop/keep fix-up needed before branching to the label at
/// `label_depth`. Does nothing in unreachable code or for a missing frame.
fn emit_dropkeep_to_label(&mut self, label_depth: u32) {
    if self.is_unreachable() {
        return;
    }
    if let Some(frame) = self.validator.get_control_frame(label_depth as usize) {
        let (c32, c64, c128) = self.label_keep_counts_for_frame(frame);
        let base = self.stack_base_at_frame(label_depth as usize);
        self.emit_dropkeep(base, c32, c64, c128);
    }
}
/// Emit the transfer of control for a branch to `depth`: a placeholder jump
/// registered with the target frame (patched at its `end`), or a `Return`
/// when the branch leaves the function body.
fn emit_branch_jump_or_return(&mut self, depth: u32) {
    match self.get_ctx_idx(depth) {
        None => self.instructions.push(Instruction::Return),
        Some(ctx_idx) => {
            let jump_ip = self.instructions.len();
            self.instructions.push(Instruction::Jump(0));
            self.ctx_stack[ctx_idx].branch_jumps.push(jump_ip);
        }
    }
}
/// Emit one branch-table "pad": the drop/keep fix-up for `depth` followed by
/// a placeholder jump. Returns `(pad_start, jump_or_ret_ip, is_return)`;
/// a pad that leaves the function (or sits in unreachable code) ends in a
/// `Return` and needs no later patching.
fn emit_br_table_pad(&mut self, depth: u32) -> (usize, usize, bool) {
    let pad_start = self.instructions.len();
    let frame = if self.is_unreachable() { None } else { self.validator.get_control_frame(depth as usize) };
    let Some(frame) = frame else {
        let ip = self.instructions.len();
        self.instructions.push(Instruction::Return);
        return (pad_start, ip, true);
    };
    let base = self.stack_base_at_frame(depth as usize);
    let (c32, c64, c128) = self.label_keep_counts_for_frame(frame);
    self.emit_dropkeep(base, c32, c64, c128);
    let jump_ip = self.instructions.len();
    self.instructions.push(Instruction::Jump(0));
    (pad_start, jump_ip, false)
}
/// Resolve the placeholder at `jump_ip` for a branch to `depth`: loops are
/// back-patched immediately (their start ip is known), other frames collect
/// the jump for patching at their `end`, and a missing frame/ctx degrades
/// to a `Return`.
fn patch_branch_jump_or_return(&mut self, depth: u32, jump_ip: usize) {
    let Some(frame) = self.validator.get_control_frame(depth as usize) else {
        self.instructions[jump_ip] = Instruction::Return;
        return;
    };
    let Some(ctx_idx) = self.get_ctx_idx(depth) else {
        self.instructions[jump_ip] = Instruction::Return;
        return;
    };
    match frame.kind {
        FrameKind::Loop => self.patch_jump(jump_ip, self.ctx_stack[ctx_idx].start_ip),
        _ => self.ctx_stack[ctx_idx].branch_jumps.push(jump_ip),
    }
}
/// Resolve every jump recorded for a finished frame. Blocks and ifs jump
/// forward to `end_ip`; loops jump back to their start. For an `if`, the
/// first recorded jump is always the conditional from `visit_if`: without an
/// `else` it must target the end (with an `else` it was already patched).
fn patch_end_jumps(&mut self, ctx: LoweringCtx, end_ip: usize) {
    match ctx.kind {
        BlockKind::Block | BlockKind::Loop => {
            let target = if matches!(ctx.kind, BlockKind::Loop) { ctx.start_ip } else { end_ip };
            for jump_ip in ctx.branch_jumps {
                self.patch_jump(jump_ip, target);
            }
        }
        BlockKind::If => {
            if let Some((&cond_jump_ip, branch_jumps)) = ctx.branch_jumps.split_first() {
                if !ctx.has_else {
                    self.patch_jump_if_zero(cond_jump_ip, end_ip);
                }
                for &jump_ip in branch_jumps {
                    self.patch_jump(jump_ip, end_ip);
                }
            }
        }
    }
}
}