wasmer_clif_fork_wasm/
code_translator.rs

1//! This module contains the bulk of the interesting code performing the translation between
2//! WebAssembly and Cranelift IR.
3//!
4//! The translation is done in one pass, opcode by opcode. Two main data structures are used during
5//! code translation: the value stack and the control stack. The value stack mimics the execution
6//! of the WebAssembly stack machine: each instruction result is pushed onto the stack and
7//! instruction arguments are popped off the stack. Similarly, when encountering a control flow
8//! block, it is pushed onto the control stack and popped off when encountering the corresponding
9//! `End`.
10//!
11//! Another data structure, the translation state, records the unreachable-code status and
12//! whether a return needs to be inserted at the end of the function.
13//!
14//! Some of the WebAssembly instructions need information about the environment for which they
15//! are being translated:
16//!
17//! - the loads and stores need the memory base address;
18//! - the `get_global` and `set_global` instructions depend on how the globals are implemented;
19//! - `memory.size` and `memory.grow` are runtime functions;
20//! - `call_indirect` has to translate the function index into the address of the
21//!    corresponding function;
22//!
23//! That is why `translate_function_body` takes an object implementing the `FuncEnvironment`
24//! trait as argument.
25use super::{hash_map, HashMap};
26use crate::environ::{FuncEnvironment, GlobalVariable, ReturnMode, WasmResult};
27use crate::state::{ControlStackFrame, ElseData, FuncTranslationState, ModuleTranslationState};
28use crate::translation_utils::{
29    block_with_params, blocktype_params_results, f32_translation, f64_translation,
30};
31use crate::translation_utils::{FuncIndex, GlobalIndex, MemoryIndex, SignatureIndex, TableIndex};
32use crate::wasm_unsupported;
33use core::{i32, u32};
34use cranelift_codegen::ir::condcodes::{FloatCC, IntCC};
35use cranelift_codegen::ir::types::*;
36use cranelift_codegen::ir::{
37    self, ConstantData, InstBuilder, JumpTableData, MemFlags, Value, ValueLabel,
38};
39use cranelift_codegen::packed_option::ReservedValue;
40use wasmer_clif_fork_frontend::{FunctionBuilder, Variable};
41use std::vec::Vec;
42use wasmparser::{MemoryImmediate, Operator};
43
44// Clippy warns about "flags: _" but its important to document that the flags field is ignored
45#[cfg_attr(
46    feature = "cargo-clippy",
47    allow(clippy::unneeded_field_pattern, clippy::cognitive_complexity)
48)]
49/// Translates wasm operators into Cranelift IR instructions. Returns `true` if it inserted
50/// a return.
51pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
52    module_translation_state: &ModuleTranslationState,
53    op: &Operator,
54    builder: &mut FunctionBuilder,
55    state: &mut FuncTranslationState,
56    environ: &mut FE,
57) -> WasmResult<()> {
58    if !state.reachable {
59        translate_unreachable_operator(module_translation_state, &op, builder, state, environ)?;
60        return Ok(());
61    }
62
63    // This big match treats all Wasm code operators.
64    match op {
65        /********************************** Locals ****************************************
66         *  `get_local` and `set_local` are treated as non-SSA variables and will completely
67         *  disappear in the Cranelift Code
68         ***********************************************************************************/
69        Operator::LocalGet { local_index } => {
70            let val = builder.use_var(Variable::with_u32(*local_index));
71            state.push1(val);
72            let label = ValueLabel::from_u32(*local_index);
73            builder.set_val_label(val, label);
74        }
75        Operator::LocalSet { local_index } => {
76            let val = state.pop1();
77            builder.def_var(Variable::with_u32(*local_index), val);
78            let label = ValueLabel::from_u32(*local_index);
79            builder.set_val_label(val, label);
80        }
81        Operator::LocalTee { local_index } => {
82            let val = state.peek1();
83            builder.def_var(Variable::with_u32(*local_index), val);
84            let label = ValueLabel::from_u32(*local_index);
85            builder.set_val_label(val, label);
86        }
87        /********************************** Globals ****************************************
88         *  `get_global` and `set_global` are handled by the environment.
89         ***********************************************************************************/
90        Operator::GlobalGet { global_index } => {
91            let val = match state.get_global(builder.func, *global_index, environ)? {
92                GlobalVariable::Const(val) => val,
93                GlobalVariable::Memory { gv, offset, ty } => {
94                    let addr = builder.ins().global_value(environ.pointer_type(), gv);
95                    let flags = ir::MemFlags::trusted();
96                    builder.ins().load(ty, flags, addr, offset)
97                }
98                GlobalVariable::Custom => environ.translate_custom_global_get(
99                    builder.cursor(),
100                    GlobalIndex::from_u32(*global_index),
101                )?,
102            };
103            state.push1(val);
104        }
105        Operator::GlobalSet { global_index } => {
106            match state.get_global(builder.func, *global_index, environ)? {
107                GlobalVariable::Const(_) => panic!("global #{} is a constant", *global_index),
108                GlobalVariable::Memory { gv, offset, ty } => {
109                    let addr = builder.ins().global_value(environ.pointer_type(), gv);
110                    let flags = ir::MemFlags::trusted();
111                    let val = state.pop1();
112                    debug_assert_eq!(ty, builder.func.dfg.value_type(val));
113                    builder.ins().store(flags, val, addr, offset);
114                }
115                GlobalVariable::Custom => {
116                    let val = state.pop1();
117                    environ.translate_custom_global_set(
118                        builder.cursor(),
119                        GlobalIndex::from_u32(*global_index),
120                        val,
121                    )?;
122                }
123            }
124        }
125        /********************************* Stack misc ***************************************
126         *  `drop`, `nop`, `unreachable` and `select`.
127         ***********************************************************************************/
128        Operator::Drop => {
129            state.pop1();
130        }
131        Operator::Select => {
132            let (arg1, arg2, cond) = state.pop3();
133            state.push1(builder.ins().select(cond, arg1, arg2));
134        }
135        Operator::TypedSelect { ty: _ } => {
136            // We ignore the explicit type parameter as it is only needed for
137            // validation, which we require to have been performed before
138            // translation.
139            let (arg1, arg2, cond) = state.pop3();
140            state.push1(builder.ins().select(cond, arg1, arg2));
141        }
142        Operator::Nop => {
143            // We do nothing
144        }
145        Operator::Unreachable => {
146            builder.ins().trap(ir::TrapCode::UnreachableCodeReached);
147            state.reachable = false;
148        }
149        /***************************** Control flow blocks **********************************
150         *  When starting a control flow block, we create a new `Block` that will hold the code
151         *  after the block, and we push a frame on the control stack. Depending on the type
152         *  of block, we create a new `Block` for the body of the block with an associated
153         *  jump instruction.
154         *
155         *  The `End` instruction pops the last control frame from the control stack, seals
156         *  the destination block (since `br` instructions targeting it only appear inside the
157         *  block and have already been translated) and modify the value stack to use the
158         *  possible `Block`'s arguments values.
159         ***********************************************************************************/
160        Operator::Block { ty } => {
161            let (params, results) = blocktype_params_results(module_translation_state, *ty)?;
162            let next = block_with_params(builder, results, environ)?;
163            state.push_block(next, params.len(), results.len());
164        }
165        Operator::Loop { ty } => {
166            let (params, results) = blocktype_params_results(module_translation_state, *ty)?;
167            let loop_body = block_with_params(builder, params, environ)?;
168            let next = block_with_params(builder, results, environ)?;
169            builder.ins().jump(loop_body, state.peekn(params.len()));
170            state.push_loop(loop_body, next, params.len(), results.len());
171
172            // Pop the initial `Block` actuals and replace them with the `Block`'s
173            // params since control flow joins at the top of the loop.
174            state.popn(params.len());
175            state
176                .stack
177                .extend_from_slice(builder.block_params(loop_body));
178
179            builder.switch_to_block(loop_body);
180            environ.translate_loop_header(builder.cursor())?;
181        }
182        Operator::If { ty } => {
183            let val = state.pop1();
184
185            let (params, results) = blocktype_params_results(module_translation_state, *ty)?;
186            let (destination, else_data) = if params == results {
187                // It is possible there is no `else` block, so we will only
188                // allocate an block for it if/when we find the `else`. For now,
189                // we if the condition isn't true, then we jump directly to the
190                // destination block following the whole `if...end`. If we do end
191                // up discovering an `else`, then we will allocate an block for it
192                // and go back and patch the jump.
193                let destination = block_with_params(builder, results, environ)?;
194                let branch_inst = builder
195                    .ins()
196                    .brz(val, destination, state.peekn(params.len()));
197                (destination, ElseData::NoElse { branch_inst })
198            } else {
199                // The `if` type signature is not valid without an `else` block,
200                // so we eagerly allocate the `else` block here.
201                let destination = block_with_params(builder, results, environ)?;
202                let else_block = block_with_params(builder, params, environ)?;
203                builder
204                    .ins()
205                    .brz(val, else_block, state.peekn(params.len()));
206                builder.seal_block(else_block);
207                (destination, ElseData::WithElse { else_block })
208            };
209
210            let next_block = builder.create_block();
211            builder.ins().jump(next_block, &[]);
212            builder.seal_block(next_block); // Only predecessor is the current block.
213            builder.switch_to_block(next_block);
214
215            // Here we append an argument to an Block targeted by an argumentless jump instruction
216            // But in fact there are two cases:
217            // - either the If does not have a Else clause, in that case ty = EmptyBlock
218            //   and we add nothing;
219            // - either the If have an Else clause, in that case the destination of this jump
220            //   instruction will be changed later when we translate the Else operator.
221            state.push_if(destination, else_data, params.len(), results.len(), *ty);
222        }
223        Operator::Else => {
224            let i = state.control_stack.len() - 1;
225            match state.control_stack[i] {
226                ControlStackFrame::If {
227                    ref else_data,
228                    head_is_reachable,
229                    ref mut consequent_ends_reachable,
230                    num_return_values,
231                    blocktype,
232                    destination,
233                    ..
234                } => {
235                    // We finished the consequent, so record its final
236                    // reachability state.
237                    debug_assert!(consequent_ends_reachable.is_none());
238                    *consequent_ends_reachable = Some(state.reachable);
239
240                    if head_is_reachable {
241                        // We have a branch from the head of the `if` to the `else`.
242                        state.reachable = true;
243
244                        // Ensure we have an block for the `else` block (it may have
245                        // already been pre-allocated, see `ElseData` for details).
246                        let else_block = match *else_data {
247                            ElseData::NoElse { branch_inst } => {
248                                let (params, _results) =
249                                    blocktype_params_results(module_translation_state, blocktype)?;
250                                debug_assert_eq!(params.len(), num_return_values);
251                                let else_block = block_with_params(builder, params, environ)?;
252                                builder.ins().jump(destination, state.peekn(params.len()));
253                                state.popn(params.len());
254
255                                builder.change_jump_destination(branch_inst, else_block);
256                                builder.seal_block(else_block);
257                                else_block
258                            }
259                            ElseData::WithElse { else_block } => {
260                                builder
261                                    .ins()
262                                    .jump(destination, state.peekn(num_return_values));
263                                state.popn(num_return_values);
264                                else_block
265                            }
266                        };
267
268                        // You might be expecting that we push the parameters for this
269                        // `else` block here, something like this:
270                        //
271                        //     state.pushn(&control_stack_frame.params);
272                        //
273                        // We don't do that because they are already on the top of the stack
274                        // for us: we pushed the parameters twice when we saw the initial
275                        // `if` so that we wouldn't have to save the parameters in the
276                        // `ControlStackFrame` as another `Vec` allocation.
277
278                        builder.switch_to_block(else_block);
279
280                        // We don't bother updating the control frame's `ElseData`
281                        // to `WithElse` because nothing else will read it.
282                    }
283                }
284                _ => unreachable!(),
285            }
286        }
287        Operator::End => {
288            let frame = state.control_stack.pop().unwrap();
289            let next_block = frame.following_code();
290
291            if !builder.is_unreachable() || !builder.is_pristine() {
292                let return_count = frame.num_return_values();
293                let return_args = state.peekn_mut(return_count);
294                let next_block_types = builder.func.dfg.block_param_types(next_block);
295                bitcast_arguments(return_args, &next_block_types, builder);
296                builder.ins().jump(frame.following_code(), return_args);
297                // You might expect that if we just finished an `if` block that
298                // didn't have a corresponding `else` block, then we would clean
299                // up our duplicate set of parameters that we pushed earlier
300                // right here. However, we don't have to explicitly do that,
301                // since we truncate the stack back to the original height
302                // below.
303            }
304            builder.switch_to_block(next_block);
305            builder.seal_block(next_block);
306            // If it is a loop we also have to seal the body loop block
307            if let ControlStackFrame::Loop { header, .. } = frame {
308                builder.seal_block(header)
309            }
310            state.stack.truncate(frame.original_stack_size());
311            state
312                .stack
313                .extend_from_slice(builder.block_params(next_block));
314        }
315        /**************************** Branch instructions *********************************
316         * The branch instructions all have as arguments a target nesting level, which
317         * corresponds to how many control stack frames do we have to pop to get the
318         * destination `Block`.
319         *
320         * Once the destination `Block` is found, we sometimes have to declare a certain depth
321         * of the stack unreachable, because some branch instructions are terminator.
322         *
323         * The `br_table` case is much more complicated because Cranelift's `br_table` instruction
324         * does not support jump arguments like all the other branch instructions. That is why, in
325         * the case where we would use jump arguments for every other branch instruction, we
326         * need to split the critical edges leaving the `br_tables` by creating one `Block` per
327         * table destination; the `br_table` will point to these newly created `Blocks` and these
328         * `Block`s contain only a jump instruction pointing to the final destination, this time with
329         * jump arguments.
330         *
331         * This system is also implemented in Cranelift's SSA construction algorithm, because
332         * `use_var` located in a destination `Block` of a `br_table` might trigger the addition
333         * of jump arguments in each predecessor branch instruction, one of which might be a
334         * `br_table`.
335         ***********************************************************************************/
336        Operator::Br { relative_depth } => {
337            let i = state.control_stack.len() - 1 - (*relative_depth as usize);
338            let (return_count, br_destination) = {
339                let frame = &mut state.control_stack[i];
340                // We signal that all the code that follows until the next End is unreachable
341                frame.set_branched_to_exit();
342                let return_count = if frame.is_loop() {
343                    0
344                } else {
345                    frame.num_return_values()
346                };
347                (return_count, frame.br_destination())
348            };
349
350            // Bitcast any vector arguments to their default type, I8X16, before jumping.
351            let destination_args = state.peekn_mut(return_count);
352            let destination_types = builder.func.dfg.block_param_types(br_destination);
353            bitcast_arguments(
354                destination_args,
355                &destination_types[..return_count],
356                builder,
357            );
358
359            builder.ins().jump(br_destination, destination_args);
360            state.popn(return_count);
361            state.reachable = false;
362        }
363        Operator::BrIf { relative_depth } => translate_br_if(*relative_depth, builder, state),
364        Operator::BrTable { table } => {
365            let (depths, default) = table.read_table()?;
366            let mut min_depth = default;
367            for depth in &*depths {
368                if *depth < min_depth {
369                    min_depth = *depth;
370                }
371            }
372            let jump_args_count = {
373                let i = state.control_stack.len() - 1 - (min_depth as usize);
374                let min_depth_frame = &state.control_stack[i];
375                if min_depth_frame.is_loop() {
376                    0
377                } else {
378                    min_depth_frame.num_return_values()
379                }
380            };
381            let val = state.pop1();
382            let mut data = JumpTableData::with_capacity(depths.len());
383            if jump_args_count == 0 {
384                // No jump arguments
385                for depth in &*depths {
386                    let block = {
387                        let i = state.control_stack.len() - 1 - (*depth as usize);
388                        let frame = &mut state.control_stack[i];
389                        frame.set_branched_to_exit();
390                        frame.br_destination()
391                    };
392                    data.push_entry(block);
393                }
394                let jt = builder.create_jump_table(data);
395                let block = {
396                    let i = state.control_stack.len() - 1 - (default as usize);
397                    let frame = &mut state.control_stack[i];
398                    frame.set_branched_to_exit();
399                    frame.br_destination()
400                };
401                builder.ins().br_table(val, block, jt);
402            } else {
403                // Here we have jump arguments, but Cranelift's br_table doesn't support them
404                // We then proceed to split the edges going out of the br_table
405                let return_count = jump_args_count;
406                let mut dest_block_sequence = vec![];
407                let mut dest_block_map = HashMap::new();
408                for depth in &*depths {
409                    let branch_block = match dest_block_map.entry(*depth as usize) {
410                        hash_map::Entry::Occupied(entry) => *entry.get(),
411                        hash_map::Entry::Vacant(entry) => {
412                            let block = builder.create_block();
413                            dest_block_sequence.push((*depth as usize, block));
414                            *entry.insert(block)
415                        }
416                    };
417                    data.push_entry(branch_block);
418                }
419                let default_branch_block = match dest_block_map.entry(default as usize) {
420                    hash_map::Entry::Occupied(entry) => *entry.get(),
421                    hash_map::Entry::Vacant(entry) => {
422                        let block = builder.create_block();
423                        dest_block_sequence.push((default as usize, block));
424                        *entry.insert(block)
425                    }
426                };
427                let jt = builder.create_jump_table(data);
428                builder.ins().br_table(val, default_branch_block, jt);
429                for (depth, dest_block) in dest_block_sequence {
430                    builder.switch_to_block(dest_block);
431                    builder.seal_block(dest_block);
432                    let real_dest_block = {
433                        let i = state.control_stack.len() - 1 - depth;
434                        let frame = &mut state.control_stack[i];
435                        frame.set_branched_to_exit();
436                        frame.br_destination()
437                    };
438
439                    // Bitcast any vector arguments to their default type, I8X16, before jumping.
440                    let destination_args = state.peekn_mut(return_count);
441                    let destination_types = builder.func.dfg.block_param_types(real_dest_block);
442                    bitcast_arguments(
443                        destination_args,
444                        &destination_types[..return_count],
445                        builder,
446                    );
447
448                    builder.ins().jump(real_dest_block, destination_args);
449                }
450                state.popn(return_count);
451            }
452            state.reachable = false;
453        }
454        Operator::Return => {
455            let (return_count, br_destination) = {
456                let frame = &mut state.control_stack[0];
457                frame.set_branched_to_exit();
458                let return_count = frame.num_return_values();
459                (return_count, frame.br_destination())
460            };
461            {
462                let return_args = state.peekn_mut(return_count);
463                let return_types = wasm_param_types(&builder.func.signature.returns, |i| {
464                    environ.is_wasm_return(&builder.func.signature, i)
465                });
466                bitcast_arguments(return_args, &return_types, builder);
467                match environ.return_mode() {
468                    ReturnMode::NormalReturns => builder.ins().return_(return_args),
469                    ReturnMode::FallthroughReturn => {
470                        builder.ins().jump(br_destination, return_args)
471                    }
472                };
473            }
474            state.popn(return_count);
475            state.reachable = false;
476        }
477        /************************************ Calls ****************************************
478         * The call instructions pop off their arguments from the stack and append their
479         * return values to it. `call_indirect` needs environment support because there is an
480         * argument referring to an index in the external functions table of the module.
481         ************************************************************************************/
482        Operator::Call { function_index } => {
483            let (fref, num_args) = state.get_direct_func(builder.func, *function_index, environ)?;
484
485            // Bitcast any vector arguments to their default type, I8X16, before calling.
486            let callee_signature =
487                &builder.func.dfg.signatures[builder.func.dfg.ext_funcs[fref].signature];
488            let args = state.peekn_mut(num_args);
489            let types = wasm_param_types(&callee_signature.params, |i| {
490                environ.is_wasm_parameter(&callee_signature, i)
491            });
492            bitcast_arguments(args, &types, builder);
493
494            let call = environ.translate_call(
495                builder.cursor(),
496                FuncIndex::from_u32(*function_index),
497                fref,
498                args,
499            )?;
500            let inst_results = builder.inst_results(call);
501            debug_assert_eq!(
502                inst_results.len(),
503                builder.func.dfg.signatures[builder.func.dfg.ext_funcs[fref].signature]
504                    .returns
505                    .len(),
506                "translate_call results should match the call signature"
507            );
508            state.popn(num_args);
509            state.pushn(inst_results);
510        }
511        Operator::CallIndirect { index, table_index } => {
512            // `index` is the index of the function's signature and `table_index` is the index of
513            // the table to search the function in.
514            let (sigref, num_args) = state.get_indirect_sig(builder.func, *index, environ)?;
515            let table = state.get_table(builder.func, *table_index, environ)?;
516            let callee = state.pop1();
517
518            // Bitcast any vector arguments to their default type, I8X16, before calling.
519            let callee_signature = &builder.func.dfg.signatures[sigref];
520            let args = state.peekn_mut(num_args);
521            let types = wasm_param_types(&callee_signature.params, |i| {
522                environ.is_wasm_parameter(&callee_signature, i)
523            });
524            bitcast_arguments(args, &types, builder);
525
526            let call = environ.translate_call_indirect(
527                builder.cursor(),
528                TableIndex::from_u32(*table_index),
529                table,
530                SignatureIndex::from_u32(*index),
531                sigref,
532                callee,
533                state.peekn(num_args),
534            )?;
535            let inst_results = builder.inst_results(call);
536            debug_assert_eq!(
537                inst_results.len(),
538                builder.func.dfg.signatures[sigref].returns.len(),
539                "translate_call_indirect results should match the call signature"
540            );
541            state.popn(num_args);
542            state.pushn(inst_results);
543        }
544        /******************************* Memory management ***********************************
545         * Memory management is handled by environment. It is usually translated into calls to
546         * special functions.
547         ************************************************************************************/
548        Operator::MemoryGrow { reserved } => {
549            // The WebAssembly MVP only supports one linear memory, but we expect the reserved
550            // argument to be a memory index.
551            let heap_index = MemoryIndex::from_u32(*reserved);
552            let heap = state.get_heap(builder.func, *reserved, environ)?;
553            let val = state.pop1();
554            state.push1(environ.translate_memory_grow(builder.cursor(), heap_index, heap, val)?)
555        }
556        Operator::MemorySize { reserved } => {
557            let heap_index = MemoryIndex::from_u32(*reserved);
558            let heap = state.get_heap(builder.func, *reserved, environ)?;
559            state.push1(environ.translate_memory_size(builder.cursor(), heap_index, heap)?);
560        }
561        /******************************* Load instructions ***********************************
562         * Wasm specifies an integer alignment flag but we drop it in Cranelift.
563         * The memory base address is provided by the environment.
564         ************************************************************************************/
// Sub-width loads: `Uload*` zero-extends and `Sload*` sign-extends the loaded
// bytes up to the CLIF result type (I32 or I64). The wasm alignment hint in
// `memarg.flags` is deliberately ignored (`flags: _`); only `offset` is used.
// All bounds checking / heap addressing happens inside `translate_load`.
565        Operator::I32Load8U {
566            memarg: MemoryImmediate { flags: _, offset },
567        } => {
568            translate_load(*offset, ir::Opcode::Uload8, I32, builder, state, environ)?;
569        }
570        Operator::I32Load16U {
571            memarg: MemoryImmediate { flags: _, offset },
572        } => {
573            translate_load(*offset, ir::Opcode::Uload16, I32, builder, state, environ)?;
574        }
575        Operator::I32Load8S {
576            memarg: MemoryImmediate { flags: _, offset },
577        } => {
578            translate_load(*offset, ir::Opcode::Sload8, I32, builder, state, environ)?;
579        }
580        Operator::I32Load16S {
581            memarg: MemoryImmediate { flags: _, offset },
582        } => {
583            translate_load(*offset, ir::Opcode::Sload16, I32, builder, state, environ)?;
584        }
585        Operator::I64Load8U {
586            memarg: MemoryImmediate { flags: _, offset },
587        } => {
588            translate_load(*offset, ir::Opcode::Uload8, I64, builder, state, environ)?;
589        }
590        Operator::I64Load16U {
591            memarg: MemoryImmediate { flags: _, offset },
592        } => {
593            translate_load(*offset, ir::Opcode::Uload16, I64, builder, state, environ)?;
594        }
595        Operator::I64Load8S {
596            memarg: MemoryImmediate { flags: _, offset },
597        } => {
598            translate_load(*offset, ir::Opcode::Sload8, I64, builder, state, environ)?;
599        }
600        Operator::I64Load16S {
601            memarg: MemoryImmediate { flags: _, offset },
602        } => {
603            translate_load(*offset, ir::Opcode::Sload16, I64, builder, state, environ)?;
604        }
605        Operator::I64Load32S {
606            memarg: MemoryImmediate { flags: _, offset },
607        } => {
608            translate_load(*offset, ir::Opcode::Sload32, I64, builder, state, environ)?;
609        }
610        Operator::I64Load32U {
611            memarg: MemoryImmediate { flags: _, offset },
612        } => {
613            translate_load(*offset, ir::Opcode::Uload32, I64, builder, state, environ)?;
614        }
        // Full-width loads: plain `Load` with the result type selecting the width,
        // including the 128-bit vector load typed as I8X16 (see V128Const below
        // for why I8X16 is the canonical v128 representation here).
615        Operator::I32Load {
616            memarg: MemoryImmediate { flags: _, offset },
617        } => {
618            translate_load(*offset, ir::Opcode::Load, I32, builder, state, environ)?;
619        }
620        Operator::F32Load {
621            memarg: MemoryImmediate { flags: _, offset },
622        } => {
623            translate_load(*offset, ir::Opcode::Load, F32, builder, state, environ)?;
624        }
625        Operator::I64Load {
626            memarg: MemoryImmediate { flags: _, offset },
627        } => {
628            translate_load(*offset, ir::Opcode::Load, I64, builder, state, environ)?;
629        }
630        Operator::F64Load {
631            memarg: MemoryImmediate { flags: _, offset },
632        } => {
633            translate_load(*offset, ir::Opcode::Load, F64, builder, state, environ)?;
634        }
635        Operator::V128Load {
636            memarg: MemoryImmediate { flags: _, offset },
637        } => {
638            translate_load(*offset, ir::Opcode::Load, I8X16, builder, state, environ)?;
639        }
640        /****************************** Store instructions ***********************************
641         * Wasm specifies an integer alignment flag but we drop it in Cranelift.
642         * The memory base address is provided by the environment.
643         ************************************************************************************/
// Full-width stores for all four scalar types (and v128) share one arm: the
// CLIF `Store` opcode takes its width from the value being stored, so no
// per-type dispatch is needed. As with loads, the alignment hint is dropped.
644        Operator::I32Store {
645            memarg: MemoryImmediate { flags: _, offset },
646        }
647        | Operator::I64Store {
648            memarg: MemoryImmediate { flags: _, offset },
649        }
650        | Operator::F32Store {
651            memarg: MemoryImmediate { flags: _, offset },
652        }
653        | Operator::F64Store {
654            memarg: MemoryImmediate { flags: _, offset },
655        } => {
656            translate_store(*offset, ir::Opcode::Store, builder, state, environ)?;
657        }
        // Narrowing stores: `Istore8/16/32` truncate the integer operand to the
        // named width before storing, so the i32 and i64 variants can share arms.
658        Operator::I32Store8 {
659            memarg: MemoryImmediate { flags: _, offset },
660        }
661        | Operator::I64Store8 {
662            memarg: MemoryImmediate { flags: _, offset },
663        } => {
664            translate_store(*offset, ir::Opcode::Istore8, builder, state, environ)?;
665        }
666        Operator::I32Store16 {
667            memarg: MemoryImmediate { flags: _, offset },
668        }
669        | Operator::I64Store16 {
670            memarg: MemoryImmediate { flags: _, offset },
671        } => {
672            translate_store(*offset, ir::Opcode::Istore16, builder, state, environ)?;
673        }
674        Operator::I64Store32 {
675            memarg: MemoryImmediate { flags: _, offset },
676        } => {
677            translate_store(*offset, ir::Opcode::Istore32, builder, state, environ)?;
678        }
679        Operator::V128Store {
680            memarg: MemoryImmediate { flags: _, offset },
681        } => {
682            translate_store(*offset, ir::Opcode::Store, builder, state, environ)?;
683        }
684        /****************************** Nullary Operators ************************************/
// Constants are pushed directly onto the value stack. The i32 immediate is
// widened with `i64::from` because `iconst` always takes an i64 payload; the
// float immediates go through `f32_translation`/`f64_translation` to convert
// wasmparser's raw-bits representation into CLIF float immediates.
685        Operator::I32Const { value } => state.push1(builder.ins().iconst(I32, i64::from(*value))),
686        Operator::I64Const { value } => state.push1(builder.ins().iconst(I64, *value)),
687        Operator::F32Const { value } => {
688            state.push1(builder.ins().f32const(f32_translation(*value)));
689        }
690        Operator::F64Const { value } => {
691            state.push1(builder.ins().f64const(f64_translation(*value)));
692        }
693        /******************************* Unary Operators *************************************/
// Bit-counting ops: the CLIF instructions are polymorphic over integer width,
// so the i32 and i64 wasm operators share a single arm each.
694        Operator::I32Clz | Operator::I64Clz => {
695            let arg = state.pop1();
696            state.push1(builder.ins().clz(arg));
697        }
698        Operator::I32Ctz | Operator::I64Ctz => {
699            let arg = state.pop1();
700            state.push1(builder.ins().ctz(arg));
701        }
702        Operator::I32Popcnt | Operator::I64Popcnt => {
703            let arg = state.pop1();
704            state.push1(builder.ins().popcnt(arg));
705        }
        // Width conversions between i32 and i64.
706        Operator::I64ExtendI32S => {
707            let val = state.pop1();
708            state.push1(builder.ins().sextend(I64, val));
709        }
710        Operator::I64ExtendI32U => {
711            let val = state.pop1();
712            state.push1(builder.ins().uextend(I64, val));
713        }
714        Operator::I32WrapI64 => {
715            let val = state.pop1();
716            state.push1(builder.ins().ireduce(I32, val));
717        }
        // Float unary ops; one arm per operation, polymorphic over F32/F64.
718        Operator::F32Sqrt | Operator::F64Sqrt => {
719            let arg = state.pop1();
720            state.push1(builder.ins().sqrt(arg));
721        }
722        Operator::F32Ceil | Operator::F64Ceil => {
723            let arg = state.pop1();
724            state.push1(builder.ins().ceil(arg));
725        }
726        Operator::F32Floor | Operator::F64Floor => {
727            let arg = state.pop1();
728            state.push1(builder.ins().floor(arg));
729        }
730        Operator::F32Trunc | Operator::F64Trunc => {
731            let arg = state.pop1();
732            state.push1(builder.ins().trunc(arg));
733        }
734        Operator::F32Nearest | Operator::F64Nearest => {
735            let arg = state.pop1();
736            state.push1(builder.ins().nearest(arg));
737        }
738        Operator::F32Abs | Operator::F64Abs => {
739            let val = state.pop1();
740            state.push1(builder.ins().fabs(val));
741        }
742        Operator::F32Neg | Operator::F64Neg => {
743            let arg = state.pop1();
744            state.push1(builder.ins().fneg(arg));
745        }
        // Int -> float conversions: the target float type is fixed by the arm,
        // the source integer width is taken from the popped value.
746        Operator::F64ConvertI64U | Operator::F64ConvertI32U => {
747            let val = state.pop1();
748            state.push1(builder.ins().fcvt_from_uint(F64, val));
749        }
750        Operator::F64ConvertI64S | Operator::F64ConvertI32S => {
751            let val = state.pop1();
752            state.push1(builder.ins().fcvt_from_sint(F64, val));
753        }
754        Operator::F32ConvertI64S | Operator::F32ConvertI32S => {
755            let val = state.pop1();
756            state.push1(builder.ins().fcvt_from_sint(F32, val));
757        }
758        Operator::F32ConvertI64U | Operator::F32ConvertI32U => {
759            let val = state.pop1();
760            state.push1(builder.ins().fcvt_from_uint(F32, val));
761        }
762        Operator::F64PromoteF32 => {
763            let val = state.pop1();
764            state.push1(builder.ins().fpromote(F64, val));
765        }
766        Operator::F32DemoteF64 => {
767            let val = state.pop1();
768            state.push1(builder.ins().fdemote(F32, val));
769        }
        // Float -> int truncations. The plain `fcvt_to_*` forms implement the
        // trapping wasm `trunc` operators; the `_sat` forms below implement the
        // nontrapping `trunc_sat` operators, which saturate instead.
770        Operator::I64TruncF64S | Operator::I64TruncF32S => {
771            let val = state.pop1();
772            state.push1(builder.ins().fcvt_to_sint(I64, val));
773        }
774        Operator::I32TruncF64S | Operator::I32TruncF32S => {
775            let val = state.pop1();
776            state.push1(builder.ins().fcvt_to_sint(I32, val));
777        }
778        Operator::I64TruncF64U | Operator::I64TruncF32U => {
779            let val = state.pop1();
780            state.push1(builder.ins().fcvt_to_uint(I64, val));
781        }
782        Operator::I32TruncF64U | Operator::I32TruncF32U => {
783            let val = state.pop1();
784            state.push1(builder.ins().fcvt_to_uint(I32, val));
785        }
786        Operator::I64TruncSatF64S | Operator::I64TruncSatF32S => {
787            let val = state.pop1();
788            state.push1(builder.ins().fcvt_to_sint_sat(I64, val));
789        }
790        Operator::I32TruncSatF64S | Operator::I32TruncSatF32S => {
791            let val = state.pop1();
792            state.push1(builder.ins().fcvt_to_sint_sat(I32, val));
793        }
794        Operator::I64TruncSatF64U | Operator::I64TruncSatF32U => {
795            let val = state.pop1();
796            state.push1(builder.ins().fcvt_to_uint_sat(I64, val));
797        }
798        Operator::I32TruncSatF64U | Operator::I32TruncSatF32U => {
799            let val = state.pop1();
800            state.push1(builder.ins().fcvt_to_uint_sat(I32, val));
801        }
        // Reinterpretations are pure bit pattern moves: `bitcast` between the
        // same-width integer and float types.
802        Operator::F32ReinterpretI32 => {
803            let val = state.pop1();
804            state.push1(builder.ins().bitcast(F32, val));
805        }
806        Operator::F64ReinterpretI64 => {
807            let val = state.pop1();
808            state.push1(builder.ins().bitcast(F64, val));
809        }
810        Operator::I32ReinterpretF32 => {
811            let val = state.pop1();
812            state.push1(builder.ins().bitcast(I32, val));
813        }
814        Operator::I64ReinterpretF64 => {
815            let val = state.pop1();
816            state.push1(builder.ins().bitcast(I64, val));
817        }
        // Sign-extension operators: implemented as a narrowing `ireduce` to the
        // named width followed by `sextend` back to the operand width. Note the
        // intermediate value round-trips through the wasm value stack
        // (push1 then pop1) rather than being passed in a local.
818        Operator::I32Extend8S => {
819            let val = state.pop1();
820            state.push1(builder.ins().ireduce(I8, val));
821            let val = state.pop1();
822            state.push1(builder.ins().sextend(I32, val));
823        }
824        Operator::I32Extend16S => {
825            let val = state.pop1();
826            state.push1(builder.ins().ireduce(I16, val));
827            let val = state.pop1();
828            state.push1(builder.ins().sextend(I32, val));
829        }
830        Operator::I64Extend8S => {
831            let val = state.pop1();
832            state.push1(builder.ins().ireduce(I8, val));
833            let val = state.pop1();
834            state.push1(builder.ins().sextend(I64, val));
835        }
836        Operator::I64Extend16S => {
837            let val = state.pop1();
838            state.push1(builder.ins().ireduce(I16, val));
839            let val = state.pop1();
840            state.push1(builder.ins().sextend(I64, val));
841        }
842        Operator::I64Extend32S => {
843            let val = state.pop1();
844            state.push1(builder.ins().ireduce(I32, val));
845            let val = state.pop1();
846            state.push1(builder.ins().sextend(I64, val));
847        }
848        /****************************** Binary Operators ************************************/
// Binary operators: `pop2` pops (lhs, rhs) — the second operand was on top of
// the stack — and the result is pushed back. The CLIF instructions are
// polymorphic over width, so i32/i64 (and some v128) operators share arms.
849        Operator::I32Add | Operator::I64Add => {
850            let (arg1, arg2) = state.pop2();
851            state.push1(builder.ins().iadd(arg1, arg2));
852        }
        // Bitwise ops also cover the v128 variants: band/bor/bxor operate on
        // raw bits regardless of lane interpretation.
853        Operator::I32And | Operator::I64And | Operator::V128And => {
854            let (arg1, arg2) = state.pop2();
855            state.push1(builder.ins().band(arg1, arg2));
856        }
857        Operator::I32Or | Operator::I64Or | Operator::V128Or => {
858            let (arg1, arg2) = state.pop2();
859            state.push1(builder.ins().bor(arg1, arg2));
860        }
861        Operator::I32Xor | Operator::I64Xor | Operator::V128Xor => {
862            let (arg1, arg2) = state.pop2();
863            state.push1(builder.ins().bxor(arg1, arg2));
864        }
865        Operator::I32Shl | Operator::I64Shl => {
866            let (arg1, arg2) = state.pop2();
867            state.push1(builder.ins().ishl(arg1, arg2));
868        }
869        Operator::I32ShrS | Operator::I64ShrS => {
870            let (arg1, arg2) = state.pop2();
871            state.push1(builder.ins().sshr(arg1, arg2));
872        }
873        Operator::I32ShrU | Operator::I64ShrU => {
874            let (arg1, arg2) = state.pop2();
875            state.push1(builder.ins().ushr(arg1, arg2));
876        }
877        Operator::I32Rotl | Operator::I64Rotl => {
878            let (arg1, arg2) = state.pop2();
879            state.push1(builder.ins().rotl(arg1, arg2));
880        }
881        Operator::I32Rotr | Operator::I64Rotr => {
882            let (arg1, arg2) = state.pop2();
883            state.push1(builder.ins().rotr(arg1, arg2));
884        }
885        Operator::F32Add | Operator::F64Add => {
886            let (arg1, arg2) = state.pop2();
887            state.push1(builder.ins().fadd(arg1, arg2));
888        }
889        Operator::I32Sub | Operator::I64Sub => {
890            let (arg1, arg2) = state.pop2();
891            state.push1(builder.ins().isub(arg1, arg2));
892        }
893        Operator::F32Sub | Operator::F64Sub => {
894            let (arg1, arg2) = state.pop2();
895            state.push1(builder.ins().fsub(arg1, arg2));
896        }
897        Operator::I32Mul | Operator::I64Mul => {
898            let (arg1, arg2) = state.pop2();
899            state.push1(builder.ins().imul(arg1, arg2));
900        }
901        Operator::F32Mul | Operator::F64Mul => {
902            let (arg1, arg2) = state.pop2();
903            state.push1(builder.ins().fmul(arg1, arg2));
904        }
905        Operator::F32Div | Operator::F64Div => {
906            let (arg1, arg2) = state.pop2();
907            state.push1(builder.ins().fdiv(arg1, arg2));
908        }
        // Integer division/remainder: signedness is encoded in the opcode
        // (sdiv/udiv, srem/urem); trap-on-zero is handled by the CLIF ops.
909        Operator::I32DivS | Operator::I64DivS => {
910            let (arg1, arg2) = state.pop2();
911            state.push1(builder.ins().sdiv(arg1, arg2));
912        }
913        Operator::I32DivU | Operator::I64DivU => {
914            let (arg1, arg2) = state.pop2();
915            state.push1(builder.ins().udiv(arg1, arg2));
916        }
917        Operator::I32RemS | Operator::I64RemS => {
918            let (arg1, arg2) = state.pop2();
919            state.push1(builder.ins().srem(arg1, arg2));
920        }
921        Operator::I32RemU | Operator::I64RemU => {
922            let (arg1, arg2) = state.pop2();
923            state.push1(builder.ins().urem(arg1, arg2));
924        }
925        Operator::F32Min | Operator::F64Min => {
926            let (arg1, arg2) = state.pop2();
927            state.push1(builder.ins().fmin(arg1, arg2));
928        }
929        Operator::F32Max | Operator::F64Max => {
930            let (arg1, arg2) = state.pop2();
931            state.push1(builder.ins().fmax(arg1, arg2));
932        }
933        Operator::F32Copysign | Operator::F64Copysign => {
934            let (arg1, arg2) = state.pop2();
935            state.push1(builder.ins().fcopysign(arg1, arg2));
936        }
937        /**************************** Comparison Operators **********************************/
// Comparisons delegate to the `translate_icmp`/`translate_fcmp` helpers, which
// pop two operands, compare with the given condition code, and push the i32
// boolean result that wasm expects.
938        Operator::I32LtS | Operator::I64LtS => {
939            translate_icmp(IntCC::SignedLessThan, builder, state)
940        }
941        Operator::I32LtU | Operator::I64LtU => {
942            translate_icmp(IntCC::UnsignedLessThan, builder, state)
943        }
944        Operator::I32LeS | Operator::I64LeS => {
945            translate_icmp(IntCC::SignedLessThanOrEqual, builder, state)
946        }
947        Operator::I32LeU | Operator::I64LeU => {
948            translate_icmp(IntCC::UnsignedLessThanOrEqual, builder, state)
949        }
950        Operator::I32GtS | Operator::I64GtS => {
951            translate_icmp(IntCC::SignedGreaterThan, builder, state)
952        }
953        Operator::I32GtU | Operator::I64GtU => {
954            translate_icmp(IntCC::UnsignedGreaterThan, builder, state)
955        }
956        Operator::I32GeS | Operator::I64GeS => {
957            translate_icmp(IntCC::SignedGreaterThanOrEqual, builder, state)
958        }
959        Operator::I32GeU | Operator::I64GeU => {
960            translate_icmp(IntCC::UnsignedGreaterThanOrEqual, builder, state)
961        }
        // `eqz` compares against an immediate zero; `bint` widens the boolean
        // comparison result to the I32 value wasm requires on the stack.
962        Operator::I32Eqz | Operator::I64Eqz => {
963            let arg = state.pop1();
964            let val = builder.ins().icmp_imm(IntCC::Equal, arg, 0);
965            state.push1(builder.ins().bint(I32, val));
966        }
967        Operator::I32Eq | Operator::I64Eq => translate_icmp(IntCC::Equal, builder, state),
968        Operator::F32Eq | Operator::F64Eq => translate_fcmp(FloatCC::Equal, builder, state),
969        Operator::I32Ne | Operator::I64Ne => translate_icmp(IntCC::NotEqual, builder, state),
970        Operator::F32Ne | Operator::F64Ne => translate_fcmp(FloatCC::NotEqual, builder, state),
971        Operator::F32Gt | Operator::F64Gt => translate_fcmp(FloatCC::GreaterThan, builder, state),
972        Operator::F32Ge | Operator::F64Ge => {
973            translate_fcmp(FloatCC::GreaterThanOrEqual, builder, state)
974        }
975        Operator::F32Lt | Operator::F64Lt => translate_fcmp(FloatCC::LessThan, builder, state),
976        Operator::F32Le | Operator::F64Le => {
977            translate_fcmp(FloatCC::LessThanOrEqual, builder, state)
978        }
        // Reference-type operators: the concrete CLIF type of a wasm reference
        // is chosen by the environment (`environ.reference_type()`).
979        Operator::RefNull => state.push1(builder.ins().null(environ.reference_type())),
980        Operator::RefIsNull => {
981            let arg = state.pop1();
982            let val = builder.ins().is_null(arg);
983            let val_int = builder.ins().bint(I32, val);
984            state.push1(val_int);
985        }
        // `ref.func` is environment-defined: how a function index becomes a
        // reference value depends on the runtime.
986        Operator::RefFunc { function_index } => {
987            state.push1(environ.translate_ref_func(builder.cursor(), *function_index)?);
988        }
// All operators from the (proposed) threads extension — atomic loads/stores,
// read-modify-write ops, compare-exchange, wait/notify, and fence — are
// rejected in one arm with a `wasm_unsupported` error rather than translated.
989        Operator::AtomicNotify { .. }
990        | Operator::I32AtomicWait { .. }
991        | Operator::I64AtomicWait { .. }
992        | Operator::I32AtomicLoad { .. }
993        | Operator::I64AtomicLoad { .. }
994        | Operator::I32AtomicLoad8U { .. }
995        | Operator::I32AtomicLoad16U { .. }
996        | Operator::I64AtomicLoad8U { .. }
997        | Operator::I64AtomicLoad16U { .. }
998        | Operator::I64AtomicLoad32U { .. }
999        | Operator::I32AtomicStore { .. }
1000        | Operator::I64AtomicStore { .. }
1001        | Operator::I32AtomicStore8 { .. }
1002        | Operator::I32AtomicStore16 { .. }
1003        | Operator::I64AtomicStore8 { .. }
1004        | Operator::I64AtomicStore16 { .. }
1005        | Operator::I64AtomicStore32 { .. }
1006        | Operator::I32AtomicRmwAdd { .. }
1007        | Operator::I64AtomicRmwAdd { .. }
1008        | Operator::I32AtomicRmw8AddU { .. }
1009        | Operator::I32AtomicRmw16AddU { .. }
1010        | Operator::I64AtomicRmw8AddU { .. }
1011        | Operator::I64AtomicRmw16AddU { .. }
1012        | Operator::I64AtomicRmw32AddU { .. }
1013        | Operator::I32AtomicRmwSub { .. }
1014        | Operator::I64AtomicRmwSub { .. }
1015        | Operator::I32AtomicRmw8SubU { .. }
1016        | Operator::I32AtomicRmw16SubU { .. }
1017        | Operator::I64AtomicRmw8SubU { .. }
1018        | Operator::I64AtomicRmw16SubU { .. }
1019        | Operator::I64AtomicRmw32SubU { .. }
1020        | Operator::I32AtomicRmwAnd { .. }
1021        | Operator::I64AtomicRmwAnd { .. }
1022        | Operator::I32AtomicRmw8AndU { .. }
1023        | Operator::I32AtomicRmw16AndU { .. }
1024        | Operator::I64AtomicRmw8AndU { .. }
1025        | Operator::I64AtomicRmw16AndU { .. }
1026        | Operator::I64AtomicRmw32AndU { .. }
1027        | Operator::I32AtomicRmwOr { .. }
1028        | Operator::I64AtomicRmwOr { .. }
1029        | Operator::I32AtomicRmw8OrU { .. }
1030        | Operator::I32AtomicRmw16OrU { .. }
1031        | Operator::I64AtomicRmw8OrU { .. }
1032        | Operator::I64AtomicRmw16OrU { .. }
1033        | Operator::I64AtomicRmw32OrU { .. }
1034        | Operator::I32AtomicRmwXor { .. }
1035        | Operator::I64AtomicRmwXor { .. }
1036        | Operator::I32AtomicRmw8XorU { .. }
1037        | Operator::I32AtomicRmw16XorU { .. }
1038        | Operator::I64AtomicRmw8XorU { .. }
1039        | Operator::I64AtomicRmw16XorU { .. }
1040        | Operator::I64AtomicRmw32XorU { .. }
1041        | Operator::I32AtomicRmwXchg { .. }
1042        | Operator::I64AtomicRmwXchg { .. }
1043        | Operator::I32AtomicRmw8XchgU { .. }
1044        | Operator::I32AtomicRmw16XchgU { .. }
1045        | Operator::I64AtomicRmw8XchgU { .. }
1046        | Operator::I64AtomicRmw16XchgU { .. }
1047        | Operator::I64AtomicRmw32XchgU { .. }
1048        | Operator::I32AtomicRmwCmpxchg { .. }
1049        | Operator::I64AtomicRmwCmpxchg { .. }
1050        | Operator::I32AtomicRmw8CmpxchgU { .. }
1051        | Operator::I32AtomicRmw16CmpxchgU { .. }
1052        | Operator::I64AtomicRmw8CmpxchgU { .. }
1053        | Operator::I64AtomicRmw16CmpxchgU { .. }
1054        | Operator::I64AtomicRmw32CmpxchgU { .. }
1055        | Operator::AtomicFence { .. } => {
1056            return Err(wasm_unsupported!("proposed thread operator {:?}", op));
1057        }
// Bulk-memory operators. Operands are popped top-of-stack first, so the pop
// order (len, then src, then dest) is the reverse of the wasm operand order
// (dest, src, len). Actual semantics are delegated to the environment.
1058        Operator::MemoryCopy => {
1059            // The WebAssembly MVP only supports one linear memory and
1060            // wasmparser will ensure that the memory indices specified are
1061            // zero.
1062            let heap_index = MemoryIndex::from_u32(0);
1063            let heap = state.get_heap(builder.func, 0, environ)?;
1064            let len = state.pop1();
1065            let src = state.pop1();
1066            let dest = state.pop1();
1067            environ.translate_memory_copy(builder.cursor(), heap_index, heap, dest, src, len)?;
1068        }
1069        Operator::MemoryFill => {
1070            // The WebAssembly MVP only supports one linear memory and
1071            // wasmparser will ensure that the memory index specified is
1072            // zero.
1073            let heap_index = MemoryIndex::from_u32(0);
1074            let heap = state.get_heap(builder.func, 0, environ)?;
1075            let len = state.pop1();
1076            let val = state.pop1();
1077            let dest = state.pop1();
1078            environ.translate_memory_fill(builder.cursor(), heap_index, heap, dest, val, len)?;
1079        }
1080        Operator::MemoryInit { segment } => {
1081            // The WebAssembly MVP only supports one linear memory and
1082            // wasmparser will ensure that the memory index specified is
1083            // zero.
1084            let heap_index = MemoryIndex::from_u32(0);
1085            let heap = state.get_heap(builder.func, 0, environ)?;
1086            let len = state.pop1();
1087            let src = state.pop1();
1088            let dest = state.pop1();
1089            environ.translate_memory_init(
1090                builder.cursor(),
1091                heap_index,
1092                heap,
1093                *segment,
1094                dest,
1095                src,
1096                len,
1097            )?;
1098        }
        // Drops a passive data segment; purely an environment-side effect.
1099        Operator::DataDrop { segment } => {
1100            environ.translate_data_drop(builder.cursor(), *segment)?;
1101        }
        // Table operators (reference-types / bulk-memory proposals). Raw u32
        // table indices from wasmparser are wrapped in `TableIndex` where the
        // environment API requires the typed index.
1102        Operator::TableSize { table: index } => {
1103            let table = state.get_table(builder.func, *index, environ)?;
1104            state.push1(environ.translate_table_size(
1105                builder.cursor(),
1106                TableIndex::from_u32(*index),
1107                table,
1108            )?);
1109        }
1110        Operator::TableGrow { table } => {
1111            let delta = state.pop1();
1112            let init_value = state.pop1();
1113            state.push1(environ.translate_table_grow(
1114                builder.cursor(),
1115                *table,
1116                delta,
1117                init_value,
1118            )?);
1119        }
1120        Operator::TableGet { table } => {
1121            let index = state.pop1();
1122            state.push1(environ.translate_table_get(builder.cursor(), *table, index)?);
1123        }
1124        Operator::TableSet { table } => {
1125            let value = state.pop1();
1126            let index = state.pop1();
1127            environ.translate_table_set(builder.cursor(), *table, value, index)?;
1128        }
1129        Operator::TableCopy {
1130            dst_table: dst_table_index,
1131            src_table: src_table_index,
1132        } => {
1133            let dst_table = state.get_table(builder.func, *dst_table_index, environ)?;
1134            let src_table = state.get_table(builder.func, *src_table_index, environ)?;
1135            let len = state.pop1();
1136            let src = state.pop1();
1137            let dest = state.pop1();
1138            environ.translate_table_copy(
1139                builder.cursor(),
1140                TableIndex::from_u32(*dst_table_index),
1141                dst_table,
1142                TableIndex::from_u32(*src_table_index),
1143                src_table,
1144                dest,
1145                src,
1146                len,
1147            )?;
1148        }
1149        Operator::TableFill { table } => {
1150            let len = state.pop1();
1151            let val = state.pop1();
1152            let dest = state.pop1();
1153            environ.translate_table_fill(builder.cursor(), *table, dest, val, len)?;
1154        }
1155        Operator::TableInit {
1156            segment,
1157            table: table_index,
1158        } => {
1159            let table = state.get_table(builder.func, *table_index, environ)?;
1160            let len = state.pop1();
1161            let src = state.pop1();
1162            let dest = state.pop1();
1163            environ.translate_table_init(
1164                builder.cursor(),
1165                *segment,
1166                TableIndex::from_u32(*table_index),
1167                table,
1168                dest,
1169                src,
1170                len,
1171            )?;
1172        }
        // Drops a passive element segment; mirrors `DataDrop` above.
1173        Operator::ElemDrop { segment } => {
1174            environ.translate_elem_drop(builder.cursor(), *segment)?;
1175        }
// SIMD operators. CLIF types v128 values concretely (e.g. I8X16, I16X8), while
// wasm often uses the untyped v128; the `pop*_with_bitcast` and
// `optionally_bitcast_vector` helpers insert raw bitcasts as needed so operand
// types line up with the instruction's expected lane type (`type_of(op)`).
1176        Operator::V128Const { value } => {
            // The 16 immediate bytes go into the function's constant pool and
            // are referenced by handle from a `vconst`.
1177            let data = value.bytes().to_vec().into();
1178            let handle = builder.func.dfg.constants.insert(data);
1179            let value = builder.ins().vconst(I8X16, handle);
1180            // the v128.const is typed in CLIF as a I8x16 but raw_bitcast to a different type before use
1181            state.push1(value)
1182        }
        // 8- and 16-lane splats must first `ireduce` the i32 scalar operand
        // down to the lane type (I8/I16) before splatting.
1183        Operator::I8x16Splat | Operator::I16x8Splat => {
1184            let reduced = builder.ins().ireduce(type_of(op).lane_type(), state.pop1());
1185            let splatted = builder.ins().splat(type_of(op), reduced);
1186            state.push1(splatted)
1187        }
        // 32- and 64-lane splats take the scalar as-is; its type already
        // matches the lane type.
1188        Operator::I32x4Splat
1189        | Operator::I64x2Splat
1190        | Operator::F32x4Splat
1191        | Operator::F64x2Splat => {
1192            let splatted = builder.ins().splat(type_of(op), state.pop1());
1193            state.push1(splatted)
1194        }
1195        Operator::V8x16LoadSplat {
1196            memarg: MemoryImmediate { flags: _, offset },
1197        }
1198        | Operator::V16x8LoadSplat {
1199            memarg: MemoryImmediate { flags: _, offset },
1200        }
1201        | Operator::V32x4LoadSplat {
1202            memarg: MemoryImmediate { flags: _, offset },
1203        }
1204        | Operator::V64x2LoadSplat {
1205            memarg: MemoryImmediate { flags: _, offset },
1206        } => {
1207            // TODO: For spec compliance, this is initially implemented as a combination of `load +
1208            // splat` but could be implemented eventually as a single instruction (`load_splat`).
1209            // See https://github.com/bytecodealliance/cranelift/issues/1348.
1210            translate_load(
1211                *offset,
1212                ir::Opcode::Load,
1213                type_of(op).lane_type(),
1214                builder,
1215                state,
1216                environ,
1217            )?;
            // `translate_load` pushed the scalar; pop it back off and splat.
1218            let splatted = builder.ins().splat(type_of(op), state.pop1());
1219            state.push1(splatted)
1220        }
        // Signed extracts sign-extend the lane up to I32; unsigned extracts
        // rely on `extractlane`'s result (see the PEXTRB note below).
1221        Operator::I8x16ExtractLaneS { lane } | Operator::I16x8ExtractLaneS { lane } => {
1222            let vector = pop1_with_bitcast(state, type_of(op), builder);
1223            let extracted = builder.ins().extractlane(vector, lane.clone());
1224            state.push1(builder.ins().sextend(I32, extracted))
1225        }
1226        Operator::I8x16ExtractLaneU { lane } | Operator::I16x8ExtractLaneU { lane } => {
1227            let vector = pop1_with_bitcast(state, type_of(op), builder);
1228            state.push1(builder.ins().extractlane(vector, lane.clone()));
1229            // on x86, PEXTRB zeroes the upper bits of the destination register of extractlane so uextend is elided; of course, this depends on extractlane being legalized to a PEXTRB
1230        }
        // 32/64-bit lane extracts need no extension: the lane already has the
        // wasm result width.
1231        Operator::I32x4ExtractLane { lane }
1232        | Operator::I64x2ExtractLane { lane }
1233        | Operator::F32x4ExtractLane { lane }
1234        | Operator::F64x2ExtractLane { lane } => {
1235            let vector = pop1_with_bitcast(state, type_of(op), builder);
1236            state.push1(builder.ins().extractlane(vector, lane.clone()))
1237        }
        // Replace-lane: bitcast to the op's lane layout, insert, then bitcast
        // back to the vector's original CLIF type so downstream users see the
        // type they pushed.
1238        Operator::I8x16ReplaceLane { lane }
1239        | Operator::I16x8ReplaceLane { lane }
1240        | Operator::I32x4ReplaceLane { lane }
1241        | Operator::I64x2ReplaceLane { lane }
1242        | Operator::F32x4ReplaceLane { lane }
1243        | Operator::F64x2ReplaceLane { lane } => {
1244            let (vector, replacement_value) = state.pop2();
1245            let original_vector_type = builder.func.dfg.value_type(vector);
1246            let vector = optionally_bitcast_vector(vector, type_of(op), builder);
1247            let replaced_vector = builder
1248                .ins()
1249                .insertlane(vector, lane.clone(), replacement_value);
1250            state.push1(optionally_bitcast_vector(
1251                replaced_vector,
1252                original_vector_type,
1253                builder,
1254            ))
1255        }
        // Shuffle: the 16 lane indices become an immediate in the function's
        // immediates pool.
1256        Operator::V8x16Shuffle { lanes, .. } => {
1257            let (a, b) = pop2_with_bitcast(state, I8X16, builder);
1258            let lanes = ConstantData::from(lanes.as_ref());
1259            let mask = builder.func.dfg.immediates.push(lanes);
1260            let shuffled = builder.ins().shuffle(a, b, mask);
1261            state.push1(shuffled)
1262            // At this point the original types of a and b are lost; users of this value (i.e. this
1263            // WASM-to-CLIF translator) may need to raw_bitcast for type-correctness. This is due
1264            // to WASM using the less specific v128 type for certain operations and more specific
1265            // types (e.g. i8x16) for others.
1266        }
        // Lane-wise arithmetic: bitcast both operands to `type_of(op)` and emit
        // the corresponding polymorphic CLIF vector instruction.
1267        Operator::I8x16Add | Operator::I16x8Add | Operator::I32x4Add | Operator::I64x2Add => {
1268            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1269            state.push1(builder.ins().iadd(a, b))
1270        }
1271        Operator::I8x16AddSaturateS | Operator::I16x8AddSaturateS => {
1272            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1273            state.push1(builder.ins().sadd_sat(a, b))
1274        }
1275        Operator::I8x16AddSaturateU | Operator::I16x8AddSaturateU => {
1276            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1277            state.push1(builder.ins().uadd_sat(a, b))
1278        }
1279        Operator::I8x16Sub | Operator::I16x8Sub | Operator::I32x4Sub | Operator::I64x2Sub => {
1280            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1281            state.push1(builder.ins().isub(a, b))
1282        }
1283        Operator::I8x16SubSaturateS | Operator::I16x8SubSaturateS => {
1284            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1285            state.push1(builder.ins().ssub_sat(a, b))
1286        }
1287        Operator::I8x16SubSaturateU | Operator::I16x8SubSaturateU => {
1288            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1289            state.push1(builder.ins().usub_sat(a, b))
1290        }
1291        Operator::I8x16MinS | Operator::I16x8MinS | Operator::I32x4MinS => {
1292            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1293            state.push1(builder.ins().imin(a, b))
1294        }
1295        Operator::I8x16MinU | Operator::I16x8MinU | Operator::I32x4MinU => {
1296            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1297            state.push1(builder.ins().umin(a, b))
1298        }
1299        Operator::I8x16MaxS | Operator::I16x8MaxS | Operator::I32x4MaxS => {
1300            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1301            state.push1(builder.ins().imax(a, b))
1302        }
1303        Operator::I8x16MaxU | Operator::I16x8MaxU | Operator::I32x4MaxU => {
1304            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1305            state.push1(builder.ins().umax(a, b))
1306        }
1307        Operator::I8x16RoundingAverageU | Operator::I16x8RoundingAverageU => {
1308            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1309            state.push1(builder.ins().avg_round(a, b))
1310        }
1311        Operator::I8x16Neg | Operator::I16x8Neg | Operator::I32x4Neg | Operator::I64x2Neg => {
1312            let a = pop1_with_bitcast(state, type_of(op), builder);
1313            state.push1(builder.ins().ineg(a))
1314        }
1315        Operator::I16x8Mul | Operator::I32x4Mul => {
1316            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1317            state.push1(builder.ins().imul(a, b))
1318        }
1319        Operator::V128AndNot => {
1320            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1321            state.push1(builder.ins().band_not(a, b))
1322        }
1323        Operator::V128Not => {
1324            let a = state.pop1();
1325            state.push1(builder.ins().bnot(a));
1326        }
1327        Operator::I16x8Shl | Operator::I32x4Shl | Operator::I64x2Shl => {
1328            let (a, b) = state.pop2();
1329            let bitcast_a = optionally_bitcast_vector(a, type_of(op), builder);
1330            let bitwidth = i64::from(builder.func.dfg.value_type(a).bits());
1331            // The spec expects to shift with `b mod lanewidth`; so, e.g., for 16 bit lane-width
1332            // we do `b AND 15`; this means fewer instructions than `iconst + urem`.
1333            let b_mod_bitwidth = builder.ins().band_imm(b, bitwidth - 1);
1334            state.push1(builder.ins().ishl(bitcast_a, b_mod_bitwidth))
1335        }
1336        Operator::I16x8ShrU | Operator::I32x4ShrU | Operator::I64x2ShrU => {
1337            let (a, b) = state.pop2();
1338            let bitcast_a = optionally_bitcast_vector(a, type_of(op), builder);
1339            let bitwidth = i64::from(builder.func.dfg.value_type(a).bits());
1340            // The spec expects to shift with `b mod lanewidth`; so, e.g., for 16 bit lane-width
1341            // we do `b AND 15`; this means fewer instructions than `iconst + urem`.
1342            let b_mod_bitwidth = builder.ins().band_imm(b, bitwidth - 1);
1343            state.push1(builder.ins().ushr(bitcast_a, b_mod_bitwidth))
1344        }
1345        Operator::I16x8ShrS | Operator::I32x4ShrS => {
1346            let (a, b) = state.pop2();
1347            let bitcast_a = optionally_bitcast_vector(a, type_of(op), builder);
1348            let bitwidth = i64::from(builder.func.dfg.value_type(a).bits());
1349            // The spec expects to shift with `b mod lanewidth`; so, e.g., for 16 bit lane-width
1350            // we do `b AND 15`; this means fewer instructions than `iconst + urem`.
1351            let b_mod_bitwidth = builder.ins().band_imm(b, bitwidth - 1);
1352            state.push1(builder.ins().sshr(bitcast_a, b_mod_bitwidth))
1353        }
1354        Operator::V128Bitselect => {
1355            let (a, b, c) = state.pop3();
1356            let bitcast_a = optionally_bitcast_vector(a, I8X16, builder);
1357            let bitcast_b = optionally_bitcast_vector(b, I8X16, builder);
1358            let bitcast_c = optionally_bitcast_vector(c, I8X16, builder);
1359            // The CLIF operand ordering is slightly different and the types of all three
1360            // operands must match (hence the bitcast).
1361            state.push1(builder.ins().bitselect(bitcast_c, bitcast_a, bitcast_b))
1362        }
1363        Operator::I8x16AnyTrue
1364        | Operator::I16x8AnyTrue
1365        | Operator::I32x4AnyTrue
1366        | Operator::I64x2AnyTrue => {
1367            let a = pop1_with_bitcast(state, type_of(op), builder);
1368            let bool_result = builder.ins().vany_true(a);
1369            state.push1(builder.ins().bint(I32, bool_result))
1370        }
1371        Operator::I8x16AllTrue
1372        | Operator::I16x8AllTrue
1373        | Operator::I32x4AllTrue
1374        | Operator::I64x2AllTrue => {
1375            let a = pop1_with_bitcast(state, type_of(op), builder);
1376            let bool_result = builder.ins().vall_true(a);
1377            state.push1(builder.ins().bint(I32, bool_result))
1378        }
1379        Operator::I8x16Eq | Operator::I16x8Eq | Operator::I32x4Eq => {
1380            translate_vector_icmp(IntCC::Equal, type_of(op), builder, state)
1381        }
1382        Operator::I8x16Ne | Operator::I16x8Ne | Operator::I32x4Ne => {
1383            translate_vector_icmp(IntCC::NotEqual, type_of(op), builder, state)
1384        }
1385        Operator::I8x16GtS | Operator::I16x8GtS | Operator::I32x4GtS => {
1386            translate_vector_icmp(IntCC::SignedGreaterThan, type_of(op), builder, state)
1387        }
1388        Operator::I8x16LtS | Operator::I16x8LtS | Operator::I32x4LtS => {
1389            translate_vector_icmp(IntCC::SignedLessThan, type_of(op), builder, state)
1390        }
1391        Operator::I8x16GtU | Operator::I16x8GtU | Operator::I32x4GtU => {
1392            translate_vector_icmp(IntCC::UnsignedGreaterThan, type_of(op), builder, state)
1393        }
1394        Operator::I8x16LtU | Operator::I16x8LtU | Operator::I32x4LtU => {
1395            translate_vector_icmp(IntCC::UnsignedLessThan, type_of(op), builder, state)
1396        }
1397        Operator::I8x16GeS | Operator::I16x8GeS | Operator::I32x4GeS => {
1398            translate_vector_icmp(IntCC::SignedGreaterThanOrEqual, type_of(op), builder, state)
1399        }
1400        Operator::I8x16LeS | Operator::I16x8LeS | Operator::I32x4LeS => {
1401            translate_vector_icmp(IntCC::SignedLessThanOrEqual, type_of(op), builder, state)
1402        }
1403        Operator::I8x16GeU | Operator::I16x8GeU | Operator::I32x4GeU => translate_vector_icmp(
1404            IntCC::UnsignedGreaterThanOrEqual,
1405            type_of(op),
1406            builder,
1407            state,
1408        ),
1409        Operator::I8x16LeU | Operator::I16x8LeU | Operator::I32x4LeU => {
1410            translate_vector_icmp(IntCC::UnsignedLessThanOrEqual, type_of(op), builder, state)
1411        }
1412        Operator::F32x4Eq | Operator::F64x2Eq => {
1413            translate_vector_fcmp(FloatCC::Equal, type_of(op), builder, state)
1414        }
1415        Operator::F32x4Ne | Operator::F64x2Ne => {
1416            translate_vector_fcmp(FloatCC::NotEqual, type_of(op), builder, state)
1417        }
1418        Operator::F32x4Lt | Operator::F64x2Lt => {
1419            translate_vector_fcmp(FloatCC::LessThan, type_of(op), builder, state)
1420        }
1421        Operator::F32x4Gt | Operator::F64x2Gt => {
1422            translate_vector_fcmp(FloatCC::GreaterThan, type_of(op), builder, state)
1423        }
1424        Operator::F32x4Le | Operator::F64x2Le => {
1425            translate_vector_fcmp(FloatCC::LessThanOrEqual, type_of(op), builder, state)
1426        }
1427        Operator::F32x4Ge | Operator::F64x2Ge => {
1428            translate_vector_fcmp(FloatCC::GreaterThanOrEqual, type_of(op), builder, state)
1429        }
1430        Operator::F32x4Add | Operator::F64x2Add => {
1431            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1432            state.push1(builder.ins().fadd(a, b))
1433        }
1434        Operator::F32x4Sub | Operator::F64x2Sub => {
1435            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1436            state.push1(builder.ins().fsub(a, b))
1437        }
1438        Operator::F32x4Mul | Operator::F64x2Mul => {
1439            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1440            state.push1(builder.ins().fmul(a, b))
1441        }
1442        Operator::F32x4Div | Operator::F64x2Div => {
1443            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1444            state.push1(builder.ins().fdiv(a, b))
1445        }
1446        Operator::F32x4Max | Operator::F64x2Max => {
1447            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1448            state.push1(builder.ins().fmax(a, b))
1449        }
1450        Operator::F32x4Min | Operator::F64x2Min => {
1451            let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
1452            state.push1(builder.ins().fmin(a, b))
1453        }
1454        Operator::F32x4Sqrt | Operator::F64x2Sqrt => {
1455            let a = pop1_with_bitcast(state, type_of(op), builder);
1456            state.push1(builder.ins().sqrt(a))
1457        }
1458        Operator::F32x4Neg | Operator::F64x2Neg => {
1459            let a = pop1_with_bitcast(state, type_of(op), builder);
1460            state.push1(builder.ins().fneg(a))
1461        }
1462        Operator::F32x4Abs | Operator::F64x2Abs => {
1463            let a = pop1_with_bitcast(state, type_of(op), builder);
1464            state.push1(builder.ins().fabs(a))
1465        }
1466        Operator::I8x16Shl
1467        | Operator::I8x16ShrS
1468        | Operator::I8x16ShrU
1469        | Operator::I8x16Mul
1470        | Operator::I64x2Mul
1471        | Operator::I64x2ShrS
1472        | Operator::I32x4TruncSatF32x4S
1473        | Operator::I32x4TruncSatF32x4U
1474        | Operator::I64x2TruncSatF64x2S
1475        | Operator::I64x2TruncSatF64x2U
1476        | Operator::F32x4ConvertI32x4S
1477        | Operator::F32x4ConvertI32x4U
1478        | Operator::F64x2ConvertI64x2S
1479        | Operator::F64x2ConvertI64x2U { .. }
1480        | Operator::I8x16NarrowI16x8S { .. }
1481        | Operator::I8x16NarrowI16x8U { .. }
1482        | Operator::I16x8NarrowI32x4S { .. }
1483        | Operator::I16x8NarrowI32x4U { .. }
1484        | Operator::I16x8WidenLowI8x16S { .. }
1485        | Operator::I16x8WidenHighI8x16S { .. }
1486        | Operator::I16x8WidenLowI8x16U { .. }
1487        | Operator::I16x8WidenHighI8x16U { .. }
1488        | Operator::I32x4WidenLowI16x8S { .. }
1489        | Operator::I32x4WidenHighI16x8S { .. }
1490        | Operator::I32x4WidenLowI16x8U { .. }
1491        | Operator::I32x4WidenHighI16x8U { .. }
1492        | Operator::V8x16Swizzle
1493        | Operator::I16x8Load8x8S { .. }
1494        | Operator::I16x8Load8x8U { .. }
1495        | Operator::I32x4Load16x4S { .. }
1496        | Operator::I32x4Load16x4U { .. }
1497        | Operator::I64x2Load32x2S { .. }
1498        | Operator::I64x2Load32x2U { .. } => {
1499            return Err(wasm_unsupported!("proposed SIMD operator {:?}", op));
1500        }
1501    };
1502    Ok(())
1503}
1504
// Clippy warns us of some fields we are deliberately ignoring
#[cfg_attr(feature = "cargo-clippy", allow(clippy::unneeded_field_pattern))]
/// Deals with a Wasm instruction located in an unreachable portion of the code. Most of them
/// are dropped but special ones like `End` or `Else` signal the potential end of the unreachable
/// portion so the translation state must be updated accordingly.
fn translate_unreachable_operator<FE: FuncEnvironment + ?Sized>(
    module_translation_state: &ModuleTranslationState,
    op: &Operator,
    builder: &mut FunctionBuilder,
    state: &mut FuncTranslationState,
    environ: &mut FE,
) -> WasmResult<()> {
    // This function is only ever entered while translating unreachable code.
    debug_assert!(!state.reachable);
    match *op {
        Operator::If { ty } => {
            // Push a placeholder control stack entry. The if isn't reachable,
            // so we don't have any branches anywhere.
            state.push_if(
                ir::Block::reserved_value(),
                ElseData::NoElse {
                    branch_inst: ir::Inst::reserved_value(),
                },
                0,
                0,
                ty,
            );
        }
        Operator::Loop { ty: _ } | Operator::Block { ty: _ } => {
            // Unreachable `loop`/`block` also get a placeholder frame so that
            // the matching `End` pops the correct control-stack entry.
            state.push_block(ir::Block::reserved_value(), 0, 0);
        }
        Operator::Else => {
            let i = state.control_stack.len() - 1;
            match state.control_stack[i] {
                ControlStackFrame::If {
                    ref else_data,
                    head_is_reachable,
                    ref mut consequent_ends_reachable,
                    blocktype,
                    ..
                } => {
                    debug_assert!(consequent_ends_reachable.is_none());
                    // Record that the consequent ended unreachable
                    // (`state.reachable` is `false` here, per the debug_assert above).
                    *consequent_ends_reachable = Some(state.reachable);

                    if head_is_reachable {
                        // We have a branch from the head of the `if` to the `else`.
                        state.reachable = true;

                        let else_block = match *else_data {
                            ElseData::NoElse { branch_inst } => {
                                // The `if` head branched to a placeholder destination;
                                // create the real else block now and retarget the branch.
                                let (params, _results) =
                                    blocktype_params_results(module_translation_state, blocktype)?;
                                let else_block = block_with_params(builder, params, environ)?;

                                // We change the target of the branch instruction.
                                builder.change_jump_destination(branch_inst, else_block);
                                builder.seal_block(else_block);
                                else_block
                            }
                            ElseData::WithElse { else_block } => else_block,
                        };

                        builder.switch_to_block(else_block);

                        // Again, no need to push the parameters for the `else`,
                        // since we already did when we saw the original `if`. See
                        // the comment for translating `Operator::Else` in
                        // `translate_operator` for details.
                    }
                }
                _ => unreachable!(),
            }
        }
        Operator::End => {
            let stack = &mut state.stack;
            let control_stack = &mut state.control_stack;
            let frame = control_stack.pop().unwrap();

            // Now we have to split off the stack the values not used
            // by unreachable code that hasn't been translated
            stack.truncate(frame.original_stack_size());

            // Determine whether the code following this `End` is reachable even
            // without an explicit branch to the frame's exit block.
            let reachable_anyway = match frame {
                // If it is a loop we also have to seal the body loop block
                ControlStackFrame::Loop { header, .. } => {
                    builder.seal_block(header);
                    // And loops can't have branches to the end.
                    false
                }
                // If we never set `consequent_ends_reachable` then that means
                // we are finishing the consequent now, and there was no
                // `else`. Whether the following block is reachable depends only
                // on if the head was reachable.
                ControlStackFrame::If {
                    head_is_reachable,
                    consequent_ends_reachable: None,
                    ..
                } => head_is_reachable,
                // Since we are only in this function when in unreachable code,
                // we know that the alternative just ended unreachable. Whether
                // the following block is reachable depends on if the consequent
                // ended reachable or not.
                ControlStackFrame::If {
                    head_is_reachable,
                    consequent_ends_reachable: Some(consequent_ends_reachable),
                    ..
                } => head_is_reachable && consequent_ends_reachable,
                // All other control constructs are already handled.
                _ => false,
            };

            if frame.exit_is_branched_to() || reachable_anyway {
                builder.switch_to_block(frame.following_code());
                builder.seal_block(frame.following_code());

                // And add the return values of the block but only if the next block is reachable
                // (which corresponds to testing if the stack depth is 1)
                stack.extend_from_slice(builder.block_params(frame.following_code()));
                state.reachable = true;
            }
        }
        _ => {
            // We don't translate because this is unreachable code
        }
    }

    Ok(())
}
1632
1633/// Get the address+offset to use for a heap access.
1634fn get_heap_addr(
1635    heap: ir::Heap,
1636    addr32: ir::Value,
1637    offset: u32,
1638    addr_ty: Type,
1639    builder: &mut FunctionBuilder,
1640) -> (ir::Value, i32) {
1641    use core::cmp::min;
1642
1643    let mut adjusted_offset = u64::from(offset);
1644    let offset_guard_size: u64 = builder.func.heaps[heap].offset_guard_size.into();
1645
1646    // Generate `heap_addr` instructions that are friendly to CSE by checking offsets that are
1647    // multiples of the offset-guard size. Add one to make sure that we check the pointer itself
1648    // is in bounds.
1649    if offset_guard_size != 0 {
1650        adjusted_offset = adjusted_offset / offset_guard_size * offset_guard_size;
1651    }
1652
1653    // For accesses on the outer skirts of the offset-guard pages, we expect that we get a trap
1654    // even if the access goes beyond the offset-guard pages. This is because the first byte
1655    // pointed to is inside the offset-guard pages.
1656    let check_size = min(u64::from(u32::MAX), 1 + adjusted_offset) as u32;
1657    let base = builder.ins().heap_addr(addr_ty, heap, addr32, check_size);
1658
1659    // Native load/store instructions take a signed `Offset32` immediate, so adjust the base
1660    // pointer if necessary.
1661    if offset > i32::MAX as u32 {
1662        // Offset doesn't fit in the load/store instruction.
1663        let adj = builder.ins().iadd_imm(base, i64::from(i32::MAX) + 1);
1664        (adj, (offset - (i32::MAX as u32 + 1)) as i32)
1665    } else {
1666        (base, offset as i32)
1667    }
1668}
1669
1670/// Translate a load instruction.
1671fn translate_load<FE: FuncEnvironment + ?Sized>(
1672    offset: u32,
1673    opcode: ir::Opcode,
1674    result_ty: Type,
1675    builder: &mut FunctionBuilder,
1676    state: &mut FuncTranslationState,
1677    environ: &mut FE,
1678) -> WasmResult<()> {
1679    let addr32 = state.pop1();
1680    // We don't yet support multiple linear memories.
1681    let heap = state.get_heap(builder.func, 0, environ)?;
1682    let (base, offset) = get_heap_addr(heap, addr32, offset, environ.pointer_type(), builder);
1683    // Note that we don't set `is_aligned` here, even if the load instruction's
1684    // alignment immediate says it's aligned, because WebAssembly's immediate
1685    // field is just a hint, while Cranelift's aligned flag needs a guarantee.
1686    let flags = MemFlags::new();
1687    let (load, dfg) = builder
1688        .ins()
1689        .Load(opcode, result_ty, flags, offset.into(), base);
1690    state.push1(dfg.first_result(load));
1691    Ok(())
1692}
1693
1694/// Translate a store instruction.
1695fn translate_store<FE: FuncEnvironment + ?Sized>(
1696    offset: u32,
1697    opcode: ir::Opcode,
1698    builder: &mut FunctionBuilder,
1699    state: &mut FuncTranslationState,
1700    environ: &mut FE,
1701) -> WasmResult<()> {
1702    let (addr32, val) = state.pop2();
1703    let val_ty = builder.func.dfg.value_type(val);
1704
1705    // We don't yet support multiple linear memories.
1706    let heap = state.get_heap(builder.func, 0, environ)?;
1707    let (base, offset) = get_heap_addr(heap, addr32, offset, environ.pointer_type(), builder);
1708    // See the comments in `translate_load` about the flags.
1709    let flags = MemFlags::new();
1710    builder
1711        .ins()
1712        .Store(opcode, val_ty, flags, offset.into(), val, base);
1713    Ok(())
1714}
1715
1716fn translate_icmp(cc: IntCC, builder: &mut FunctionBuilder, state: &mut FuncTranslationState) {
1717    let (arg0, arg1) = state.pop2();
1718    let val = builder.ins().icmp(cc, arg0, arg1);
1719    state.push1(builder.ins().bint(I32, val));
1720}
1721
1722fn translate_vector_icmp(
1723    cc: IntCC,
1724    needed_type: Type,
1725    builder: &mut FunctionBuilder,
1726    state: &mut FuncTranslationState,
1727) {
1728    let (a, b) = state.pop2();
1729    let bitcast_a = optionally_bitcast_vector(a, needed_type, builder);
1730    let bitcast_b = optionally_bitcast_vector(b, needed_type, builder);
1731    state.push1(builder.ins().icmp(cc, bitcast_a, bitcast_b))
1732}
1733
1734fn translate_fcmp(cc: FloatCC, builder: &mut FunctionBuilder, state: &mut FuncTranslationState) {
1735    let (arg0, arg1) = state.pop2();
1736    let val = builder.ins().fcmp(cc, arg0, arg1);
1737    state.push1(builder.ins().bint(I32, val));
1738}
1739
1740fn translate_vector_fcmp(
1741    cc: FloatCC,
1742    needed_type: Type,
1743    builder: &mut FunctionBuilder,
1744    state: &mut FuncTranslationState,
1745) {
1746    let (a, b) = state.pop2();
1747    let bitcast_a = optionally_bitcast_vector(a, needed_type, builder);
1748    let bitcast_b = optionally_bitcast_vector(b, needed_type, builder);
1749    state.push1(builder.ins().fcmp(cc, bitcast_a, bitcast_b))
1750}
1751
1752fn translate_br_if(
1753    relative_depth: u32,
1754    builder: &mut FunctionBuilder,
1755    state: &mut FuncTranslationState,
1756) {
1757    let val = state.pop1();
1758    let (br_destination, inputs) = translate_br_if_args(relative_depth, state);
1759
1760    // Bitcast any vector arguments to their default type, I8X16, before jumping.
1761    let destination_types = builder.func.dfg.block_param_types(br_destination);
1762    bitcast_arguments(inputs, &destination_types[..inputs.len()], builder);
1763
1764    builder.ins().brnz(val, br_destination, inputs);
1765
1766    let next_block = builder.create_block();
1767    builder.ins().jump(next_block, &[]);
1768    builder.seal_block(next_block); // The only predecessor is the current block.
1769    builder.switch_to_block(next_block);
1770}
1771
1772fn translate_br_if_args(
1773    relative_depth: u32,
1774    state: &mut FuncTranslationState,
1775) -> (ir::Block, &mut [ir::Value]) {
1776    let i = state.control_stack.len() - 1 - (relative_depth as usize);
1777    let (return_count, br_destination) = {
1778        let frame = &mut state.control_stack[i];
1779        // The values returned by the branch are still available for the reachable
1780        // code that comes after it
1781        frame.set_branched_to_exit();
1782        let return_count = if frame.is_loop() {
1783            frame.num_param_values()
1784        } else {
1785            frame.num_return_values()
1786        };
1787        (return_count, frame.br_destination())
1788    };
1789    let inputs = state.peekn_mut(return_count);
1790    (br_destination, inputs)
1791}
1792
/// Determine the returned value type of a WebAssembly operator
///
/// Only SIMD operators are mapped; any other operator panics via the
/// `unimplemented!` fallback at the bottom. The returned CLIF type is the
/// lane interpretation the operator works on (e.g. `I8X16` for `i8x16.*`).
fn type_of(operator: &Operator) -> Type {
    match operator {
        // Lane-agnostic whole-vector operations.
        Operator::V128Load { .. }
        | Operator::V128Store { .. }
        | Operator::V128Const { .. }
        | Operator::V128Not
        | Operator::V128And
        | Operator::V128AndNot
        | Operator::V128Or
        | Operator::V128Xor
        | Operator::V128Bitselect => I8X16, // default type representing V128

        // Operations on sixteen 8-bit integer lanes.
        Operator::V8x16Shuffle { .. }
        | Operator::I8x16Splat
        | Operator::V8x16LoadSplat { .. }
        | Operator::I8x16ExtractLaneS { .. }
        | Operator::I8x16ExtractLaneU { .. }
        | Operator::I8x16ReplaceLane { .. }
        | Operator::I8x16Eq
        | Operator::I8x16Ne
        | Operator::I8x16LtS
        | Operator::I8x16LtU
        | Operator::I8x16GtS
        | Operator::I8x16GtU
        | Operator::I8x16LeS
        | Operator::I8x16LeU
        | Operator::I8x16GeS
        | Operator::I8x16GeU
        | Operator::I8x16Neg
        | Operator::I8x16AnyTrue
        | Operator::I8x16AllTrue
        | Operator::I8x16Shl
        | Operator::I8x16ShrS
        | Operator::I8x16ShrU
        | Operator::I8x16Add
        | Operator::I8x16AddSaturateS
        | Operator::I8x16AddSaturateU
        | Operator::I8x16Sub
        | Operator::I8x16SubSaturateS
        | Operator::I8x16SubSaturateU
        | Operator::I8x16MinS
        | Operator::I8x16MinU
        | Operator::I8x16MaxS
        | Operator::I8x16MaxU
        | Operator::I8x16RoundingAverageU
        | Operator::I8x16Mul => I8X16,

        // Operations on eight 16-bit integer lanes.
        Operator::I16x8Splat
        | Operator::V16x8LoadSplat { .. }
        | Operator::I16x8ExtractLaneS { .. }
        | Operator::I16x8ExtractLaneU { .. }
        | Operator::I16x8ReplaceLane { .. }
        | Operator::I16x8Eq
        | Operator::I16x8Ne
        | Operator::I16x8LtS
        | Operator::I16x8LtU
        | Operator::I16x8GtS
        | Operator::I16x8GtU
        | Operator::I16x8LeS
        | Operator::I16x8LeU
        | Operator::I16x8GeS
        | Operator::I16x8GeU
        | Operator::I16x8Neg
        | Operator::I16x8AnyTrue
        | Operator::I16x8AllTrue
        | Operator::I16x8Shl
        | Operator::I16x8ShrS
        | Operator::I16x8ShrU
        | Operator::I16x8Add
        | Operator::I16x8AddSaturateS
        | Operator::I16x8AddSaturateU
        | Operator::I16x8Sub
        | Operator::I16x8SubSaturateS
        | Operator::I16x8SubSaturateU
        | Operator::I16x8MinS
        | Operator::I16x8MinU
        | Operator::I16x8MaxS
        | Operator::I16x8MaxU
        | Operator::I16x8RoundingAverageU
        | Operator::I16x8Mul => I16X8,

        // Operations on four 32-bit integer lanes.
        Operator::I32x4Splat
        | Operator::V32x4LoadSplat { .. }
        | Operator::I32x4ExtractLane { .. }
        | Operator::I32x4ReplaceLane { .. }
        | Operator::I32x4Eq
        | Operator::I32x4Ne
        | Operator::I32x4LtS
        | Operator::I32x4LtU
        | Operator::I32x4GtS
        | Operator::I32x4GtU
        | Operator::I32x4LeS
        | Operator::I32x4LeU
        | Operator::I32x4GeS
        | Operator::I32x4GeU
        | Operator::I32x4Neg
        | Operator::I32x4AnyTrue
        | Operator::I32x4AllTrue
        | Operator::I32x4Shl
        | Operator::I32x4ShrS
        | Operator::I32x4ShrU
        | Operator::I32x4Add
        | Operator::I32x4Sub
        | Operator::I32x4Mul
        | Operator::I32x4MinS
        | Operator::I32x4MinU
        | Operator::I32x4MaxS
        | Operator::I32x4MaxU
        | Operator::F32x4ConvertI32x4S
        | Operator::F32x4ConvertI32x4U => I32X4,

        // Operations on two 64-bit integer lanes.
        Operator::I64x2Splat
        | Operator::V64x2LoadSplat { .. }
        | Operator::I64x2ExtractLane { .. }
        | Operator::I64x2ReplaceLane { .. }
        | Operator::I64x2Neg
        | Operator::I64x2AnyTrue
        | Operator::I64x2AllTrue
        | Operator::I64x2Shl
        | Operator::I64x2ShrS
        | Operator::I64x2ShrU
        | Operator::I64x2Add
        | Operator::I64x2Sub
        | Operator::F64x2ConvertI64x2S
        | Operator::F64x2ConvertI64x2U => I64X2,

        // Operations on four 32-bit float lanes.
        Operator::F32x4Splat
        | Operator::F32x4ExtractLane { .. }
        | Operator::F32x4ReplaceLane { .. }
        | Operator::F32x4Eq
        | Operator::F32x4Ne
        | Operator::F32x4Lt
        | Operator::F32x4Gt
        | Operator::F32x4Le
        | Operator::F32x4Ge
        | Operator::F32x4Abs
        | Operator::F32x4Neg
        | Operator::F32x4Sqrt
        | Operator::F32x4Add
        | Operator::F32x4Sub
        | Operator::F32x4Mul
        | Operator::F32x4Div
        | Operator::F32x4Min
        | Operator::F32x4Max
        | Operator::I32x4TruncSatF32x4S
        | Operator::I32x4TruncSatF32x4U => F32X4,

        // Operations on two 64-bit float lanes.
        Operator::F64x2Splat
        | Operator::F64x2ExtractLane { .. }
        | Operator::F64x2ReplaceLane { .. }
        | Operator::F64x2Eq
        | Operator::F64x2Ne
        | Operator::F64x2Lt
        | Operator::F64x2Gt
        | Operator::F64x2Le
        | Operator::F64x2Ge
        | Operator::F64x2Abs
        | Operator::F64x2Neg
        | Operator::F64x2Sqrt
        | Operator::F64x2Add
        | Operator::F64x2Sub
        | Operator::F64x2Mul
        | Operator::F64x2Div
        | Operator::F64x2Min
        | Operator::F64x2Max
        | Operator::I64x2TruncSatF64x2S
        | Operator::I64x2TruncSatF64x2U => F64X2,

        _ => unimplemented!(
            "Currently only SIMD instructions are mapped to their return type; the \
             following instruction is not mapped: {:?}",
            operator
        ),
    }
}
1969
1970/// Some SIMD operations only operate on I8X16 in CLIF; this will convert them to that type by
1971/// adding a raw_bitcast if necessary.
1972pub fn optionally_bitcast_vector(
1973    value: Value,
1974    needed_type: Type,
1975    builder: &mut FunctionBuilder,
1976) -> Value {
1977    if builder.func.dfg.value_type(value) != needed_type {
1978        builder.ins().raw_bitcast(needed_type, value)
1979    } else {
1980        value
1981    }
1982}
1983
1984/// A helper for popping and bitcasting a single value; since SIMD values can lose their type by
1985/// using v128 (i.e. CLIF's I8x16) we must re-type the values using a bitcast to avoid CLIF
1986/// typing issues.
1987fn pop1_with_bitcast(
1988    state: &mut FuncTranslationState,
1989    needed_type: Type,
1990    builder: &mut FunctionBuilder,
1991) -> Value {
1992    optionally_bitcast_vector(state.pop1(), needed_type, builder)
1993}
1994
1995/// A helper for popping and bitcasting two values; since SIMD values can lose their type by
1996/// using v128 (i.e. CLIF's I8x16) we must re-type the values using a bitcast to avoid CLIF
1997/// typing issues.
1998fn pop2_with_bitcast(
1999    state: &mut FuncTranslationState,
2000    needed_type: Type,
2001    builder: &mut FunctionBuilder,
2002) -> (Value, Value) {
2003    let (a, b) = state.pop2();
2004    let bitcast_a = optionally_bitcast_vector(a, needed_type, builder);
2005    let bitcast_b = optionally_bitcast_vector(b, needed_type, builder);
2006    (bitcast_a, bitcast_b)
2007}
2008
2009/// A helper for bitcasting a sequence of values (e.g. function arguments). If a value is a
2010/// vector type that does not match its expected type, this will modify the value in place to point
2011/// to the result of a `raw_bitcast`. This conversion is necessary to translate Wasm code that
2012/// uses `V128` as function parameters (or implicitly in block parameters) and still use specific
2013/// CLIF types (e.g. `I32X4`) in the function body.
2014pub fn bitcast_arguments(
2015    arguments: &mut [Value],
2016    expected_types: &[Type],
2017    builder: &mut FunctionBuilder,
2018) {
2019    assert_eq!(arguments.len(), expected_types.len());
2020    for (i, t) in expected_types.iter().enumerate() {
2021        if t.is_vector() {
2022            assert!(
2023                builder.func.dfg.value_type(arguments[i]).is_vector(),
2024                "unexpected type mismatch: expected {}, argument {} was actually of type {}",
2025                t,
2026                arguments[i],
2027                builder.func.dfg.value_type(arguments[i])
2028            );
2029            arguments[i] = optionally_bitcast_vector(arguments[i], *t, builder)
2030        }
2031    }
2032}
2033
2034/// A helper to extract all the `Type` listings of each variable in `params`
2035/// for only parameters the return true for `is_wasm`, typically paired with
2036/// `is_wasm_return` or `is_wasm_parameter`.
2037pub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {
2038    let mut ret = Vec::with_capacity(params.len());
2039    for (i, param) in params.iter().enumerate() {
2040        if is_wasm(i) {
2041            ret.push(param.value_type);
2042        }
2043    }
2044    ret
2045}