// concordium_wasm/machine.rs

1// TODO:
2// - Read all data from instructions at once (e.g., three locals).
3// - GlobalGet can be short circuited if we store all constants in the module in
4//   one place together with globals.
5
6//! An implementation of the abstract machine that can run artifacts.
7//! This module defines types related to code execution. The functions to run
8//! code are defined as methods on the [`Artifact`] type, e.g.,
9//! [`Artifact::run`].
10
11use crate::{
12    artifact::{StackValue, *},
13    constants::{MAX_NUM_PAGES, PAGE_SIZE},
14    types::*,
15};
16use anyhow::{anyhow, bail, ensure};
17use std::{convert::TryInto, io::Write};
18
#[cfg(not(target_endian = "little"))]
compile_error!("The interpreter only supports little endian platforms.");
21
/// An empty type used when no interrupt is possible by a host function call.
/// Since this type is uninhabited, no value of it can ever be produced, so a
/// `Host` implementation using it as its `Interrupt` type can never interrupt.
#[derive(Debug, Copy, Clone)]
pub enum NoInterrupt {}
25
/// The host that can process calls to external, host, functions.
/// This is a Wasm concept. Wasm modules are self-contained and instructions can
/// only modify the Wasm memory and stack and cannot access information about
/// the external world, such as current time for example. Host functions fill
/// that role.
pub trait Host<I> {
    /// The type of interrupts that host calls can produce. Execution is
    /// suspended with a value of this type when [`call`](Host::call) returns
    /// `Ok(Some(_))`.
    type Interrupt;
    /// Charge the given amount of energy for the initial memory.
    /// The argument is the number of pages.
    fn tick_initial_memory(&mut self, num_pages: u32) -> RunResult<()>;
    /// Call the specified host function, giving it access to the current memory
    /// and stack. The return value of `Ok(None)` signifies that execution
    /// succeeded and the machine should proceed, the return value of
    /// `Ok(Some(i))` indicates that an interrupt was triggered by the host
    /// function, and the return value of `Err(_)` signifies a trap, i.e., the
    /// host function was called with illegal arguments.
    ///
    /// Interrupts are used by Concordium to execute inter-contract calls and
    /// other similar operations. When a contract attempts to invoke another
    /// contract an interrupt is triggered and the invocation is handled,
    /// along with any recursion. Execution of the original contract resumes
    /// after handling the interrupt.
    fn call(
        &mut self,
        f: &I,
        memory: &mut [u8],
        stack: &mut RuntimeStack,
    ) -> RunResult<Option<Self::Interrupt>>;

    /// Consume a given amount of NRG.
    fn tick_energy(&mut self, _energy: u64) -> RunResult<()>;

    /// Track a function call. This is called upon entry to a function. The
    /// corresponding [`track_return`](Host::track_return) is called upon
    /// return from a function. These two together can be used to track function
    /// call stack depth. [`track_call`](Host::track_call) can return an `Err`
    /// to indicate that a call stack depth was exceeded. This will lead to
    /// immediate termination of execution.
    fn track_call(&mut self) -> RunResult<()>;

    /// Called when a function returns. See documentation of
    /// [`track_call`](Host::track_call) for details.
    fn track_return(&mut self);
}
70
/// Result of execution. Runtime exceptions are returned as `Err(_)`.
/// This includes traps, illegal memory accesses, etc., as well as any error
/// propagated from the [`Host`] (e.g., from `tick_energy` or `track_call`).
pub type RunResult<A> = anyhow::Result<A>;
74
/// Configuration that can be run. This maintains the snapshot of the state of
/// the machine, such as the current instruction pointer, current memory
/// contents, the function stack, etc.
#[derive(Debug)]
pub struct RunConfig {
    /// Current value of the program counter, a byte offset into the current
    /// function's instruction stream.
    pc:               usize,
    /// Index of the current instruction list that we are executing
    /// (instructions of the current function). Note that this is the index in
    /// the list of defined functions. Imported functions do not count towards
    /// it. It is assumed that this index points to a valid function in the
    /// artifact's list of functions and the interpreter is subject to undefined
    /// behaviour if this is not the case.
    instructions_idx: usize,
    /// Stack of function frames of the callers of the current function.
    function_frames:  Vec<FunctionState>,
    /// Location where the return value must be written in the locals array
    /// **after** a return is called. If `None` the function has no return
    /// value.
    return_type:      Option<(usize, ValueType)>,
    /// Current state of the (linear Wasm) memory.
    memory:           Vec<u8>,
    /// All the "locals". Including parameters, declared locals and temporary
    /// ones. Frames of all active functions live here contiguously.
    locals_vec:       Vec<StackValue>,
    /// Position where the locals for the current frame start.
    locals_base:      usize,
    /// Current values of globals.
    globals:          Vec<StackValue>,
    /// Configuration parameter, the maximum size of the memory execution is
    /// allowed to allocate. This is fixed at startup and cannot be changed
    /// during execution.
    max_memory:       usize,
    /// In case of an interrupt, upon resume there might be a return value
    /// produced by the interrupt. This is the location in which the return
    /// value is written. In case there is no return value for that
    /// particular function this is set to a dummy location 0 and not used upon
    /// resume. The type of the host function that is being called determines
    /// which case we're in so we do not record the information here.
    return_value_loc: usize,
}
116
impl RunConfig {
    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    /// Record the response produced by an interrupt so execution can resume.
    /// This is typically used when the interrupt produced a response.
    ///
    /// Note that despite the name this does not push onto a stack: it writes
    /// the value into the local slot reserved for the return value of the
    /// interrupted call (`locals_base + return_value_loc`).
    pub fn push_value<F>(&mut self, f: F)
    where
        StackValue: From<F>, {
        let v: StackValue = f.into();
        self.locals_vec[self.locals_base + self.return_value_loc] = v;
    }
}
128
#[derive(Debug)]
/// A successful outcome of code execution.
pub enum ExecutionOutcome<Interrupt> {
    /// Execution was successful and the function terminated normally.
    Success {
        /// Result of execution of the function. If the function has unit result
        /// type then the result is [`None`], otherwise it is the value.
        result: Option<Value>,
        /// Final memory of the machine.
        memory: Vec<u8>,
    },
    /// Execution was interrupted in the given state. It can be resumed (via
    /// [`Artifact::run_config`]). There
    /// is no resulting value since execution did not yet complete.
    Interrupted {
        /// The interrupt reason provided by the host function.
        reason: Interrupt,
        /// The current configuration that can be used to resume execution.
        config: RunConfig,
    },
}
149
#[derive(Debug)]
/// State of a function recorded in the function frame stack.
/// This records enough information to allow us to resume execution upon return
/// from a nested function call.
struct FunctionState {
    /// The program counter relative to the instruction list, i.e., the
    /// return address at which the caller resumes.
    pc:               usize,
    /// Index (into the artifact's list of defined functions) of the
    /// instructions of the function.
    instructions_idx: usize,
    /// Index in the stack where the locals start. We have a single stack for
    /// the entire execution and after entering a function all the locals
    /// are pushed on first (this includes function parameters).
    locals_base:      usize,
    /// Location where the return value must be written in the locals array
    /// **after** a return is called, together with the return type of the
    /// function. If `None` the function has no return value.
    return_type:      Option<(usize, ValueType)>,
}
168
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
/// A Wasm typed value. The values are not inherently signed or unsigned,
/// but we choose signed integers as the representation type.
///
/// This works well on any two's complement platform.
pub enum Value {
    /// A 32-bit Wasm value.
    I32(i32),
    /// A 64-bit Wasm value.
    I64(i64),
}
178
179impl From<Value> for ValueType {
180    fn from(v: Value) -> Self {
181        match v {
182            Value::I32(_) => ValueType::I32,
183            Value::I64(_) => ValueType::I64,
184        }
185    }
186}
187
188impl From<Value> for i64 {
189    fn from(v: Value) -> Self {
190        match v {
191            Value::I32(x) => i64::from(x),
192            Value::I64(x) => x,
193        }
194    }
195}
196
/// A runtime stack. This contains both the stack in a function, as well as all
/// the function parameters and locals of the function.
/// In this interpreter it is used to pass parameters to, and receive results
/// from, host function calls.
#[derive(Debug)]
pub struct RuntimeStack {
    /// The vector containing the whole stack.
    stack: Vec<StackValue>,
}
204
#[derive(Debug)]
/// A runtime error that we impose on top of the Wasm spec.
pub enum RuntimeError {
    /// Calling an imported function directly is not supported.
    /// See the `ensure!` in [`Artifact::run`] where this is raised.
    DirectlyCallImport,
}
211
212impl std::fmt::Display for RuntimeError {
213    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
214        match self {
215            RuntimeError::DirectlyCallImport => {
216                write!(f, "Calling an imported function directly is not supported.")
217            }
218        }
219    }
220}
221
impl RuntimeStack {
    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    /// Remove and return an element from the stack **assuming the stack has at
    /// least one element.**
    ///
    /// # Panics
    /// Panics (does not trap) if the stack is empty.
    pub fn pop(&mut self) -> StackValue { self.stack.pop().expect("Stack not empty") }

    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    /// Push an element onto the stack.
    pub fn push(&mut self, x: StackValue) { self.stack.push(x); }

    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    /// Push a value onto the stack, as long as it is convertible into a
    /// [`StackValue`].
    pub fn push_value<F>(&mut self, f: F)
    where
        StackValue: From<F>, {
        self.push(StackValue::from(f))
    }

    /// **Remove** and return the top of the stack, **assuming the stack is not
    /// empty.**
    ///
    /// # Safety
    /// This function is safe provided
    /// - the stack is not empty
    /// - top of the stack contains a 32-bit value.
    pub unsafe fn pop_u32(&mut self) -> u32 { self.pop().short as u32 }

    /// Return **without removing** the top of the stack, **assuming the stack
    /// is not empty.**
    ///
    /// # Safety
    /// This function is safe provided
    /// - the stack is not empty
    /// - top of the stack contains a 32-bit value.
    pub unsafe fn peek_u32(&mut self) -> u32 {
        self.stack.last().expect("Non-empty stack").short as u32
    }

    /// **Remove** and return the top of the stack, **assuming the stack is not
    /// empty.**
    ///
    /// # Safety
    /// This function is safe provided
    /// - the stack is not empty
    /// - top of the stack contains a 64-bit value.
    pub unsafe fn pop_u64(&mut self) -> u64 { self.pop().long as u64 }
}
270
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Read a little-endian `u16` immediate from the instruction stream and
/// advance the instruction pointer past it.
///
/// The caller must guarantee there are at least two readable bytes at `*pc`.
fn get_u16(pc: &mut *const u8) -> u16 {
    // SAFETY: relies on the caller pointing `pc` into a valid instruction
    // stream with a 2-byte immediate at the current position.
    unsafe {
        let value = pc.cast::<u16>().read_unaligned();
        *pc = pc.add(2);
        value
    }
}
277
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Read a little-endian `u32` immediate from the instruction stream and
/// advance the instruction pointer past it.
///
/// The caller must guarantee there are at least four readable bytes at `*pc`.
fn get_u32(pc: &mut *const u8) -> u32 {
    // SAFETY: relies on the caller pointing `pc` into a valid instruction
    // stream with a 4-byte immediate at the current position.
    unsafe {
        let value = pc.cast::<u32>().read_unaligned();
        *pc = pc.add(4);
        value
    }
}
284
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Read a little-endian `i32` immediate from the instruction stream and
/// advance the instruction pointer past it.
///
/// The caller must guarantee there are at least four readable bytes at `*pc`.
fn get_i32(pc: &mut *const u8) -> i32 {
    // SAFETY: relies on the caller pointing `pc` into a valid instruction
    // stream with a 4-byte immediate at the current position.
    unsafe {
        let value = pc.cast::<i32>().read_unaligned();
        *pc = pc.add(4);
        value
    }
}
291
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Read a register/constant operand from the instruction stream and resolve
/// it. A non-negative operand `v` refers to register (local) `v`; a negative
/// operand refers to the constant table at index `-(v + 1)`.
///
/// The instruction pointer is advanced past the 4-byte operand.
///
/// The unchecked accesses below are sound only for well-formed artifacts
/// where every operand is in bounds for the current function (see the
/// assumptions documented on `RunConfig::instructions_idx`).
fn get_local(constants: &[i64], locals: &[StackValue], pc: &mut *const u8) -> StackValue {
    let v = get_i32(pc);
    if v >= 0 {
        let v = v as usize;
        // assert!(v < locals.len());
        *unsafe { locals.get_unchecked(v) }
    } else {
        // Negative operands index the constant table, offset by one so that
        // -1 maps to constant 0.
        let v = (-(v + 1)) as usize;
        // assert!((v as usize) < constants.len());
        let v = unsafe { constants.get_unchecked(v) };
        StackValue::from(*v)
    }
}
306
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Read a register operand from the instruction stream and resolve it to a
/// mutable reference. Advances the instruction pointer past the 4-byte
/// operand.
///
/// Unlike `get_local`, a write target can never be a constant, so the
/// operand is assumed non-negative. The unchecked access is sound only for
/// well-formed artifacts where all operands are in bounds.
fn get_local_mut<'a>(locals: &'a mut [StackValue], pc: &mut *const u8) -> &'a mut StackValue {
    let v = get_i32(pc);
    // Targets should never be constants, so we should always have a non-negative
    // value.
    // assert!(v >= 0);
    unsafe { locals.get_unchecked_mut(v as usize) }
}
315
316#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
317fn read_u8(bytes: &[u8], pos: usize) -> RunResult<u8> {
318    bytes.get(pos).copied().ok_or_else(|| anyhow!("Memory access out of bounds."))
319}
320
321#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
322fn read_u16(bytes: &[u8], pos: usize) -> RunResult<u16> {
323    ensure!(pos + 2 <= bytes.len(), "Memory access out of bounds.");
324    let r = unsafe { bytes.as_ptr().add(pos).cast::<u16>().read_unaligned() };
325    Ok(r)
326}
327
328#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
329fn read_u32(bytes: &[u8], pos: usize) -> RunResult<u32> {
330    ensure!(pos + 4 <= bytes.len(), "Memory access out of bounds.");
331    let r = unsafe { bytes.as_ptr().add(pos).cast::<u32>().read_unaligned() };
332    Ok(r)
333}
334
335#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
336fn read_i8(bytes: &[u8], pos: usize) -> RunResult<i8> {
337    bytes.get(pos).map(|&x| x as i8).ok_or_else(|| anyhow!("Memory access out of bounds."))
338}
339
340#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
341fn read_i16(bytes: &[u8], pos: usize) -> RunResult<i16> {
342    ensure!(pos + 2 <= bytes.len(), "Memory access out of bounds.");
343    let r = unsafe { bytes.as_ptr().add(pos).cast::<i16>().read_unaligned() };
344    Ok(r)
345}
346
347#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
348fn read_i32(bytes: &[u8], pos: usize) -> RunResult<i32> {
349    ensure!(pos + 4 <= bytes.len(), "Memory access out of bounds.");
350    let r = unsafe { bytes.as_ptr().add(pos).cast::<i32>().read_unaligned() };
351    Ok(r)
352}
353
354#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
355fn read_i64(bytes: &[u8], pos: usize) -> RunResult<i64> {
356    ensure!(pos + 8 <= bytes.len(), "Memory access out of bounds.");
357    let r = unsafe { bytes.as_ptr().add(pos).cast::<i64>().read_unaligned() };
358    Ok(r)
359}
360
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Retrieve data for processing a memory load instruction.
/// The return value is a pair of a (mutable) pointer to the register where the
/// result should be written, and the offset in memory where the value should be
/// read from.
///
/// The instruction pointer is advanced.
fn memory_load<'a>(
    constants: &[i64],
    locals: &'a mut [StackValue],
    pc: &mut *const u8,
) -> (&'a mut StackValue, usize) {
    // Static offset encoded in the instruction.
    let offset = get_u32(pc);
    // Dynamic base address taken from a register or constant.
    let base = get_local(constants, locals, pc);
    // Register the loaded value will be written into.
    let result = get_local_mut(locals, pc);
    // Reading `short` assumes the base holds a 32-bit value.
    // NOTE(review): the addition of two u32-derived values assumes usize is
    // wider than 33 bits (i.e., a 64-bit target) so it cannot wrap — confirm
    // the supported target platforms.
    let pos = unsafe { base.short } as u32 as usize + offset as usize;
    (result, pos)
}
379
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Retrieve data for processing a memory store instruction.
/// The return value is a pair of a value to be stored and the place in memory
/// where the value should be written to.
///
/// The instruction pointer is advanced.
fn memory_store(
    constants: &[i64],
    locals: &[StackValue],
    pc: &mut *const u8,
) -> (StackValue, usize) {
    // Static offset encoded in the instruction, then the value to store,
    // then the dynamic base address.
    let offset = get_u32(pc);
    let value = get_local(constants, locals, pc);
    let base = get_local(constants, locals, pc);
    // Reading `short` assumes the base holds a 32-bit value.
    // NOTE(review): as in `memory_load`, the addition assumes a 64-bit target
    // so it cannot wrap — confirm the supported target platforms.
    (value, unsafe { base.short } as u32 as usize + offset as usize)
}
394
395#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
396fn write_memory_at(memory: &mut [u8], pos: usize, bytes: &[u8]) -> RunResult<()> {
397    let end = pos + bytes.len();
398    ensure!(end <= memory.len(), "Illegal memory access.");
399    memory[pos..end].copy_from_slice(bytes);
400    Ok(())
401}
402
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a unary i32 instruction: read the source operand, apply `f`, and
/// write the result to the target register. Advances the instruction pointer
/// past both operands.
fn unary_i32(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i32) -> i32,
) {
    let source = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `short` assumes the operand holds a 32-bit value.
    target.short = f(unsafe { source.short });
}
414
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a unary i64 instruction: read the source operand, apply `f`, and
/// write the result to the target register. Advances the instruction pointer
/// past both operands.
fn unary_i64(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i64) -> i64,
) {
    let source = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `long` assumes the operand holds a 64-bit value.
    target.long = f(unsafe { source.long });
}
426
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a binary i32 instruction: read both operands (the right operand is
/// encoded first), combine them with `f`, and write the result to the target
/// register. Advances the instruction pointer past all three operands.
fn binary_i32(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i32, i32) -> i32,
) {
    let right = get_local(constants, locals, pc);
    let left = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `short` assumes both operands hold 32-bit values.
    target.short = f(unsafe { left.short }, unsafe { right.short });
}
439
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a partial binary i32 instruction: like `binary_i32`, but `f` may
/// fail (return [`None`]), in which case execution traps with a runtime
/// error.
fn binary_i32_partial(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i32, i32) -> Option<i32>,
) -> RunResult<()> {
    let right = get_local(constants, locals, pc);
    let left = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `short` assumes both operands hold 32-bit values.
    target.short = f(unsafe { left.short }, unsafe { right.short })
        .ok_or_else(|| anyhow!("Runtime exception in i32 binary."))?;
    Ok(())
}
454
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a binary i64 instruction: read both operands (the right operand is
/// encoded first), combine them with `f`, and write the result to the target
/// register. Advances the instruction pointer past all three operands.
fn binary_i64(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i64, i64) -> i64,
) {
    let right = get_local(constants, locals, pc);
    let left = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `long` assumes both operands hold 64-bit values.
    target.long = f(unsafe { left.long }, unsafe { right.long });
}
467
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a partial binary i64 instruction: like `binary_i64`, but `f` may
/// fail (return [`None`]), in which case execution traps with a runtime
/// error.
fn binary_i64_partial(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i64, i64) -> Option<i64>,
) -> RunResult<()> {
    let right = get_local(constants, locals, pc);
    let left = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `long` assumes both operands hold 64-bit values.
    target.long = f(unsafe { left.long }, unsafe { right.long })
        .ok_or_else(|| anyhow!("Runtime exception in i64 binary"))?;
    Ok(())
}
482
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
/// Execute a binary i64 test/comparison instruction: both operands are i64,
/// but the result is an i32 written to the target register.
fn binary_i64_test(
    constants: &[i64],
    locals: &mut [StackValue],
    pc: &mut *const u8,
    f: impl Fn(i64, i64) -> i32,
) {
    let right = get_local(constants, locals, pc);
    let left = get_local(constants, locals, pc);
    let target = get_local_mut(locals, pc);
    // Reading `long` assumes both operands hold 64-bit values; the i32
    // result goes into the `short` field.
    target.short = f(unsafe { left.long }, unsafe { right.long });
}
495
496impl<I: TryFromImport, R: RunnableCode> Artifact<I, R> {
    /// Attempt to run the entrypoint using the supplied arguments. The
    /// arguments are
    ///
    /// - `host` - the structure that resolves calls to external, host,
    ///   functions.
    /// - `name` - the name of the entrypoint to invoke
    /// - `args` - a list of arguments to the entrypoint. The argument list must
    ///   match the declared type of the entrypoint. If this is not the case
    ///   then execution will fail with an error.
    ///
    /// Note that this method at present cannot be used to directly call a host
    /// function. Only an entrypoint defined in the Wasm module can be called.
    pub fn run<Q: std::fmt::Display + Ord + ?Sized, H: Host<I>>(
        &self,
        host: &mut H,
        name: &Q,
        args: &[Value],
    ) -> RunResult<ExecutionOutcome<H::Interrupt>>
    where
        Name: std::borrow::Borrow<Q>, {
        let start = *self.get_entrypoint_index(name)?;
        // FIXME: The next restriction could easily be lifted, but it is not a problem
        // for now.
        ensure!(start as usize >= self.imports.len(), RuntimeError::DirectlyCallImport);
        // Translate the function index into an index into the list of defined
        // (non-imported) functions.
        let instructions_idx = start as usize - self.imports.len();
        let outer_function = &self.code[instructions_idx]; // safe because the artifact should be well-formed.
        let num_args: u32 = args.len().try_into()?;
        ensure!(
            outer_function.num_params() == num_args,
            "The number of arguments does not match the number of parameters {} != {}.",
            num_args,
            outer_function.num_params(),
        );
        // Type-check the supplied arguments against the declared parameters.
        for (p, actual) in outer_function.params().iter().zip(args.iter()) {
            // the first num_params locals are arguments
            let actual_ty = ValueType::from(*actual);
            ensure!(
                *p == actual_ty,
                "Argument of incorrect type: actual {:#?}, expected {:#?}.",
                actual_ty,
                *p
            )
        }

        let globals = self.global.inits.iter().copied().map(StackValue::from).collect::<Vec<_>>();
        // Allocate all registers for the entrypoint up front.
        // NOTE(review): this assumes the all-zero bit pattern is a valid
        // `StackValue` — confirm against the artifact module's definition.
        let mut locals: Vec<StackValue> =
            vec![unsafe { std::mem::zeroed() }; outer_function.num_registers() as usize];
        // The first num_params registers hold the arguments.
        for (&arg, place) in args.iter().zip(&mut locals) {
            *place = match arg {
                Value::I32(v) => StackValue::from(v),
                Value::I64(v) => StackValue::from(v),
            };
        }
        let memory = {
            if let Some(m) = self.memory.as_ref() {
                host.tick_initial_memory(m.init_size)?;
                // This is safe since maximum initial memory is limited to 32 pages.
                // Allocating the maximum up front means `memory.grow` never
                // needs to reallocate; `set_len` below only shrinks the
                // length, and all bytes are already initialized to 0.
                let mut memory = vec![0u8; (MAX_NUM_PAGES * PAGE_SIZE) as usize];
                unsafe {
                    memory.set_len((m.init_size * PAGE_SIZE) as usize);
                }
                // Copy the module's data segments into place.
                for data in m.init.iter() {
                    (&mut memory[data.offset as usize..]).write_all(&data.init)?;
                }
                memory
            } else {
                Vec::new()
            }
        };

        let max_memory = self.memory.as_ref().map(|x| x.max_size).unwrap_or(0) as usize;

        // Execution starts at the beginning of the entrypoint's instructions.
        let pc = 0;

        let function_frames: Vec<FunctionState> = Vec::new();
        // The entrypoint's return value, if any, is written to register 0.
        let return_type = match outer_function.return_type() {
            BlockType::EmptyType => None,
            BlockType::ValueType(vt) => Some((0, vt)),
        };
        let locals_base = 0;

        let config = RunConfig {
            pc,
            instructions_idx,
            function_frames,
            return_type,
            memory,
            locals_vec: locals,
            locals_base,
            globals,
            max_memory,
            return_value_loc: 0, // not used
        };
        self.run_config(host, config)
    }
592
593    /// Returns the index of the given entrypoint if it exists.
594    fn get_entrypoint_index<Q>(&self, name: &Q) -> RunResult<&FuncIndex>
595    where
596        Q: std::fmt::Display + Ord + ?Sized,
597        Name: std::borrow::Borrow<Q>, {
598        self.export
599            .get(name)
600            .ok_or_else(|| anyhow!("Trying to invoke a method that does not exist: {}.", name))
601    }
602
603    /// Returns if the given entrypoint name exists in the artifact.
604    pub fn has_entrypoint<Q>(&self, name: &Q) -> bool
605    where
606        Q: std::fmt::Display + Ord + ?Sized,
607        Name: std::borrow::Borrow<Q>, {
608        self.get_entrypoint_index(name).is_ok()
609    }
610
611    /// Run a [configuration](RunConfig) using the provided `host` to resolve
612    /// calls to external functions.
613    ///
614    /// This executes code either until
615    /// - the execution terminates with a result
616    /// - there is an error
617    /// - the host triggers an interrupt as a result of an external call.
618    pub fn run_config<H: Host<I>>(
619        &self,
620        host: &mut H,
621        config: RunConfig,
622    ) -> RunResult<ExecutionOutcome<H::Interrupt>> {
623        // we deliberately deconstruct the struct here instead of having mutable
624        // references to fields here to improve performance. On some benchmarks
625        // instruction execution is 30% slower if we keep references to the config
626        // struct instead of deconstructing it here. Why that is I don't know, but it
627        // likely has to do with memory layout.
628        let RunConfig {
629            pc,
630            mut instructions_idx,
631            mut function_frames,
632            mut return_type,
633            mut memory,
634            mut locals_vec,
635            mut locals_base,
636            mut globals,
637            max_memory,
638            return_value_loc: _,
639        } = config;
640
641        // Stack used for host function calls, to pass parameters.
642        let mut stack = RuntimeStack {
643            // We preallocate the stack so that all host functions
644            // that we have will not need further allocations.
645            stack: vec![unsafe { std::mem::zeroed() }; 10],
646        };
647
648        let mut locals = &mut locals_vec[locals_base..];
649        // the use of get_unchecked here is safe if the caller constructs the Runconfig
650        // in a protocol compliant way.
651        // The only way to construct a RunConfig is in this module (since all the fields
652        // are private), and the only place it is constructed is in the `run`
653        // method above, where the precondition is checked.
654        let code = unsafe { self.code.get_unchecked(instructions_idx) };
655        let mut constants = code.constants();
656        let mut instructions = code.code();
657        let mut pc = unsafe { instructions.as_ptr().add(pc) };
658        'outer: loop {
659            let instr = unsafe { *pc };
660            pc = unsafe { pc.add(1) };
661            // FIXME: The unsafe here is a bit wrong, but it is much faster than using
662            // InternalOpcode::try_from(instr). About 25% faster on a fibonacci test.
663            // The ensure here guarantees that the transmute is safe, provided that
664            // InternalOpcode stays as it is.
665            // ensure!(instr <= InternalOpcode::I64ExtendI32U as u8, "Illegal opcode.");
666            match unsafe { std::mem::transmute::<u8, InternalOpcode>(instr) } {
667                InternalOpcode::Unreachable => bail!("Unreachable."),
668                InternalOpcode::If => {
669                    let condition = get_local(constants, locals, &mut pc);
670                    let else_target = get_u32(&mut pc);
671                    if unsafe { condition.short } == 0 {
672                        // jump to the else branch.
673                        pc = unsafe { instructions.as_ptr().add(else_target as usize) };
674                    } // else do nothing and start executing the if branch
675                }
676                InternalOpcode::Br => {
677                    // we could optimize this for the common case of jumping to end/beginning of a
678                    // current block.
679                    let target = get_u32(&mut pc);
680                    pc = unsafe { instructions.as_ptr().add(target as usize) };
681                }
682                InternalOpcode::BrIf => {
683                    // we could optimize this for the common case of jumping to end/beginning of a
684                    // current block.
685                    let target = get_u32(&mut pc);
686                    let condition = get_local(constants, locals, &mut pc);
687                    if unsafe { condition.short } != 0 {
688                        pc = unsafe { instructions.as_ptr().add(target as usize) };
689                    } // else do nothing
690                }
691                InternalOpcode::BrTable => {
692                    let condition = get_local(constants, locals, &mut pc);
693                    let num_labels = get_u16(&mut pc);
694                    let top: u32 = unsafe { condition.short } as u32;
695                    if top < u32::from(num_labels) {
696                        pc = unsafe { pc.add((top as usize + 1) * 4) }; // the +1 is for the
697                                                                        // default branch.
698                    } // else use default branch
699                    let target = get_u32(&mut pc);
700                    pc = unsafe { instructions.as_ptr().add(target as usize) };
701                }
702                InternalOpcode::BrTableCarry => {
703                    let condition = get_local(constants, locals, &mut pc);
704                    let copy_source = get_local(constants, locals, &mut pc);
705                    let num_labels = get_u16(&mut pc);
706                    let top: u32 = unsafe { condition.short } as u32;
707                    if top < u32::from(num_labels) {
708                        pc = unsafe { pc.add((top as usize + 1) * 8) }; // the +1 is for the default branch.
709                    } // else use default branch
710                    let copy_target = get_local_mut(locals, &mut pc);
711                    *copy_target = copy_source;
712                    let target = get_u32(&mut pc);
713                    pc = unsafe { instructions.as_ptr().add(target as usize) };
714                }
715                InternalOpcode::Copy => {
716                    let copy_source = get_local(constants, locals, &mut pc);
717                    let copy_target = get_local_mut(locals, &mut pc);
718                    *copy_target = copy_source;
719                }
                InternalOpcode::Return => {
                    host.track_return();
                    // Pop the caller's saved frame. If there is none we were in the
                    // entry function, so execution of the artifact is complete.
                    if let Some(top_frame) = function_frames.pop() {
                        // Make sure the return value is at the right place
                        // for the callee to continue.
                        if let Some((place, _)) = return_type {
                            locals_vec[top_frame.locals_base + place] = locals[0];
                        }
                        instructions_idx = top_frame.instructions_idx;
                        // the use of get_unchecked here is entirely safe. The only way for the
                        // index to get on the stack is if we have been
                        // executing that function already. Hence we must be able to look it up
                        // again. The only way this property would fail to
                        // hold is if somebody else was modifying the artifact's list of functions
                        // at the same time. That would lead to other
                        // problems as well and is not possible in safe Rust anyhow.
                        let code = unsafe { self.code.get_unchecked(instructions_idx) };
                        instructions = code.code();
                        // Resume the caller at its saved byte offset.
                        pc = unsafe { instructions.as_ptr().add(top_frame.pc) };
                        constants = code.constants();
                        return_type = top_frame.return_type;
                        // truncate all the locals that are above what we need at present.
                        // SAFETY: this only shrinks the vector; NOTE(review) it
                        // assumes `StackValue` needs no drop — confirm at its
                        // definition.
                        unsafe { locals_vec.set_len(locals_base) };
                        locals_base = top_frame.locals_base;
                        locals = &mut locals_vec[locals_base..];
                    } else {
                        break 'outer;
                    }
                }
                InternalOpcode::TickEnergy => {
                    // Charge a precomputed amount of energy embedded in the
                    // instruction stream. `?` propagates out-of-energy as a trap.
                    let v = get_u32(&mut pc);
                    host.tick_energy(v as u64)?;
                }
                InternalOpcode::Call => {
                    // if we want synchronous calls we need to either
                    // 1. Just use recursion in the host. This is problematic because of stack
                    // overflow.
                    // 2. Manage storage of intermediate state
                    // ourselves. This means we have to store the state of execution, which is
                    // stored in the config structure. We handle synchronous calls by the host
                    // function interrupting execution of the current
                    // function/module. The host will then handle resumption.
                    // If the host function returns Ok(None) then the meaning of it is that
                    // execution should resume as normal.
                    //
                    // Function indices below self.imports.len() denote imported
                    // (host) functions; the rest index into self.code.
                    let idx = get_u32(&mut pc);
                    if let Some(f) = self.imports.get(idx as usize) {
                        // Imported (host) function: marshal arguments onto the
                        // value stack that the host call interface expects.
                        let params_len = f.ty().parameters.len();
                        stack.stack.clear();
                        stack.stack.reserve(params_len);
                        // NB: Locations of locals are stored in reverse order, i.e.,
                        // location of the last argument to the function is stored first.
                        // That is the reason for the reverse iteration below.
                        for offset in (0..params_len).rev() {
                            let val = get_local(constants, locals, &mut pc);
                            // SAFETY: `reserve(params_len)` above guarantees the
                            // capacity, and every offset in 0..params_len is
                            // written exactly once before `set_len` below.
                            unsafe { stack.stack.as_mut_ptr().add(offset).write(val) }
                        }
                        unsafe {
                            stack.stack.set_len(params_len);
                        }
                        // If the function returns a value the instruction also
                        // encodes the register the result must be written to.
                        let return_value_loc = if f.ty().result.is_some() {
                            let target = get_i32(&mut pc);
                            target as usize
                        } else {
                            0
                        };
                        // we are calling an imported function, handle the call directly.
                        if let Some(reason) = host.call(f, &mut memory, &mut stack)? {
                            // The host interrupted execution: snapshot the entire
                            // machine state so the caller can resume later.
                            return Ok(ExecutionOutcome::Interrupted {
                                reason,
                                config: RunConfig {
                                    pc: unsafe { pc.offset_from(instructions.as_ptr()) as usize }, /* TODO: Maybe use try_into? */
                                    instructions_idx,
                                    function_frames,
                                    return_type,
                                    memory,
                                    locals_vec,
                                    locals_base,
                                    globals,
                                    max_memory,
                                    return_value_loc,
                                },
                            });
                        } else if f.ty().result.is_some() {
                            locals[return_value_loc] = stack.pop();
                        }
                        assert!(stack.stack.is_empty());
                    } else {
                        // Local (in-module) function: push a frame and transfer
                        // control without recursing on the native stack.
                        host.track_call()?;
                        let local_idx = idx as usize - self.imports.len();
                        let f = self
                            .code
                            .get(local_idx)
                            .ok_or_else(|| anyhow!("Accessing non-existent code."))?;

                        // drop(locals);
                        // Extend the flat locals vector with the callee's
                        // registers, zero-initialized.
                        // NOTE(review): `std::mem::zeroed` assumes a zeroed
                        // StackValue is valid — plausible for a union of integers;
                        // confirm at its definition.
                        let current_size = locals_vec.len();
                        let new_size = current_size + f.num_registers() as usize;
                        locals_vec.resize(new_size, unsafe { std::mem::zeroed() });
                        let (prefix, new_locals) = locals_vec.split_at_mut(current_size);
                        let current_locals = &mut prefix[locals_base..];

                        // Note the rev.
                        // Arguments are decoded from the caller's registers (or
                        // constants) straight into the callee's first registers.
                        for p in new_locals[..f.num_params() as usize].iter_mut().rev() {
                            *p = get_local(constants, current_locals, &mut pc)
                        }
                        // Where (in the caller) the callee's result must land.
                        let new_return_type = match f.return_type() {
                            BlockType::EmptyType => None,
                            BlockType::ValueType(v) => Some((get_i32(&mut pc) as usize, v)),
                        };

                        // Save the caller's state; `Return` will restore it.
                        let current_frame = FunctionState {
                            pc: unsafe { pc.offset_from(instructions.as_ptr()) as usize }, /* TODO: Maybe use try_into?, */
                            instructions_idx,
                            locals_base,
                            return_type,
                        };
                        function_frames.push(current_frame);
                        locals_base = current_size;

                        locals = new_locals;
                        return_type = new_return_type;
                        instructions = f.code();
                        constants = f.constants();
                        instructions_idx = local_idx;
                        pc = instructions.as_ptr();
                    }
                }
                InternalOpcode::CallIndirect => {
                    // Indirect call: the expected type index is static, the
                    // function index comes from a register via the table.
                    let ty_idx = get_u32(&mut pc);
                    let ty = self
                        .ty
                        .get(ty_idx as usize)
                        .ok_or_else(|| anyhow!("Non-existent type."))?;
                    let idx = get_local(constants, locals, &mut pc);
                    let idx = unsafe { idx.short } as u32;
                    // An unoccupied (None) table slot or an out-of-bounds index
                    // is a trap (the final `bail!`).
                    if let Some(Some(f_idx)) = self.table.functions.get(idx as usize) {
                        if let Some(f) = self.imports.get(*f_idx as usize) {
                            let ty_actual = f.ty();
                            // call imported function.
                            ensure!(ty_actual == ty, "Actual type different from expected.");

                            // Marshal arguments; see the Call arm for details of
                            // the reverse iteration and the set_len safety.
                            let params_len = f.ty().parameters.len();
                            stack.stack.clear();
                            stack.stack.reserve(params_len);
                            for offset in (0..params_len).rev() {
                                let val = get_local(constants, locals, &mut pc);
                                unsafe { stack.stack.as_mut_ptr().add(offset).write(val) }
                            }
                            unsafe {
                                stack.stack.set_len(params_len);
                            }
                            let return_value_loc = if f.ty().result.is_some() {
                                let target = get_i32(&mut pc);
                                target as usize
                            } else {
                                0
                            };

                            // we are calling an imported function, handle the call directly.
                            if let Some(reason) = host.call(f, &mut memory, &mut stack)? {
                                // Host interrupt: snapshot state for resumption.
                                return Ok(ExecutionOutcome::Interrupted {
                                    reason,
                                    config: RunConfig {
                                        pc: unsafe {
                                            pc.offset_from(instructions.as_ptr()) as usize
                                        }, /* TODO: Maybe use try_into?, */
                                        instructions_idx,
                                        function_frames,
                                        return_type,
                                        memory,
                                        locals_vec,
                                        locals_base,
                                        globals,
                                        max_memory,
                                        return_value_loc,
                                    },
                                });
                            } else if f.ty().result.is_some() {
                                locals[return_value_loc] = stack.pop();
                            }
                        } else {
                            host.track_call()?;
                            let local_idx = *f_idx as usize - self.imports.len();
                            let f = self
                                .code
                                .get(local_idx)
                                .ok_or_else(|| anyhow!("Accessing non-existent code."))?;
                            let ty_actual =
                                self.ty.get(f.type_idx() as usize).ok_or_else(|| {
                                    anyhow!("Non-existent type. This should not happen.")
                                })?;
                            // Fast path: identical type index. Fallback:
                            // structural equality of the two function types.
                            ensure!(
                                f.type_idx() == ty_idx || ty_actual == ty,
                                "Actual type different from expected."
                            );

                            // FIXME: Remove duplication.
                            // drop(locals);
                            // Frame setup below mirrors the local-function path
                            // of the Call arm.
                            let current_size = locals_vec.len();
                            let new_size = current_size + f.num_registers() as usize;
                            locals_vec.resize(new_size, unsafe { std::mem::zeroed() });
                            let (prefix, new_locals) = locals_vec.split_at_mut(current_size);
                            let current_locals = &mut prefix[locals_base..];
                            // Note the rev.
                            for p in new_locals[..f.num_params() as usize].iter_mut().rev() {
                                *p = get_local(constants, current_locals, &mut pc)
                            }
                            let new_return_type = match f.return_type() {
                                BlockType::EmptyType => None,
                                BlockType::ValueType(v) => Some((get_i32(&mut pc) as usize, v)),
                            };

                            let current_frame = FunctionState {
                                pc: unsafe { pc.offset_from(instructions.as_ptr()) as usize }, /* TODO: Maybe use try_into?, */
                                instructions_idx,
                                locals_base,
                                return_type,
                            };
                            function_frames.push(current_frame);
                            locals_base = current_size;

                            locals = new_locals;

                            return_type = new_return_type;
                            instructions = f.code();
                            constants = f.constants();
                            instructions_idx = local_idx;
                            pc = instructions.as_ptr();
                        }
                    } else {
                        bail!("Calling undefined function {}.", idx) // trap
                    }
                }
                InternalOpcode::Select => {
                    // Operands are decoded in reverse of their logical order:
                    // condition first, then the second and first alternatives.
                    // A zero condition selects t2, nonzero selects t1.
                    let top = get_local(constants, locals, &mut pc);
                    let t2 = get_local(constants, locals, &mut pc);
                    let t1 = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    if unsafe { top.short } == 0 {
                        *target = t2;
                    } else {
                        *target = t1;
                    }
                }
                InternalOpcode::GlobalGet => {
                    // Copy a module global into a local register.
                    let idx = get_u16(&mut pc);
                    let copy_target = get_local_mut(locals, &mut pc);
                    *copy_target = globals[idx as usize];
                }
                InternalOpcode::GlobalSet => {
                    // Write a local register (or constant) into a module global.
                    let idx = get_u16(&mut pc);
                    let copy_target = get_local(constants, locals, &mut pc);
                    globals[idx as usize] = copy_target;
                }
                // Memory loads. `memory_load` decodes the destination register
                // and the effective address; the `read_*` helpers perform the
                // bounds-checked read, returning Err (a trap) when out of bounds.
                // Narrow loads are widened with explicit sign/zero extension.
                InternalOpcode::I32Load => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i32(&memory, pos)?;
                    *result = StackValue::from(val);
                }
                InternalOpcode::I64Load => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i64(&memory, pos)?;
                    *result = StackValue::from(val);
                }
                InternalOpcode::I32Load8S => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i8(&memory, pos)?;
                    // sign-extend i8 -> i32
                    *result = StackValue::from(val as i32);
                }
                InternalOpcode::I32Load8U => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_u8(&memory, pos)?;
                    // zero-extend u8 -> i32
                    *result = StackValue::from(val as i32);
                }
                InternalOpcode::I32Load16S => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i16(&memory, pos)?;
                    // sign-extend i16 -> i32
                    *result = StackValue::from(val as i32);
                }
                InternalOpcode::I32Load16U => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_u16(&memory, pos)?;
                    // zero-extend u16 -> i32
                    *result = StackValue::from(val as i32);
                }
                InternalOpcode::I64Load8S => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i8(&memory, pos)?;
                    // sign-extend i8 -> i64
                    *result = StackValue::from(val as i64);
                }
                InternalOpcode::I64Load8U => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_u8(&memory, pos)?;
                    // zero-extend u8 -> i64
                    *result = StackValue::from(val as i64);
                }
                InternalOpcode::I64Load16S => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i16(&memory, pos)?;
                    // sign-extend i16 -> i64
                    *result = StackValue::from(val as i64);
                }
                InternalOpcode::I64Load16U => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_u16(&memory, pos)?;
                    // zero-extend u16 -> i64
                    *result = StackValue::from(val as i64);
                }
                InternalOpcode::I64Load32S => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_i32(&memory, pos)?;
                    // sign-extend i32 -> i64
                    *result = StackValue::from(val as i64);
                }
                InternalOpcode::I64Load32U => {
                    let (result, pos) = memory_load(constants, locals, &mut pc);
                    let val = read_u32(&memory, pos)?;
                    // zero-extend u32 -> i64
                    *result = StackValue::from(val as i64);
                }
                // Memory stores. `memory_store` decodes the value and the
                // effective address; `write_memory_at` bounds-checks and writes.
                // Narrow stores slice the little-endian byte representation to
                // keep only the low-order bytes (wrap semantics).
                InternalOpcode::I32Store => {
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.short }.to_le_bytes())?;
                }
                InternalOpcode::I64Store => {
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes())?;
                }
                InternalOpcode::I32Store8 => {
                    // low byte only
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.short }.to_le_bytes()[..1])?;
                }
                InternalOpcode::I32Store16 => {
                    // low two bytes only
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.short }.to_le_bytes()[..2])?;
                }
                InternalOpcode::I64Store8 => {
                    // low byte only
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes()[..1])?;
                }
                InternalOpcode::I64Store16 => {
                    // low two bytes only
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes()[..2])?;
                }
                InternalOpcode::I64Store32 => {
                    // low four bytes only
                    let (val, pos) = memory_store(constants, locals, &mut pc);
                    write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes()[..4])?;
                }
                InternalOpcode::MemorySize => {
                    // Current linear memory size, reported in pages.
                    let target = get_local_mut(locals, &mut pc);
                    let l = memory.len() / PAGE_SIZE as usize;
                    *target = StackValue::from(l as i32);
                }
                InternalOpcode::MemoryGrow => {
                    let val = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    let n = unsafe { val.short } as u32;
                    let sz = memory.len() / PAGE_SIZE as usize;
                    if sz + n as usize > max_memory {
                        // Growth beyond the configured maximum fails with -1,
                        // matching Wasm's memory.grow failure convention.
                        target.short = -1i32;
                    } else {
                        if n != 0 {
                            // NOTE(review): `set_len` does not allocate or
                            // initialize; this assumes `memory` was created with
                            // capacity for `max_memory` pages (and zeroed up to
                            // that capacity) — confirm at the allocation site.
                            unsafe { memory.set_len((sz + n as usize) * PAGE_SIZE as usize) }
                        }
                        // On success the previous size (in pages) is returned.
                        target.short = sz as i32;
                    }
                }
                // Comparison and test instructions. All of them produce an i32
                // result of 0 or 1; the unsigned variants reinterpret the bits
                // as u32/u64 before comparing.
                InternalOpcode::I32Eqz => {
                    let source = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    let val = unsafe { source.short };
                    target.short = if val == 0 {
                        1i32
                    } else {
                        0i32
                    };
                }
                InternalOpcode::I32Eq => {
                    binary_i32(constants, locals, &mut pc, |left, right| (left == right) as i32);
                }
                InternalOpcode::I32Ne => {
                    binary_i32(constants, locals, &mut pc, |left, right| (left != right) as i32);
                }
                InternalOpcode::I32LtS => {
                    binary_i32(constants, locals, &mut pc, |left, right| (left < right) as i32);
                }
                InternalOpcode::I32LtU => {
                    binary_i32(constants, locals, &mut pc, |left, right| {
                        ((left as u32) < (right as u32)) as i32
                    });
                }
                InternalOpcode::I32GtS => {
                    binary_i32(constants, locals, &mut pc, |left, right| (left > right) as i32);
                }
                InternalOpcode::I32GtU => {
                    binary_i32(constants, locals, &mut pc, |left, right| {
                        ((left as u32) > (right as u32)) as i32
                    });
                }
                InternalOpcode::I32LeS => {
                    binary_i32(constants, locals, &mut pc, |left, right| (left <= right) as i32);
                }
                InternalOpcode::I32LeU => {
                    binary_i32(constants, locals, &mut pc, |left, right| {
                        ((left as u32) <= (right as u32)) as i32
                    });
                }
                InternalOpcode::I32GeS => {
                    binary_i32(constants, locals, &mut pc, |left, right| (left >= right) as i32);
                }
                InternalOpcode::I32GeU => {
                    binary_i32(constants, locals, &mut pc, |left, right| {
                        ((left as u32) >= (right as u32)) as i32
                    });
                }
                InternalOpcode::I64Eqz => {
                    // Tests a 64-bit operand but writes a 32-bit result.
                    let source = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    let val = unsafe { source.long };
                    target.short = if val == 0 {
                        1i32
                    } else {
                        0i32
                    };
                }
                InternalOpcode::I64Eq => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left == right) as i32
                    });
                }
                InternalOpcode::I64Ne => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left != right) as i32
                    });
                }
                InternalOpcode::I64LtS => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left < right) as i32
                    });
                }
                InternalOpcode::I64LtU => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) < (right as u64)) as i32
                    });
                }
                InternalOpcode::I64GtS => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left > right) as i32
                    });
                }
                InternalOpcode::I64GtU => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) > (right as u64)) as i32
                    });
                }
                InternalOpcode::I64LeS => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left <= right) as i32
                    });
                }
                InternalOpcode::I64LeU => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) <= (right as u64)) as i32
                    });
                }
                InternalOpcode::I64GeS => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left >= right) as i32
                    });
                }
                InternalOpcode::I64GeU => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) >= (right as u64)) as i32
                    });
                }
                // Numeric instructions. Wrapping arithmetic implements Wasm's
                // modular integer semantics; the `_partial` variants return
                // None (a trap) for division/remainder by zero (and signed
                // overflow, via checked_div/checked_rem); shift and rotate
                // amounts are taken modulo the bit width as Wasm requires.
                InternalOpcode::I32Clz => {
                    unary_i32(constants, locals, &mut pc, |x| x.leading_zeros() as i32);
                }
                InternalOpcode::I32Ctz => {
                    unary_i32(constants, locals, &mut pc, |x| x.trailing_zeros() as i32);
                }
                InternalOpcode::I32Popcnt => {
                    unary_i32(constants, locals, &mut pc, |x| x.count_ones() as i32);
                }
                InternalOpcode::I32Add => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.wrapping_add(y));
                }
                InternalOpcode::I32Sub => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.wrapping_sub(y));
                }
                InternalOpcode::I32Mul => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.wrapping_mul(y));
                }
                InternalOpcode::I32DivS => {
                    // checked_div is None for x / 0 and i32::MIN / -1.
                    binary_i32_partial(constants, locals, &mut pc, |x, y| x.checked_div(y))?;
                }
                InternalOpcode::I32DivU => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| {
                        (x as u32).checked_div(y as u32).map(|x| x as i32)
                    })?;
                }
                InternalOpcode::I32RemS => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| x.checked_rem(y))?;
                }
                InternalOpcode::I32RemU => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| {
                        (x as u32).checked_rem(y as u32).map(|x| x as i32)
                    })?;
                }
                InternalOpcode::I32And => {
                    binary_i32(constants, locals, &mut pc, |x, y| x & y);
                }
                InternalOpcode::I32Or => {
                    binary_i32(constants, locals, &mut pc, |x, y| x | y);
                }
                InternalOpcode::I32Xor => {
                    binary_i32(constants, locals, &mut pc, |x, y| x ^ y);
                }
                InternalOpcode::I32Shl => {
                    // shift amount mod 32 keeps the shift in range (no panic).
                    binary_i32(constants, locals, &mut pc, |x, y| x << (y as u32 % 32));
                }
                InternalOpcode::I32ShrS => {
                    // arithmetic shift: operates on signed x.
                    binary_i32(constants, locals, &mut pc, |x, y| x >> (y as u32 % 32));
                }
                InternalOpcode::I32ShrU => {
                    // logical shift: reinterpret as u32 first.
                    binary_i32(constants, locals, &mut pc, |x, y| {
                        ((x as u32) >> (y as u32 % 32)) as i32
                    });
                }
                InternalOpcode::I32Rotl => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.rotate_left(y as u32 % 32));
                }
                InternalOpcode::I32Rotr => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.rotate_right(y as u32 % 32));
                }
                InternalOpcode::I64Clz => {
                    unary_i64(constants, locals, &mut pc, |x| x.leading_zeros() as i64);
                }
                InternalOpcode::I64Ctz => {
                    unary_i64(constants, locals, &mut pc, |x| x.trailing_zeros() as i64);
                }
                InternalOpcode::I64Popcnt => {
                    unary_i64(constants, locals, &mut pc, |x| x.count_ones() as i64);
                }
                InternalOpcode::I64Add => {
                    binary_i64(constants, locals, &mut pc, |x, y| x.wrapping_add(y));
                }
                InternalOpcode::I64Sub => {
                    binary_i64(constants, locals, &mut pc, |x, y| x.wrapping_sub(y));
                }
                InternalOpcode::I64Mul => {
                    binary_i64(constants, locals, &mut pc, |x, y| x.wrapping_mul(y));
                }
                InternalOpcode::I64DivS => {
                    // checked_div is None for x / 0 and i64::MIN / -1.
                    binary_i64_partial(constants, locals, &mut pc, |x, y| x.checked_div(y))?;
                }
                InternalOpcode::I64DivU => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| {
                        (x as u64).checked_div(y as u64).map(|x| x as i64)
                    })?;
                }
                InternalOpcode::I64RemS => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| x.checked_rem(y))?;
                }
                InternalOpcode::I64RemU => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| {
                        (x as u64).checked_rem(y as u64).map(|x| x as i64)
                    })?;
                }
                InternalOpcode::I64And => {
                    binary_i64(constants, locals, &mut pc, |x, y| x & y);
                }
                InternalOpcode::I64Or => {
                    binary_i64(constants, locals, &mut pc, |x, y| x | y);
                }
                InternalOpcode::I64Xor => {
                    binary_i64(constants, locals, &mut pc, |x, y| x ^ y);
                }
                InternalOpcode::I64Shl => {
                    binary_i64(constants, locals, &mut pc, |x, y| x << (y as u64 % 64));
                }
                InternalOpcode::I64ShrS => {
                    // arithmetic shift on signed x.
                    binary_i64(constants, locals, &mut pc, |x, y| x >> (y as u64 % 64));
                }
                InternalOpcode::I64ShrU => {
                    // logical shift: reinterpret as u64 first.
                    binary_i64(constants, locals, &mut pc, |x, y| {
                        ((x as u64) >> (y as u64 % 64)) as i64
                    });
                }
                InternalOpcode::I64Rotl => {
                    binary_i64(constants, locals, &mut pc, |x, y| {
                        x.rotate_left((y as u64 % 64) as u32)
                    });
                }
                InternalOpcode::I64Rotr => {
                    binary_i64(constants, locals, &mut pc, |x, y| {
                        x.rotate_right((y as u64 % 64) as u32)
                    });
                }
1313                InternalOpcode::I32WrapI64 => {
1314                    let source = get_local(constants, locals, &mut pc);
1315                    let target = get_local_mut(locals, &mut pc);
1316                    target.short = unsafe { source.long } as i32;
1317                }
1318                InternalOpcode::I64ExtendI32S => {
1319                    let source = get_local(constants, locals, &mut pc);
1320                    let target = get_local_mut(locals, &mut pc);
1321                    target.long = unsafe { source.short } as i64;
1322                }
1323                InternalOpcode::I64ExtendI32U => {
1324                    let source = get_local(constants, locals, &mut pc);
1325                    let target = get_local_mut(locals, &mut pc);
1326                    // The two `as` casts are important: Rust's `as` cast sign-extends when
1327                    // the source type is signed. So we first cast to the unsigned type, and
1328                    // only then widen, ensuring the value is extended with 0's.
1329                    target.long = unsafe { source.short } as u32 as i64;
1330                }
1331                InternalOpcode::I32Extend8S => {
1332                    unary_i32(constants, locals, &mut pc, |x| x as i8 as i32)
1333                }
1334                InternalOpcode::I32Extend16S => {
1335                    unary_i32(constants, locals, &mut pc, |x| x as i16 as i32)
1336                }
1337                InternalOpcode::I64Extend8S => {
1338                    unary_i64(constants, locals, &mut pc, |x| x as i8 as i64)
1339                }
1340                InternalOpcode::I64Extend16S => {
1341                    unary_i64(constants, locals, &mut pc, |x| x as i16 as i64)
1342                }
1343                InternalOpcode::I64Extend32S => {
1344                    unary_i64(constants, locals, &mut pc, |x| x as i32 as i64)
1345                }
1346            }
1347        }
1348        match return_type {
1349            Some((v, ValueType::I32)) => Ok(ExecutionOutcome::Success {
1350                result: Some(Value::I32(unsafe { locals[v].short })),
1351                memory,
1352            }),
1353            Some((v, ValueType::I64)) => Ok(ExecutionOutcome::Success {
1354                result: Some(Value::I64(unsafe { locals[v].long })),
1355                memory,
1356            }),
1357            None => Ok(ExecutionOutcome::Success {
1358                result: None,
1359                memory,
1360            }),
1361        }
1362    }
1363}