1use crate::{
12 artifact::{StackValue, *},
13 constants::{MAX_NUM_PAGES, PAGE_SIZE},
14 types::*,
15};
16use anyhow::{anyhow, bail, ensure};
17use std::{convert::TryInto, io::Write};
18
// The interpreter relies on reinterpreting byte buffers as little-endian
// integers (via `read_unaligned`), so refuse to build on big-endian targets.
// Fix: correct the typo "intepreter" -> "interpreter" in the diagnostic.
#[cfg(not(target_endian = "little"))]
compile_error!("The interpreter only supports little endian platforms.");
21
/// An uninhabited interrupt type for hosts whose calls can never suspend
/// execution. Having no variants, a value of this type cannot be constructed,
/// so the `Interrupted` outcome is statically impossible.
#[derive(Debug, Copy, Clone)]
pub enum NoInterrupt {}
25
/// A host of the interpreter: it implements the imported functions and is
/// charged for resource usage during execution.
pub trait Host<I> {
    /// Type used to signal that execution should be suspended and control
    /// handed back to the caller (see `ExecutionOutcome::Interrupted`).
    type Interrupt;
    /// Account for allocation of the initial linear memory of `num_pages`
    /// pages. Returning `Err` aborts execution.
    fn tick_initial_memory(&mut self, num_pages: u32) -> RunResult<()>;
    /// Invoke the imported function `f`, with mutable access to the linear
    /// memory and to the stack holding the call's arguments.
    /// `Ok(Some(_))` suspends execution, `Ok(None)` continues it, and `Err`
    /// aborts it.
    fn call(
        &mut self,
        f: &I,
        memory: &mut [u8],
        stack: &mut RuntimeStack,
    ) -> RunResult<Option<Self::Interrupt>>;

    /// Charge `_energy` units of execution cost. Returning `Err` terminates
    /// execution.
    fn tick_energy(&mut self, _energy: u64) -> RunResult<()>;

    /// Record entry into a non-imported function call; `Err` aborts
    /// execution. NOTE(review): presumably used to bound call depth — confirm
    /// against host implementations.
    fn track_call(&mut self) -> RunResult<()>;

    /// Record return from a function call; the counterpart of `track_call`.
    fn track_return(&mut self);
}
70
71pub type RunResult<A> = anyhow::Result<A>;
74
/// A snapshot of interpreter state sufficient to start or resume execution
/// (it is returned inside `ExecutionOutcome::Interrupted` and consumed by
/// `run_config`).
#[derive(Debug)]
pub struct RunConfig {
    /// Offset into the current function's instruction stream where execution
    /// (re)starts.
    pc: usize,
    /// Index of the current function's code in the artifact's code table
    /// (function index minus the number of imports).
    instructions_idx: usize,
    /// Saved states of the callers of the current function, innermost last.
    function_frames: Vec<FunctionState>,
    /// Return type of the current function together with the register where
    /// the return value must be stored in the caller's frame.
    return_type: Option<(usize, ValueType)>,
    /// Contents of the linear memory.
    memory: Vec<u8>,
    /// Registers (locals) of all functions currently on the call stack.
    locals_vec: Vec<StackValue>,
    /// Offset into `locals_vec` at which the current function's registers
    /// begin.
    locals_base: usize,
    /// Current values of the module's globals.
    globals: Vec<StackValue>,
    /// Maximum size the linear memory may grow to, in pages.
    max_memory: usize,
    /// Register (relative to `locals_base`) that receives the return value of
    /// the interrupted host call when execution is resumed.
    return_value_loc: usize,
}
116
117impl RunConfig {
118 #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
119 pub fn push_value<F>(&mut self, f: F)
122 where
123 StackValue: From<F>, {
124 let v: StackValue = f.into();
125 self.locals_vec[self.locals_base + self.return_value_loc] = v;
126 }
127}
128
/// The outcome of running an entrypoint: either it ran to completion, or a
/// host call suspended it.
#[derive(Debug)]
pub enum ExecutionOutcome<Interrupt> {
    /// Execution terminated normally.
    Success {
        /// The entrypoint's return value, if it declares one.
        result: Option<Value>,
        /// The final contents of the linear memory.
        memory: Vec<u8>,
    },
    /// Execution was suspended by a host call.
    Interrupted {
        /// The interrupt value produced by the host.
        reason: Interrupt,
        /// Interpreter state; pass it to `run_config` to resume execution.
        config: RunConfig,
    },
}
149
/// Saved execution state of a caller, pushed when a function call is made and
/// restored by the `Return` instruction.
#[derive(Debug)]
struct FunctionState {
    /// Instruction offset in the caller's code at which to resume.
    pc: usize,
    /// Index of the caller's code in the artifact's code table.
    instructions_idx: usize,
    /// Offset into the shared locals vector of the caller's registers.
    locals_base: usize,
    /// The caller's own return type and destination register.
    return_type: Option<(usize, ValueType)>,
}
168
/// A typed value of one of the two supported numeric types. Used at the API
/// boundary (arguments and results), in contrast to the untyped `StackValue`
/// used internally.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Value {
    I32(i32),
    I64(i64),
}
178
179impl From<Value> for ValueType {
180 fn from(v: Value) -> Self {
181 match v {
182 Value::I32(_) => ValueType::I32,
183 Value::I64(_) => ValueType::I64,
184 }
185 }
186}
187
188impl From<Value> for i64 {
189 fn from(v: Value) -> Self {
190 match v {
191 Value::I32(x) => i64::from(x),
192 Value::I64(x) => x,
193 }
194 }
195}
196
/// The runtime stack used to pass arguments to imported (host) functions and
/// to receive their return values.
#[derive(Debug)]
pub struct RuntimeStack {
    /// The values on the stack; the top of the stack is the last element.
    stack: Vec<StackValue>,
}
204
/// Errors that can occur when preparing to run an entrypoint.
#[derive(Debug)]
pub enum RuntimeError {
    /// The requested entrypoint resolved to an imported function, which
    /// cannot be invoked directly.
    DirectlyCallImport,
}
211
212impl std::fmt::Display for RuntimeError {
213 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
214 match self {
215 RuntimeError::DirectlyCallImport => {
216 write!(f, "Calling an imported function directly is not supported.")
217 }
218 }
219 }
220}
221
impl RuntimeStack {
    /// Pop the top value off the stack.
    ///
    /// # Panics
    ///
    /// Panics if the stack is empty.
    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    pub fn pop(&mut self) -> StackValue { self.stack.pop().expect("Stack not empty") }

    /// Push a value onto the stack.
    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    pub fn push(&mut self, x: StackValue) { self.stack.push(x); }

    /// Push anything convertible into a `StackValue` onto the stack.
    #[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
    pub fn push_value<F>(&mut self, f: F)
    where
        StackValue: From<F>, {
        self.push(StackValue::from(f))
    }

    /// Pop the top of the stack, reading it as a `u32`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the top of the stack holds a 32-bit value.
    /// Panics if the stack is empty.
    pub unsafe fn pop_u32(&mut self) -> u32 { self.pop().short as u32 }

    /// Read the top of the stack as a `u32` without popping it.
    ///
    /// # Safety
    ///
    /// The caller must ensure the top of the stack holds a 32-bit value.
    /// Panics if the stack is empty.
    pub unsafe fn peek_u32(&mut self) -> u32 {
        self.stack.last().expect("Non-empty stack").short as u32
    }

    /// Pop the top of the stack, reading it as a `u64`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the top of the stack holds a 64-bit value.
    /// Panics if the stack is empty.
    pub unsafe fn pop_u64(&mut self) -> u64 { self.pop().long as u64 }
}
270
/// Decode a `u16` from the instruction stream at `*pc` and advance `pc` past
/// it. Native byte order; the crate only builds on little-endian targets.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn get_u16(pc: &mut *const u8) -> u16 {
    // SAFETY: the caller guarantees at least two readable bytes at `*pc`.
    unsafe {
        let value = pc.cast::<u16>().read_unaligned();
        *pc = pc.add(std::mem::size_of::<u16>());
        value
    }
}
277
/// Decode a `u32` from the instruction stream at `*pc` and advance `pc` past
/// it. Native byte order; the crate only builds on little-endian targets.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn get_u32(pc: &mut *const u8) -> u32 {
    // SAFETY: the caller guarantees at least four readable bytes at `*pc`.
    unsafe {
        let value = pc.cast::<u32>().read_unaligned();
        *pc = pc.add(std::mem::size_of::<u32>());
        value
    }
}
284
/// Decode an `i32` from the instruction stream at `*pc` and advance `pc` past
/// it. Native byte order; the crate only builds on little-endian targets.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn get_i32(pc: &mut *const u8) -> i32 {
    // SAFETY: the caller guarantees at least four readable bytes at `*pc`.
    unsafe {
        let value = pc.cast::<i32>().read_unaligned();
        *pc = pc.add(std::mem::size_of::<i32>());
        value
    }
}
291
/// Decode a register operand from the instruction stream and load its value.
/// A non-negative operand `v` refers to register `v` of the current function;
/// a negative operand `v` refers to entry `-(v + 1)` of the function's
/// constant table.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn get_local(constants: &[i64], locals: &[StackValue], pc: &mut *const u8) -> StackValue {
    let v = get_i32(pc);
    if v >= 0 {
        let v = v as usize;
        // SAFETY: indices are assumed in bounds by construction of the
        // instruction stream; NOTE(review): this relies on the artifact
        // compiler/validator — confirm that invariant holds.
        *unsafe { locals.get_unchecked(v) }
    } else {
        let v = (-(v + 1)) as usize;
        // SAFETY: as above, constant indices are assumed in bounds by
        // construction of the instruction stream.
        let v = unsafe { constants.get_unchecked(v) };
        StackValue::from(*v)
    }
}
306
/// Decode a register operand from the instruction stream and return a mutable
/// reference to that register. Unlike `get_local`, the operand must be a
/// register (non-negative); constants cannot be written.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn get_local_mut<'a>(locals: &'a mut [StackValue], pc: &mut *const u8) -> &'a mut StackValue {
    let v = get_i32(pc);
    // SAFETY: the index is assumed non-negative and in bounds by construction
    // of the instruction stream; NOTE(review): relies on the artifact
    // compiler/validator — confirm that invariant holds.
    unsafe { locals.get_unchecked_mut(v as usize) }
}
315
316#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
317fn read_u8(bytes: &[u8], pos: usize) -> RunResult<u8> {
318 bytes.get(pos).copied().ok_or_else(|| anyhow!("Memory access out of bounds."))
319}
320
321#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
322fn read_u16(bytes: &[u8], pos: usize) -> RunResult<u16> {
323 ensure!(pos + 2 <= bytes.len(), "Memory access out of bounds.");
324 let r = unsafe { bytes.as_ptr().add(pos).cast::<u16>().read_unaligned() };
325 Ok(r)
326}
327
328#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
329fn read_u32(bytes: &[u8], pos: usize) -> RunResult<u32> {
330 ensure!(pos + 4 <= bytes.len(), "Memory access out of bounds.");
331 let r = unsafe { bytes.as_ptr().add(pos).cast::<u32>().read_unaligned() };
332 Ok(r)
333}
334
335#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
336fn read_i8(bytes: &[u8], pos: usize) -> RunResult<i8> {
337 bytes.get(pos).map(|&x| x as i8).ok_or_else(|| anyhow!("Memory access out of bounds."))
338}
339
340#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
341fn read_i16(bytes: &[u8], pos: usize) -> RunResult<i16> {
342 ensure!(pos + 2 <= bytes.len(), "Memory access out of bounds.");
343 let r = unsafe { bytes.as_ptr().add(pos).cast::<i16>().read_unaligned() };
344 Ok(r)
345}
346
347#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
348fn read_i32(bytes: &[u8], pos: usize) -> RunResult<i32> {
349 ensure!(pos + 4 <= bytes.len(), "Memory access out of bounds.");
350 let r = unsafe { bytes.as_ptr().add(pos).cast::<i32>().read_unaligned() };
351 Ok(r)
352}
353
354#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
355fn read_i64(bytes: &[u8], pos: usize) -> RunResult<i64> {
356 ensure!(pos + 8 <= bytes.len(), "Memory access out of bounds.");
357 let r = unsafe { bytes.as_ptr().add(pos).cast::<i64>().read_unaligned() };
358 Ok(r)
359}
360
/// Decode the operands of a memory load instruction: a static `u32` offset,
/// a register holding the base address, and the destination register.
/// Returns the destination register and the absolute position in linear
/// memory to read from. The sum `base + offset` of two 32-bit quantities
/// cannot overflow `usize` on 64-bit targets.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn memory_load<'a>(
    constants: &[i64],
    locals: &'a mut [StackValue],
    pc: &mut *const u8,
) -> (&'a mut StackValue, usize) {
    let offset = get_u32(pc);
    let base = get_local(constants, locals, pc);
    let result = get_local_mut(locals, pc);
    // SAFETY: the base register is read as a 32-bit value; NOTE(review):
    // assumes the instruction stream guarantees an i32 there — confirm.
    let pos = unsafe { base.short } as u32 as usize + offset as usize;
    (result, pos)
}
379
/// Decode the operands of a memory store instruction: a static `u32` offset,
/// a register holding the value to store, and a register holding the base
/// address. Returns the value and the absolute position in linear memory to
/// write to. The sum `base + offset` of two 32-bit quantities cannot overflow
/// `usize` on 64-bit targets.
#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
fn memory_store(
    constants: &[i64],
    locals: &[StackValue],
    pc: &mut *const u8,
) -> (StackValue, usize) {
    let offset = get_u32(pc);
    let value = get_local(constants, locals, pc);
    let base = get_local(constants, locals, pc);
    // SAFETY: the base register is read as a 32-bit value; NOTE(review):
    // assumes the instruction stream guarantees an i32 there — confirm.
    (value, unsafe { base.short } as u32 as usize + offset as usize)
}
394
395#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
396fn write_memory_at(memory: &mut [u8], pos: usize, bytes: &[u8]) -> RunResult<()> {
397 let end = pos + bytes.len();
398 ensure!(end <= memory.len(), "Illegal memory access.");
399 memory[pos..end].copy_from_slice(bytes);
400 Ok(())
401}
402
403#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
404fn unary_i32(
405 constants: &[i64],
406 locals: &mut [StackValue],
407 pc: &mut *const u8,
408 f: impl Fn(i32) -> i32,
409) {
410 let source = get_local(constants, locals, pc);
411 let target = get_local_mut(locals, pc);
412 target.short = f(unsafe { source.short });
413}
414
415#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
416fn unary_i64(
417 constants: &[i64],
418 locals: &mut [StackValue],
419 pc: &mut *const u8,
420 f: impl Fn(i64) -> i64,
421) {
422 let source = get_local(constants, locals, pc);
423 let target = get_local_mut(locals, pc);
424 target.long = f(unsafe { source.long });
425}
426
427#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
428fn binary_i32(
429 constants: &[i64],
430 locals: &mut [StackValue],
431 pc: &mut *const u8,
432 f: impl Fn(i32, i32) -> i32,
433) {
434 let right = get_local(constants, locals, pc);
435 let left = get_local(constants, locals, pc);
436 let target = get_local_mut(locals, pc);
437 target.short = f(unsafe { left.short }, unsafe { right.short });
438}
439
440#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
441fn binary_i32_partial(
442 constants: &[i64],
443 locals: &mut [StackValue],
444 pc: &mut *const u8,
445 f: impl Fn(i32, i32) -> Option<i32>,
446) -> RunResult<()> {
447 let right = get_local(constants, locals, pc);
448 let left = get_local(constants, locals, pc);
449 let target = get_local_mut(locals, pc);
450 target.short = f(unsafe { left.short }, unsafe { right.short })
451 .ok_or_else(|| anyhow!("Runtime exception in i32 binary."))?;
452 Ok(())
453}
454
455#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
456fn binary_i64(
457 constants: &[i64],
458 locals: &mut [StackValue],
459 pc: &mut *const u8,
460 f: impl Fn(i64, i64) -> i64,
461) {
462 let right = get_local(constants, locals, pc);
463 let left = get_local(constants, locals, pc);
464 let target = get_local_mut(locals, pc);
465 target.long = f(unsafe { left.long }, unsafe { right.long });
466}
467
468#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
469fn binary_i64_partial(
470 constants: &[i64],
471 locals: &mut [StackValue],
472 pc: &mut *const u8,
473 f: impl Fn(i64, i64) -> Option<i64>,
474) -> RunResult<()> {
475 let right = get_local(constants, locals, pc);
476 let left = get_local(constants, locals, pc);
477 let target = get_local_mut(locals, pc);
478 target.long = f(unsafe { left.long }, unsafe { right.long })
479 .ok_or_else(|| anyhow!("Runtime exception in i64 binary"))?;
480 Ok(())
481}
482
483#[cfg_attr(not(feature = "fuzz-coverage"), inline(always))]
484fn binary_i64_test(
485 constants: &[i64],
486 locals: &mut [StackValue],
487 pc: &mut *const u8,
488 f: impl Fn(i64, i64) -> i32,
489) {
490 let right = get_local(constants, locals, pc);
491 let left = get_local(constants, locals, pc);
492 let target = get_local_mut(locals, pc);
493 target.short = f(unsafe { left.long }, unsafe { right.long });
494}
495
496impl<I: TryFromImport, R: RunnableCode> Artifact<I, R> {
    /// Run the exported entrypoint `name` with the given arguments.
    ///
    /// Fails if the entrypoint does not exist, resolves to an imported
    /// function, or the arguments do not match the entrypoint's parameters
    /// in number and type. Otherwise this sets up the initial interpreter
    /// state (registers, globals, linear memory) and delegates to
    /// `run_config`.
    pub fn run<Q: std::fmt::Display + Ord + ?Sized, H: Host<I>>(
        &self,
        host: &mut H,
        name: &Q,
        args: &[Value],
    ) -> RunResult<ExecutionOutcome<H::Interrupt>>
    where
        Name: std::borrow::Borrow<Q>, {
        let start = *self.get_entrypoint_index(name)?;
        // Imported functions cannot be invoked directly.
        ensure!(start as usize >= self.imports.len(), RuntimeError::DirectlyCallImport);
        // Function indices are offset by the number of imports; indices below
        // `imports.len()` refer to imports, the rest to local code.
        let instructions_idx = start as usize - self.imports.len();
        let outer_function = &self.code[instructions_idx];
        let num_args: u32 = args.len().try_into()?;
        ensure!(
            outer_function.num_params() == num_args,
            "The number of arguments does not match the number of parameters {} != {}.",
            num_args,
            outer_function.num_params(),
        );
        // Check each argument's type against the declared parameter type.
        for (p, actual) in outer_function.params().iter().zip(args.iter()) {
            let actual_ty = ValueType::from(*actual);
            ensure!(
                *p == actual_ty,
                "Argument of incorrect type: actual {:#?}, expected {:#?}.",
                actual_ty,
                *p
            )
        }

        let globals = self.global.inits.iter().copied().map(StackValue::from).collect::<Vec<_>>();
        // Registers start out zeroed. NOTE(review): assumes the all-zero bit
        // pattern is a valid `StackValue` — confirm with its definition.
        let mut locals: Vec<StackValue> =
            vec![unsafe { std::mem::zeroed() }; outer_function.num_registers() as usize];
        // The leading registers of a function hold its parameters.
        for (&arg, place) in args.iter().zip(&mut locals) {
            *place = match arg {
                Value::I32(v) => StackValue::from(v),
                Value::I64(v) => StackValue::from(v),
            };
        }
        let memory = {
            if let Some(m) = self.memory.as_ref() {
                host.tick_initial_memory(m.init_size)?;
                // Allocate capacity for the maximum number of pages up front,
                // then shrink the visible length to the initial pages; the
                // `MemoryGrow` instruction later extends the length via
                // `set_len` without reallocating.
                let mut memory = vec![0u8; (MAX_NUM_PAGES * PAGE_SIZE) as usize];
                unsafe {
                    memory.set_len((m.init_size * PAGE_SIZE) as usize);
                }
                // Copy the data segments into the initial memory.
                for data in m.init.iter() {
                    (&mut memory[data.offset as usize..]).write_all(&data.init)?;
                }
                memory
            } else {
                Vec::new()
            }
        };

        // Memory growth limit in pages; 0 when the module declares no memory.
        let max_memory = self.memory.as_ref().map(|x| x.max_size).unwrap_or(0) as usize;

        // Execution starts at the beginning of the entrypoint's code.
        let pc = 0;

        // The entrypoint has no callers, so the frame stack starts empty.
        let function_frames: Vec<FunctionState> = Vec::new();
        // If the entrypoint returns a value it is stored in register 0.
        let return_type = match outer_function.return_type() {
            BlockType::EmptyType => None,
            BlockType::ValueType(vt) => Some((0, vt)),
        };
        let locals_base = 0;

        let config = RunConfig {
            pc,
            instructions_idx,
            function_frames,
            return_type,
            memory,
            locals_vec: locals,
            locals_base,
            globals,
            max_memory,
            return_value_loc: 0,
        };
        self.run_config(host, config)
    }
592
593 fn get_entrypoint_index<Q>(&self, name: &Q) -> RunResult<&FuncIndex>
595 where
596 Q: std::fmt::Display + Ord + ?Sized,
597 Name: std::borrow::Borrow<Q>, {
598 self.export
599 .get(name)
600 .ok_or_else(|| anyhow!("Trying to invoke a method that does not exist: {}.", name))
601 }
602
603 pub fn has_entrypoint<Q>(&self, name: &Q) -> bool
605 where
606 Q: std::fmt::Display + Ord + ?Sized,
607 Name: std::borrow::Borrow<Q>, {
608 self.get_entrypoint_index(name).is_ok()
609 }
610
611 pub fn run_config<H: Host<I>>(
619 &self,
620 host: &mut H,
621 config: RunConfig,
622 ) -> RunResult<ExecutionOutcome<H::Interrupt>> {
623 let RunConfig {
629 pc,
630 mut instructions_idx,
631 mut function_frames,
632 mut return_type,
633 mut memory,
634 mut locals_vec,
635 mut locals_base,
636 mut globals,
637 max_memory,
638 return_value_loc: _,
639 } = config;
640
641 let mut stack = RuntimeStack {
643 stack: vec![unsafe { std::mem::zeroed() }; 10],
646 };
647
648 let mut locals = &mut locals_vec[locals_base..];
649 let code = unsafe { self.code.get_unchecked(instructions_idx) };
655 let mut constants = code.constants();
656 let mut instructions = code.code();
657 let mut pc = unsafe { instructions.as_ptr().add(pc) };
658 'outer: loop {
659 let instr = unsafe { *pc };
660 pc = unsafe { pc.add(1) };
661 match unsafe { std::mem::transmute::<u8, InternalOpcode>(instr) } {
667 InternalOpcode::Unreachable => bail!("Unreachable."),
668 InternalOpcode::If => {
669 let condition = get_local(constants, locals, &mut pc);
670 let else_target = get_u32(&mut pc);
671 if unsafe { condition.short } == 0 {
672 pc = unsafe { instructions.as_ptr().add(else_target as usize) };
674 } }
676 InternalOpcode::Br => {
677 let target = get_u32(&mut pc);
680 pc = unsafe { instructions.as_ptr().add(target as usize) };
681 }
682 InternalOpcode::BrIf => {
683 let target = get_u32(&mut pc);
686 let condition = get_local(constants, locals, &mut pc);
687 if unsafe { condition.short } != 0 {
688 pc = unsafe { instructions.as_ptr().add(target as usize) };
689 } }
691 InternalOpcode::BrTable => {
692 let condition = get_local(constants, locals, &mut pc);
693 let num_labels = get_u16(&mut pc);
694 let top: u32 = unsafe { condition.short } as u32;
695 if top < u32::from(num_labels) {
696 pc = unsafe { pc.add((top as usize + 1) * 4) }; } let target = get_u32(&mut pc);
700 pc = unsafe { instructions.as_ptr().add(target as usize) };
701 }
702 InternalOpcode::BrTableCarry => {
703 let condition = get_local(constants, locals, &mut pc);
704 let copy_source = get_local(constants, locals, &mut pc);
705 let num_labels = get_u16(&mut pc);
706 let top: u32 = unsafe { condition.short } as u32;
707 if top < u32::from(num_labels) {
708 pc = unsafe { pc.add((top as usize + 1) * 8) }; } let copy_target = get_local_mut(locals, &mut pc);
711 *copy_target = copy_source;
712 let target = get_u32(&mut pc);
713 pc = unsafe { instructions.as_ptr().add(target as usize) };
714 }
715 InternalOpcode::Copy => {
716 let copy_source = get_local(constants, locals, &mut pc);
717 let copy_target = get_local_mut(locals, &mut pc);
718 *copy_target = copy_source;
719 }
720 InternalOpcode::Return => {
721 host.track_return();
722 if let Some(top_frame) = function_frames.pop() {
723 if let Some((place, _)) = return_type {
726 locals_vec[top_frame.locals_base + place] = locals[0];
727 }
728 instructions_idx = top_frame.instructions_idx;
729 let code = unsafe { self.code.get_unchecked(instructions_idx) };
737 instructions = code.code();
738 pc = unsafe { instructions.as_ptr().add(top_frame.pc) };
739 constants = code.constants();
740 return_type = top_frame.return_type;
741 unsafe { locals_vec.set_len(locals_base) };
743 locals_base = top_frame.locals_base;
744 locals = &mut locals_vec[locals_base..];
745 } else {
746 break 'outer;
747 }
748 }
749 InternalOpcode::TickEnergy => {
750 let v = get_u32(&mut pc);
751 host.tick_energy(v as u64)?;
752 }
753 InternalOpcode::Call => {
754 let idx = get_u32(&mut pc);
765 if let Some(f) = self.imports.get(idx as usize) {
766 let params_len = f.ty().parameters.len();
767 stack.stack.clear();
768 stack.stack.reserve(params_len);
769 for offset in (0..params_len).rev() {
773 let val = get_local(constants, locals, &mut pc);
774 unsafe { stack.stack.as_mut_ptr().add(offset).write(val) }
775 }
776 unsafe {
777 stack.stack.set_len(params_len);
778 }
779 let return_value_loc = if f.ty().result.is_some() {
780 let target = get_i32(&mut pc);
781 target as usize
782 } else {
783 0
784 };
785 if let Some(reason) = host.call(f, &mut memory, &mut stack)? {
787 return Ok(ExecutionOutcome::Interrupted {
788 reason,
789 config: RunConfig {
790 pc: unsafe { pc.offset_from(instructions.as_ptr()) as usize }, instructions_idx,
792 function_frames,
793 return_type,
794 memory,
795 locals_vec,
796 locals_base,
797 globals,
798 max_memory,
799 return_value_loc,
800 },
801 });
802 } else if f.ty().result.is_some() {
803 locals[return_value_loc] = stack.pop();
804 }
805 assert!(stack.stack.is_empty());
806 } else {
807 host.track_call()?;
808 let local_idx = idx as usize - self.imports.len();
809 let f = self
810 .code
811 .get(local_idx)
812 .ok_or_else(|| anyhow!("Accessing non-existent code."))?;
813
814 let current_size = locals_vec.len();
816 let new_size = current_size + f.num_registers() as usize;
817 locals_vec.resize(new_size, unsafe { std::mem::zeroed() });
818 let (prefix, new_locals) = locals_vec.split_at_mut(current_size);
819 let current_locals = &mut prefix[locals_base..];
820
821 for p in new_locals[..f.num_params() as usize].iter_mut().rev() {
823 *p = get_local(constants, current_locals, &mut pc)
824 }
825 let new_return_type = match f.return_type() {
826 BlockType::EmptyType => None,
827 BlockType::ValueType(v) => Some((get_i32(&mut pc) as usize, v)),
828 };
829
830 let current_frame = FunctionState {
831 pc: unsafe { pc.offset_from(instructions.as_ptr()) as usize }, instructions_idx,
833 locals_base,
834 return_type,
835 };
836 function_frames.push(current_frame);
837 locals_base = current_size;
838
839 locals = new_locals;
840 return_type = new_return_type;
841 instructions = f.code();
842 constants = f.constants();
843 instructions_idx = local_idx;
844 pc = instructions.as_ptr();
845 }
846 }
847 InternalOpcode::CallIndirect => {
848 let ty_idx = get_u32(&mut pc);
849 let ty = self
850 .ty
851 .get(ty_idx as usize)
852 .ok_or_else(|| anyhow!("Non-existent type."))?;
853 let idx = get_local(constants, locals, &mut pc);
854 let idx = unsafe { idx.short } as u32;
855 if let Some(Some(f_idx)) = self.table.functions.get(idx as usize) {
856 if let Some(f) = self.imports.get(*f_idx as usize) {
857 let ty_actual = f.ty();
858 ensure!(ty_actual == ty, "Actual type different from expected.");
860
861 let params_len = f.ty().parameters.len();
862 stack.stack.clear();
863 stack.stack.reserve(params_len);
864 for offset in (0..params_len).rev() {
865 let val = get_local(constants, locals, &mut pc);
866 unsafe { stack.stack.as_mut_ptr().add(offset).write(val) }
867 }
868 unsafe {
869 stack.stack.set_len(params_len);
870 }
871 let return_value_loc = if f.ty().result.is_some() {
872 let target = get_i32(&mut pc);
873 target as usize
874 } else {
875 0
876 };
877
878 if let Some(reason) = host.call(f, &mut memory, &mut stack)? {
880 return Ok(ExecutionOutcome::Interrupted {
881 reason,
882 config: RunConfig {
883 pc: unsafe {
884 pc.offset_from(instructions.as_ptr()) as usize
885 }, instructions_idx,
887 function_frames,
888 return_type,
889 memory,
890 locals_vec,
891 locals_base,
892 globals,
893 max_memory,
894 return_value_loc,
895 },
896 });
897 } else if f.ty().result.is_some() {
898 locals[return_value_loc] = stack.pop();
899 }
900 } else {
901 host.track_call()?;
902 let local_idx = *f_idx as usize - self.imports.len();
903 let f = self
904 .code
905 .get(local_idx)
906 .ok_or_else(|| anyhow!("Accessing non-existent code."))?;
907 let ty_actual =
908 self.ty.get(f.type_idx() as usize).ok_or_else(|| {
909 anyhow!("Non-existent type. This should not happen.")
910 })?;
911 ensure!(
912 f.type_idx() == ty_idx || ty_actual == ty,
913 "Actual type different from expected."
914 );
915
916 let current_size = locals_vec.len();
919 let new_size = current_size + f.num_registers() as usize;
920 locals_vec.resize(new_size, unsafe { std::mem::zeroed() });
921 let (prefix, new_locals) = locals_vec.split_at_mut(current_size);
922 let current_locals = &mut prefix[locals_base..];
923 for p in new_locals[..f.num_params() as usize].iter_mut().rev() {
925 *p = get_local(constants, current_locals, &mut pc)
926 }
927 let new_return_type = match f.return_type() {
928 BlockType::EmptyType => None,
929 BlockType::ValueType(v) => Some((get_i32(&mut pc) as usize, v)),
930 };
931
932 let current_frame = FunctionState {
933 pc: unsafe { pc.offset_from(instructions.as_ptr()) as usize }, instructions_idx,
935 locals_base,
936 return_type,
937 };
938 function_frames.push(current_frame);
939 locals_base = current_size;
940
941 locals = new_locals;
942
943 return_type = new_return_type;
944 instructions = f.code();
945 constants = f.constants();
946 instructions_idx = local_idx;
947 pc = instructions.as_ptr();
948 }
949 } else {
950 bail!("Calling undefined function {}.", idx) }
952 }
953 InternalOpcode::Select => {
954 let top = get_local(constants, locals, &mut pc);
955 let t2 = get_local(constants, locals, &mut pc);
956 let t1 = get_local(constants, locals, &mut pc);
957 let target = get_local_mut(locals, &mut pc);
958 if unsafe { top.short } == 0 {
959 *target = t2;
960 } else {
961 *target = t1;
962 }
963 }
964 InternalOpcode::GlobalGet => {
965 let idx = get_u16(&mut pc);
966 let copy_target = get_local_mut(locals, &mut pc);
967 *copy_target = globals[idx as usize];
968 }
969 InternalOpcode::GlobalSet => {
970 let idx = get_u16(&mut pc);
971 let copy_target = get_local(constants, locals, &mut pc);
972 globals[idx as usize] = copy_target;
973 }
974 InternalOpcode::I32Load => {
975 let (result, pos) = memory_load(constants, locals, &mut pc);
976 let val = read_i32(&memory, pos)?;
977 *result = StackValue::from(val);
978 }
979 InternalOpcode::I64Load => {
980 let (result, pos) = memory_load(constants, locals, &mut pc);
981 let val = read_i64(&memory, pos)?;
982 *result = StackValue::from(val);
983 }
984 InternalOpcode::I32Load8S => {
985 let (result, pos) = memory_load(constants, locals, &mut pc);
986 let val = read_i8(&memory, pos)?;
987 *result = StackValue::from(val as i32);
988 }
989 InternalOpcode::I32Load8U => {
990 let (result, pos) = memory_load(constants, locals, &mut pc);
991 let val = read_u8(&memory, pos)?;
992 *result = StackValue::from(val as i32);
993 }
994 InternalOpcode::I32Load16S => {
995 let (result, pos) = memory_load(constants, locals, &mut pc);
996 let val = read_i16(&memory, pos)?;
997 *result = StackValue::from(val as i32);
998 }
999 InternalOpcode::I32Load16U => {
1000 let (result, pos) = memory_load(constants, locals, &mut pc);
1001 let val = read_u16(&memory, pos)?;
1002 *result = StackValue::from(val as i32);
1003 }
1004 InternalOpcode::I64Load8S => {
1005 let (result, pos) = memory_load(constants, locals, &mut pc);
1006 let val = read_i8(&memory, pos)?;
1007 *result = StackValue::from(val as i64);
1008 }
1009 InternalOpcode::I64Load8U => {
1010 let (result, pos) = memory_load(constants, locals, &mut pc);
1011 let val = read_u8(&memory, pos)?;
1012 *result = StackValue::from(val as i64);
1013 }
1014 InternalOpcode::I64Load16S => {
1015 let (result, pos) = memory_load(constants, locals, &mut pc);
1016 let val = read_i16(&memory, pos)?;
1017 *result = StackValue::from(val as i64);
1018 }
1019 InternalOpcode::I64Load16U => {
1020 let (result, pos) = memory_load(constants, locals, &mut pc);
1021 let val = read_u16(&memory, pos)?;
1022 *result = StackValue::from(val as i64);
1023 }
1024 InternalOpcode::I64Load32S => {
1025 let (result, pos) = memory_load(constants, locals, &mut pc);
1026 let val = read_i32(&memory, pos)?;
1027 *result = StackValue::from(val as i64);
1028 }
1029 InternalOpcode::I64Load32U => {
1030 let (result, pos) = memory_load(constants, locals, &mut pc);
1031 let val = read_u32(&memory, pos)?;
1032 *result = StackValue::from(val as i64);
1033 }
1034 InternalOpcode::I32Store => {
1035 let (val, pos) = memory_store(constants, locals, &mut pc);
1036 write_memory_at(&mut memory, pos, &unsafe { val.short }.to_le_bytes())?;
1037 }
1038 InternalOpcode::I64Store => {
1039 let (val, pos) = memory_store(constants, locals, &mut pc);
1040 write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes())?;
1041 }
1042 InternalOpcode::I32Store8 => {
1043 let (val, pos) = memory_store(constants, locals, &mut pc);
1044 write_memory_at(&mut memory, pos, &unsafe { val.short }.to_le_bytes()[..1])?;
1045 }
1046 InternalOpcode::I32Store16 => {
1047 let (val, pos) = memory_store(constants, locals, &mut pc);
1048 write_memory_at(&mut memory, pos, &unsafe { val.short }.to_le_bytes()[..2])?;
1049 }
1050 InternalOpcode::I64Store8 => {
1051 let (val, pos) = memory_store(constants, locals, &mut pc);
1052 write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes()[..1])?;
1053 }
1054 InternalOpcode::I64Store16 => {
1055 let (val, pos) = memory_store(constants, locals, &mut pc);
1056 write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes()[..2])?;
1057 }
1058 InternalOpcode::I64Store32 => {
1059 let (val, pos) = memory_store(constants, locals, &mut pc);
1060 write_memory_at(&mut memory, pos, &unsafe { val.long }.to_le_bytes()[..4])?;
1061 }
1062 InternalOpcode::MemorySize => {
1063 let target = get_local_mut(locals, &mut pc);
1064 let l = memory.len() / PAGE_SIZE as usize;
1065 *target = StackValue::from(l as i32);
1066 }
1067 InternalOpcode::MemoryGrow => {
1068 let val = get_local(constants, locals, &mut pc);
1069 let target = get_local_mut(locals, &mut pc);
1070 let n = unsafe { val.short } as u32;
1071 let sz = memory.len() / PAGE_SIZE as usize;
1072 if sz + n as usize > max_memory {
1073 target.short = -1i32;
1074 } else {
1075 if n != 0 {
1076 unsafe { memory.set_len((sz + n as usize) * PAGE_SIZE as usize) }
1077 }
1078 target.short = sz as i32;
1079 }
1080 }
1081 InternalOpcode::I32Eqz => {
1082 let source = get_local(constants, locals, &mut pc);
1083 let target = get_local_mut(locals, &mut pc);
1084 let val = unsafe { source.short };
1085 target.short = if val == 0 {
1086 1i32
1087 } else {
1088 0i32
1089 };
1090 }
1091 InternalOpcode::I32Eq => {
1092 binary_i32(constants, locals, &mut pc, |left, right| (left == right) as i32);
1093 }
1094 InternalOpcode::I32Ne => {
1095 binary_i32(constants, locals, &mut pc, |left, right| (left != right) as i32);
1096 }
1097 InternalOpcode::I32LtS => {
1098 binary_i32(constants, locals, &mut pc, |left, right| (left < right) as i32);
1099 }
1100 InternalOpcode::I32LtU => {
1101 binary_i32(constants, locals, &mut pc, |left, right| {
1102 ((left as u32) < (right as u32)) as i32
1103 });
1104 }
1105 InternalOpcode::I32GtS => {
1106 binary_i32(constants, locals, &mut pc, |left, right| (left > right) as i32);
1107 }
1108 InternalOpcode::I32GtU => {
1109 binary_i32(constants, locals, &mut pc, |left, right| {
1110 ((left as u32) > (right as u32)) as i32
1111 });
1112 }
1113 InternalOpcode::I32LeS => {
1114 binary_i32(constants, locals, &mut pc, |left, right| (left <= right) as i32);
1115 }
1116 InternalOpcode::I32LeU => {
1117 binary_i32(constants, locals, &mut pc, |left, right| {
1118 ((left as u32) <= (right as u32)) as i32
1119 });
1120 }
1121 InternalOpcode::I32GeS => {
1122 binary_i32(constants, locals, &mut pc, |left, right| (left >= right) as i32);
1123 }
1124 InternalOpcode::I32GeU => {
1125 binary_i32(constants, locals, &mut pc, |left, right| {
1126 ((left as u32) >= (right as u32)) as i32
1127 });
1128 }
1129 InternalOpcode::I64Eqz => {
1130 let source = get_local(constants, locals, &mut pc);
1131 let target = get_local_mut(locals, &mut pc);
1132 let val = unsafe { source.long };
1133 target.short = if val == 0 {
1134 1i32
1135 } else {
1136 0i32
1137 };
1138 }
1139 InternalOpcode::I64Eq => {
1140 binary_i64_test(constants, locals, &mut pc, |left, right| {
1141 (left == right) as i32
1142 });
1143 }
1144 InternalOpcode::I64Ne => {
1145 binary_i64_test(constants, locals, &mut pc, |left, right| {
1146 (left != right) as i32
1147 });
1148 }
1149 InternalOpcode::I64LtS => {
1150 binary_i64_test(constants, locals, &mut pc, |left, right| {
1151 (left < right) as i32
1152 });
1153 }
1154 InternalOpcode::I64LtU => {
1155 binary_i64_test(constants, locals, &mut pc, |left, right| {
1156 ((left as u64) < (right as u64)) as i32
1157 });
1158 }
1159 InternalOpcode::I64GtS => {
1160 binary_i64_test(constants, locals, &mut pc, |left, right| {
1161 (left > right) as i32
1162 });
1163 }
1164 InternalOpcode::I64GtU => {
                    // Tail of the I64GtU arm: unsigned 64-bit `>`; comparison
                    // results are materialized as an i32 that is 0 or 1.
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) > (right as u64)) as i32
                    });
                }
                // --- Remaining i64 comparisons (signed vs. unsigned via `as u64`). ---
                InternalOpcode::I64LeS => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left <= right) as i32
                    });
                }
                InternalOpcode::I64LeU => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) <= (right as u64)) as i32
                    });
                }
                InternalOpcode::I64GeS => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        (left >= right) as i32
                    });
                }
                InternalOpcode::I64GeU => {
                    binary_i64_test(constants, locals, &mut pc, |left, right| {
                        ((left as u64) >= (right as u64)) as i32
                    });
                }
                // --- i32 unary bit-counting operators. ---
                InternalOpcode::I32Clz => {
                    unary_i32(constants, locals, &mut pc, |x| x.leading_zeros() as i32);
                }
                InternalOpcode::I32Ctz => {
                    unary_i32(constants, locals, &mut pc, |x| x.trailing_zeros() as i32);
                }
                InternalOpcode::I32Popcnt => {
                    unary_i32(constants, locals, &mut pc, |x| x.count_ones() as i32);
                }
                // --- i32 arithmetic: Wasm semantics are wrapping (modulo 2^32). ---
                InternalOpcode::I32Add => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.wrapping_add(y));
                }
                InternalOpcode::I32Sub => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.wrapping_sub(y));
                }
                InternalOpcode::I32Mul => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.wrapping_mul(y));
                }
                // Division/remainder use the `_partial` helpers: `checked_*`
                // yields None on division by zero (and i32::MIN / -1 overflow
                // for the signed case), which the helper turns into an error —
                // propagated by `?` as the Wasm trap.
                InternalOpcode::I32DivS => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| x.checked_div(y))?;
                }
                InternalOpcode::I32DivU => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| {
                        (x as u32).checked_div(y as u32).map(|x| x as i32)
                    })?;
                }
                InternalOpcode::I32RemS => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| x.checked_rem(y))?;
                }
                InternalOpcode::I32RemU => {
                    binary_i32_partial(constants, locals, &mut pc, |x, y| {
                        (x as u32).checked_rem(y as u32).map(|x| x as i32)
                    })?;
                }
                // --- i32 bitwise and shift operators. ---
                InternalOpcode::I32And => {
                    binary_i32(constants, locals, &mut pc, |x, y| x & y);
                }
                InternalOpcode::I32Or => {
                    binary_i32(constants, locals, &mut pc, |x, y| x | y);
                }
                InternalOpcode::I32Xor => {
                    binary_i32(constants, locals, &mut pc, |x, y| x ^ y);
                }
                // Wasm masks the shift amount to the operand width (`% 32`);
                // this also keeps Rust's shift from overflowing (which would
                // panic in debug builds / be UB-adjacent).
                InternalOpcode::I32Shl => {
                    binary_i32(constants, locals, &mut pc, |x, y| x << (y as u32 % 32));
                }
                InternalOpcode::I32ShrS => {
                    // `x` is i32, so `>>` is an arithmetic (sign-propagating) shift.
                    binary_i32(constants, locals, &mut pc, |x, y| x >> (y as u32 % 32));
                }
                InternalOpcode::I32ShrU => {
                    // Logical shift: go through u32 so zeros are shifted in.
                    binary_i32(constants, locals, &mut pc, |x, y| {
                        ((x as u32) >> (y as u32 % 32)) as i32
                    });
                }
                InternalOpcode::I32Rotl => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.rotate_left(y as u32 % 32));
                }
                InternalOpcode::I32Rotr => {
                    binary_i32(constants, locals, &mut pc, |x, y| x.rotate_right(y as u32 % 32));
                }
                // --- i64 unary bit-counting operators. ---
                InternalOpcode::I64Clz => {
                    unary_i64(constants, locals, &mut pc, |x| x.leading_zeros() as i64);
                }
                InternalOpcode::I64Ctz => {
                    unary_i64(constants, locals, &mut pc, |x| x.trailing_zeros() as i64);
                }
                InternalOpcode::I64Popcnt => {
                    unary_i64(constants, locals, &mut pc, |x| x.count_ones() as i64);
                }
                // --- i64 arithmetic: wrapping (modulo 2^64), mirroring the i32 arms. ---
                InternalOpcode::I64Add => {
                    binary_i64(constants, locals, &mut pc, |x, y| x.wrapping_add(y));
                }
                InternalOpcode::I64Sub => {
                    binary_i64(constants, locals, &mut pc, |x, y| x.wrapping_sub(y));
                }
                InternalOpcode::I64Mul => {
                    binary_i64(constants, locals, &mut pc, |x, y| x.wrapping_mul(y));
                }
                // As for i32: None from `checked_*` (zero divisor, or
                // i64::MIN / -1 for signed division) becomes a trap via `?`.
                InternalOpcode::I64DivS => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| x.checked_div(y))?;
                }
                InternalOpcode::I64DivU => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| {
                        (x as u64).checked_div(y as u64).map(|x| x as i64)
                    })?;
                }
                InternalOpcode::I64RemS => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| x.checked_rem(y))?;
                }
                InternalOpcode::I64RemU => {
                    binary_i64_partial(constants, locals, &mut pc, |x, y| {
                        (x as u64).checked_rem(y as u64).map(|x| x as i64)
                    })?;
                }
                // --- i64 bitwise and shift operators (shift amount masked `% 64`). ---
                InternalOpcode::I64And => {
                    binary_i64(constants, locals, &mut pc, |x, y| x & y);
                }
                InternalOpcode::I64Or => {
                    binary_i64(constants, locals, &mut pc, |x, y| x | y);
                }
                InternalOpcode::I64Xor => {
                    binary_i64(constants, locals, &mut pc, |x, y| x ^ y);
                }
                InternalOpcode::I64Shl => {
                    binary_i64(constants, locals, &mut pc, |x, y| x << (y as u64 % 64));
                }
                InternalOpcode::I64ShrS => {
                    // Arithmetic shift (i64 operand keeps the sign bit).
                    binary_i64(constants, locals, &mut pc, |x, y| x >> (y as u64 % 64));
                }
                InternalOpcode::I64ShrU => {
                    // Logical shift via u64.
                    binary_i64(constants, locals, &mut pc, |x, y| {
                        ((x as u64) >> (y as u64 % 64)) as i64
                    });
                }
                InternalOpcode::I64Rotl => {
                    binary_i64(constants, locals, &mut pc, |x, y| {
                        x.rotate_left((y as u64 % 64) as u32)
                    });
                }
                InternalOpcode::I64Rotr => {
                    binary_i64(constants, locals, &mut pc, |x, y| {
                        x.rotate_right((y as u64 % 64) as u32)
                    });
                }
                // --- Conversions between i32 and i64. The `unsafe` union
                // reads assume the artifact was validated so that the source
                // slot actually holds a value of the expected width
                // (NOTE(review): relies on prior module validation — confirm).
                // `get_local`/`get_local_mut` take `&mut pc`, presumably to
                // decode operand indices and advance past them.
                InternalOpcode::I32WrapI64 => {
                    // i32.wrap_i64: keep the low 32 bits.
                    let source = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    target.short = unsafe { source.long } as i32;
                }
                InternalOpcode::I64ExtendI32S => {
                    // i64.extend_i32_s: sign-extend (i32 -> i64).
                    let source = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    target.long = unsafe { source.short } as i64;
                }
                InternalOpcode::I64ExtendI32U => {
                    // i64.extend_i32_u: zero-extend, via the u32 round trip.
                    let source = get_local(constants, locals, &mut pc);
                    let target = get_local_mut(locals, &mut pc);
                    target.long = unsafe { source.short } as u32 as i64;
                }
                // --- Sign-extension operators (Wasm sign-extension proposal):
                // reinterpret the low 8/16/32 bits as signed and extend back.
                InternalOpcode::I32Extend8S => {
                    unary_i32(constants, locals, &mut pc, |x| x as i8 as i32)
                }
                InternalOpcode::I32Extend16S => {
                    unary_i32(constants, locals, &mut pc, |x| x as i16 as i32)
                }
                InternalOpcode::I64Extend8S => {
                    unary_i64(constants, locals, &mut pc, |x| x as i8 as i64)
                }
                InternalOpcode::I64Extend16S => {
                    unary_i64(constants, locals, &mut pc, |x| x as i16 as i64)
                }
                InternalOpcode::I64Extend32S => {
                    unary_i64(constants, locals, &mut pc, |x| x as i32 as i64)
                }
            } // end of the opcode match
        } // end of the dispatch loop (reached only when execution completes)
        // Execution finished without interruption: package the declared return
        // value (if any) from locals slot `v`, together with linear memory.
        // SAFETY of the union reads: `return_type` records which width the
        // function returns, so the matching union field is the one that was
        // written (NOTE(review): guaranteed by validation — confirm).
        match return_type {
            Some((v, ValueType::I32)) => Ok(ExecutionOutcome::Success {
                result: Some(Value::I32(unsafe { locals[v].short })),
                memory,
            }),
            Some((v, ValueType::I64)) => Ok(ExecutionOutcome::Success {
                result: Some(Value::I64(unsafe { locals[v].long })),
                memory,
            }),
            None => Ok(ExecutionOutcome::Success {
                result: None,
                memory,
            }),
        }
1362 }
1363}