use crate::{
    func::{FuncInstance, FuncInstanceInternal, FuncRef},
    host::Externals,
    isa,
    memory::MemoryRef,
    memory_units::Pages,
    module::ModuleRef,
    nan_preserving_float::{F32, F64},
    value::{
        ArithmeticOps,
        ExtendInto,
        Float,
        Integer,
        LittleEndianConvert,
        TransmuteInto,
        TryTruncateInto,
        WrapInto,
    },
    RuntimeValue,
    Signature,
    Trap,
    TrapCode,
    ValueType,
};
use alloc::{boxed::Box, vec::Vec};
use casper_wasm::elements::Local;
#[cfg(feature = "sign_ext")]
use casper_wasmi_core::SignExtendFrom;
use core::{fmt, ops};
use validation::{DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX};

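/// Maximum number of bytes on the value stack.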
pub const DEFAULT_VALUE_STACK_LIMIT: usize = 1024 * 1024;

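/// Maximum number of levels on the call stack.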
pub const DEFAULT_CALL_STACK_LIMIT: usize = 64 * 1024;

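/// Untyped value kept on the value stack; every WebAssembly value type is
/// stored in the low bits of a single `u64`.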
#[derive(Copy, Clone, Debug, PartialEq, Default)]
#[repr(transparent)]
struct ValueInternal(pub u64);

impl ValueInternal {
    pub fn with_type(self, ty: ValueType) -> RuntimeValue {
        match ty {
            ValueType::I32 => RuntimeValue::I32(<_>::from_value_internal(self)),
            ValueType::I64 => RuntimeValue::I64(<_>::from_value_internal(self)),
            ValueType::F32 => RuntimeValue::F32(<_>::from_value_internal(self)),
            ValueType::F64 => RuntimeValue::F64(<_>::from_value_internal(self)),
        }
    }
}

trait FromValueInternal
where
    Self: Sized,
{
    fn from_value_internal(val: ValueInternal) -> Self;
}

macro_rules! impl_from_value_internal {
    ($($t:ty),*) => {
        $(
            impl FromValueInternal for $t {
                fn from_value_internal(ValueInternal(val): ValueInternal) -> Self {
                    val as _
                }
            }

            impl From<$t> for ValueInternal {
                fn from(other: $t) -> Self {
                    ValueInternal(other as _)
                }
            }
        )*
    };
}

macro_rules! impl_from_value_internal_float {
    ($($t:ty),*) => {
        $(
            impl FromValueInternal for $t {
                fn from_value_internal(ValueInternal(val): ValueInternal) -> Self {
                    <$t>::from_bits(val as _)
                }
            }

            impl From<$t> for ValueInternal {
                fn from(other: $t) -> Self {
                    ValueInternal(other.to_bits() as _)
                }
            }
        )*
    };
}

impl_from_value_internal!(i8, u8, i16, u16, i32, u32, i64, u64);
impl_from_value_internal_float!(f32, f64, F32, F64);

impl From<bool> for ValueInternal {
    fn from(other: bool) -> Self {
        (if other { 1 } else { 0 }).into()
    }
}

impl FromValueInternal for bool {
    fn from_value_internal(ValueInternal(val): ValueInternal) -> Self {
        val != 0
    }
}

impl From<RuntimeValue> for ValueInternal {
    fn from(other: RuntimeValue) -> Self {
        match other {
            RuntimeValue::I32(val) => val.into(),
            RuntimeValue::I64(val) => val.into(),
            RuntimeValue::F32(val) => val.into(),
            RuntimeValue::F64(val) => val.into(),
        }
    }
}

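/// Interpreter action to take after executing an instruction.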
pub enum InstructionOutcome {
    /// Continue with the next instruction.
    RunNextInstruction,
    /// Branch to the given instruction target.
    Branch(isa::Target),
    /// Execute a function call.
    ExecuteCall(FuncRef),
    /// Return from the current function block.
    Return(isa::DropKeep),
}

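/// Function execution state, related to pausing and resuming execution.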
#[derive(PartialEq, Eq)]
pub enum InterpreterState {
    /// The interpreter has been created, but has not started execution yet.
    Initialized,
    /// The interpreter has started execution and cannot be started again.
    Started,
    /// Execution returned a host trap; it can be resumed by providing a
    /// return value of the recorded type.
    Resumable(Option<ValueType>),
}

impl InterpreterState {
    pub fn is_resumable(&self) -> bool {
        matches!(self, InterpreterState::Resumable(_))
    }
}

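/// Outcome of running a single function frame.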
enum RunResult {
    /// The function has returned.
    Return,
    /// The function is calling another function.
    NestedCall(FuncRef),
}

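/// Function interpreter.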
pub struct Interpreter {
    value_stack: ValueStack,
    call_stack: CallStack,
    return_type: Option<ValueType>,
    state: InterpreterState,
    /// Scratch buffer that is reused for passing arguments to host functions.
    scratch: Vec<RuntimeValue>,
}

impl Interpreter {
    pub fn new(
        func: &FuncRef,
        args: &[RuntimeValue],
        mut stack_recycler: Option<&mut StackRecycler>,
    ) -> Result<Interpreter, Trap> {
        let mut value_stack = StackRecycler::recreate_value_stack(&mut stack_recycler);
        for &arg in args {
            let arg = arg.into();
            value_stack
                .push(arg)
                .map_err(|_| Trap::from(TrapCode::StackOverflow))?;
        }

        let mut call_stack = StackRecycler::recreate_call_stack(&mut stack_recycler);
        let initial_frame = FunctionContext::new(func.clone());
        call_stack.push(initial_frame);

        let return_type = func.signature().return_type();

        Ok(Interpreter {
            value_stack,
            call_stack,
            return_type,
            state: InterpreterState::Initialized,
            scratch: Vec::new(),
        })
    }

    pub fn state(&self) -> &InterpreterState {
        &self.state
    }

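    /// Starts execution of the function this interpreter was created with,
    /// returning its optional return value once the call stack unwinds.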
    pub fn start_execution<'a, E: Externals + 'a>(
        &mut self,
        externals: &'a mut E,
    ) -> Result<Option<RuntimeValue>, Trap> {
        assert!(self.state == InterpreterState::Initialized);

        self.state = InterpreterState::Started;
        self.run_interpreter_loop(externals)?;

        let opt_return_value = self
            .return_type
            .map(|vt| self.value_stack.pop().with_type(vt));

        assert!(self.value_stack.len() == 0);

        Ok(opt_return_value)
    }

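    /// Resumes execution after a resumable host trap, optionally pushing the
    /// host function's return value back onto the value stack first.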
    pub fn resume_execution<'a, E: Externals + 'a>(
        &mut self,
        return_val: Option<RuntimeValue>,
        externals: &'a mut E,
    ) -> Result<Option<RuntimeValue>, Trap> {
        use core::mem::swap;

        assert!(self.state.is_resumable());

        let mut resumable_state = InterpreterState::Started;
        swap(&mut self.state, &mut resumable_state);

        if let Some(return_val) = return_val {
            self.value_stack
                .push(return_val.into())
                .map_err(Trap::from)?;
        }

        self.run_interpreter_loop(externals)?;

        let opt_return_value = self
            .return_type
            .map(|vt| self.value_stack.pop().with_type(vt));

        assert!(self.value_stack.len() == 0);

        Ok(opt_return_value)
    }

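    /// Drives the call stack to completion, executing one function frame at a
    /// time and dispatching nested and host calls as they occur.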
    fn run_interpreter_loop<'a, E: Externals + 'a>(
        &mut self,
        externals: &'a mut E,
    ) -> Result<(), Trap> {
        loop {
            let mut function_context = self.call_stack.pop().expect(
                "on loop entry - not empty; on loop continue - checking for emptiness; qed",
            );
            let function_ref = function_context.function.clone();
            let function_body = function_ref.body().expect(
                "Host functions checked in function_return below; Internal functions always have a body; qed",
            );

            if !function_context.is_initialized() {
                function_context.initialize(&function_body.locals, &mut self.value_stack)?;
            }

            let function_return = self
                .do_run_function(&mut function_context, &function_body.code)
                .map_err(Trap::from)?;

            match function_return {
                RunResult::Return => {
                    if self.call_stack.is_empty() {
                        // This was the last frame in the call stack, so
                        // execution is complete.
                        return Ok(());
                    }
                }
                RunResult::NestedCall(nested_func) => {
                    if self.call_stack.is_full() {
                        return Err(TrapCode::StackOverflow.into());
                    }

                    match *nested_func.as_internal() {
                        FuncInstanceInternal::Internal { .. } => {
                            let nested_context = FunctionContext::new(nested_func.clone());
                            self.call_stack.push(function_context);
                            self.call_stack.push(nested_context);
                        }
                        FuncInstanceInternal::Host { ref signature, .. } => {
                            prepare_function_args(
                                signature,
                                &mut self.value_stack,
                                &mut self.scratch,
                            );
                            self.call_stack.push(function_context);

                            let return_val = match FuncInstance::invoke(
                                &nested_func,
                                &self.scratch,
                                externals,
                            ) {
                                Ok(val) => val,
                                Err(trap) => {
                                    if trap.is_host() {
                                        self.state = InterpreterState::Resumable(
                                            nested_func.signature().return_type(),
                                        );
                                    }
                                    return Err(trap);
                                }
                            };

                            // The host function must return a value of the
                            // type declared in its signature.
                            let value_ty = return_val.as_ref().map(|val| val.value_type());
                            let expected_ty = nested_func.signature().return_type();
                            if value_ty != expected_ty {
                                return Err(TrapCode::UnexpectedSignature.into());
                            }

                            if let Some(return_val) = return_val {
                                self.value_stack
                                    .push(return_val.into())
                                    .map_err(Trap::from)?;
                            }
                        }
                    }
                }
            }
        }
    }

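    /// Executes the instructions of a single function frame until the
    /// function returns or performs a nested call.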
    fn do_run_function(
        &mut self,
        function_context: &mut FunctionContext,
        instructions: &isa::Instructions,
    ) -> Result<RunResult, TrapCode> {
        let mut iter = instructions.iterate_from(function_context.position);

        loop {
            let instruction = iter.next().expect(
                "Ran out of instructions, this should be impossible \
                 since validation ensures that we either have an explicit \
                 return or an implicit block `end`.",
            );

            match self.run_instruction(function_context, &instruction)? {
                InstructionOutcome::RunNextInstruction => {}
                InstructionOutcome::Branch(target) => {
                    iter = instructions.iterate_from(target.dst_pc);
                    self.value_stack.drop_keep(target.drop_keep);
                }
                InstructionOutcome::ExecuteCall(func_ref) => {
                    function_context.position = iter.position();
                    return Ok(RunResult::NestedCall(func_ref));
                }
                InstructionOutcome::Return(drop_keep) => {
                    self.value_stack.drop_keep(drop_keep);
                    break;
                }
            }
        }

        Ok(RunResult::Return)
    }

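    /// Dispatches a single instruction to its handler and reports the outcome.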
    #[inline(always)]
    fn run_instruction(
        &mut self,
        context: &mut FunctionContext,
        instruction: &isa::Instruction,
    ) -> Result<InstructionOutcome, TrapCode> {
        match instruction {
            isa::Instruction::Unreachable => self.run_unreachable(context),

            isa::Instruction::Br(target) => self.run_br(context, *target),
            isa::Instruction::BrIfEqz(target) => self.run_br_eqz(*target),
            isa::Instruction::BrIfNez(target) => self.run_br_nez(*target),
            isa::Instruction::BrTable(targets) => self.run_br_table(*targets),
            isa::Instruction::Return(drop_keep) => self.run_return(*drop_keep),

            isa::Instruction::Call(index) => self.run_call(context, *index),
            isa::Instruction::CallIndirect(index, table) => {
                self.run_call_indirect(context, *index, *table)
            }

            isa::Instruction::Drop => self.run_drop(),
            isa::Instruction::Select => self.run_select(),

            isa::Instruction::GetLocal(depth) => self.run_get_local(*depth),
            isa::Instruction::SetLocal(depth) => self.run_set_local(*depth),
            isa::Instruction::TeeLocal(depth) => self.run_tee_local(*depth),
            isa::Instruction::GetGlobal(index) => self.run_get_global(context, *index),
            isa::Instruction::SetGlobal(index) => self.run_set_global(context, *index),

            isa::Instruction::I32Load(offset) => self.run_load::<i32>(context, *offset),
            isa::Instruction::I64Load(offset) => self.run_load::<i64>(context, *offset),
            isa::Instruction::F32Load(offset) => self.run_load::<F32>(context, *offset),
            isa::Instruction::F64Load(offset) => self.run_load::<F64>(context, *offset),
            isa::Instruction::I32Load8S(offset) => {
                self.run_load_extend::<i8, i32>(context, *offset)
            }
            isa::Instruction::I32Load8U(offset) => {
                self.run_load_extend::<u8, i32>(context, *offset)
            }
            isa::Instruction::I32Load16S(offset) => {
                self.run_load_extend::<i16, i32>(context, *offset)
            }
            isa::Instruction::I32Load16U(offset) => {
                self.run_load_extend::<u16, i32>(context, *offset)
            }
            isa::Instruction::I64Load8S(offset) => {
                self.run_load_extend::<i8, i64>(context, *offset)
            }
            isa::Instruction::I64Load8U(offset) => {
                self.run_load_extend::<u8, i64>(context, *offset)
            }
            isa::Instruction::I64Load16S(offset) => {
                self.run_load_extend::<i16, i64>(context, *offset)
            }
            isa::Instruction::I64Load16U(offset) => {
                self.run_load_extend::<u16, i64>(context, *offset)
            }
            isa::Instruction::I64Load32S(offset) => {
                self.run_load_extend::<i32, i64>(context, *offset)
            }
            isa::Instruction::I64Load32U(offset) => {
                self.run_load_extend::<u32, i64>(context, *offset)
            }

            isa::Instruction::I32Store(offset) => self.run_store::<i32>(context, *offset),
            isa::Instruction::I64Store(offset) => self.run_store::<i64>(context, *offset),
            isa::Instruction::F32Store(offset) => self.run_store::<F32>(context, *offset),
            isa::Instruction::F64Store(offset) => self.run_store::<F64>(context, *offset),
            isa::Instruction::I32Store8(offset) => self.run_store_wrap::<i32, i8>(context, *offset),
            isa::Instruction::I32Store16(offset) => {
                self.run_store_wrap::<i32, i16>(context, *offset)
            }
            isa::Instruction::I64Store8(offset) => self.run_store_wrap::<i64, i8>(context, *offset),
            isa::Instruction::I64Store16(offset) => {
                self.run_store_wrap::<i64, i16>(context, *offset)
            }
            isa::Instruction::I64Store32(offset) => {
                self.run_store_wrap::<i64, i32>(context, *offset)
            }

            isa::Instruction::CurrentMemory => self.run_current_memory(context),
            isa::Instruction::GrowMemory => self.run_grow_memory(context),

            isa::Instruction::I32Const(val) => self.run_const((*val).into()),
            isa::Instruction::I64Const(val) => self.run_const((*val).into()),
            isa::Instruction::F32Const(val) => self.run_const((*val).into()),
            isa::Instruction::F64Const(val) => self.run_const((*val).into()),

            isa::Instruction::I32Eqz => self.run_eqz::<i32>(),
            isa::Instruction::I32Eq => self.run_eq::<i32>(),
            isa::Instruction::I32Ne => self.run_ne::<i32>(),
            isa::Instruction::I32LtS => self.run_lt::<i32>(),
            isa::Instruction::I32LtU => self.run_lt::<u32>(),
            isa::Instruction::I32GtS => self.run_gt::<i32>(),
            isa::Instruction::I32GtU => self.run_gt::<u32>(),
            isa::Instruction::I32LeS => self.run_lte::<i32>(),
            isa::Instruction::I32LeU => self.run_lte::<u32>(),
            isa::Instruction::I32GeS => self.run_gte::<i32>(),
            isa::Instruction::I32GeU => self.run_gte::<u32>(),

            isa::Instruction::I64Eqz => self.run_eqz::<i64>(),
            isa::Instruction::I64Eq => self.run_eq::<i64>(),
            isa::Instruction::I64Ne => self.run_ne::<i64>(),
            isa::Instruction::I64LtS => self.run_lt::<i64>(),
            isa::Instruction::I64LtU => self.run_lt::<u64>(),
            isa::Instruction::I64GtS => self.run_gt::<i64>(),
            isa::Instruction::I64GtU => self.run_gt::<u64>(),
            isa::Instruction::I64LeS => self.run_lte::<i64>(),
            isa::Instruction::I64LeU => self.run_lte::<u64>(),
            isa::Instruction::I64GeS => self.run_gte::<i64>(),
            isa::Instruction::I64GeU => self.run_gte::<u64>(),

            isa::Instruction::F32Eq => self.run_eq::<F32>(),
            isa::Instruction::F32Ne => self.run_ne::<F32>(),
            isa::Instruction::F32Lt => self.run_lt::<F32>(),
            isa::Instruction::F32Gt => self.run_gt::<F32>(),
            isa::Instruction::F32Le => self.run_lte::<F32>(),
            isa::Instruction::F32Ge => self.run_gte::<F32>(),

            isa::Instruction::F64Eq => self.run_eq::<F64>(),
            isa::Instruction::F64Ne => self.run_ne::<F64>(),
            isa::Instruction::F64Lt => self.run_lt::<F64>(),
            isa::Instruction::F64Gt => self.run_gt::<F64>(),
            isa::Instruction::F64Le => self.run_lte::<F64>(),
            isa::Instruction::F64Ge => self.run_gte::<F64>(),

            isa::Instruction::I32Clz => self.run_clz::<i32>(),
            isa::Instruction::I32Ctz => self.run_ctz::<i32>(),
            isa::Instruction::I32Popcnt => self.run_popcnt::<i32>(),
            isa::Instruction::I32Add => self.run_add::<i32>(),
            isa::Instruction::I32Sub => self.run_sub::<i32>(),
            isa::Instruction::I32Mul => self.run_mul::<i32>(),
            isa::Instruction::I32DivS => self.run_div::<i32, i32>(),
            isa::Instruction::I32DivU => self.run_div::<i32, u32>(),
            isa::Instruction::I32RemS => self.run_rem::<i32, i32>(),
            isa::Instruction::I32RemU => self.run_rem::<i32, u32>(),
            isa::Instruction::I32And => self.run_and::<i32>(),
            isa::Instruction::I32Or => self.run_or::<i32>(),
            isa::Instruction::I32Xor => self.run_xor::<i32>(),
            isa::Instruction::I32Shl => self.run_shl::<i32>(0x1F),
            isa::Instruction::I32ShrS => self.run_shr::<i32, i32>(0x1F),
            isa::Instruction::I32ShrU => self.run_shr::<i32, u32>(0x1F),
            isa::Instruction::I32Rotl => self.run_rotl::<i32>(),
            isa::Instruction::I32Rotr => self.run_rotr::<i32>(),

            isa::Instruction::I64Clz => self.run_clz::<i64>(),
            isa::Instruction::I64Ctz => self.run_ctz::<i64>(),
            isa::Instruction::I64Popcnt => self.run_popcnt::<i64>(),
            isa::Instruction::I64Add => self.run_add::<i64>(),
            isa::Instruction::I64Sub => self.run_sub::<i64>(),
            isa::Instruction::I64Mul => self.run_mul::<i64>(),
            isa::Instruction::I64DivS => self.run_div::<i64, i64>(),
            isa::Instruction::I64DivU => self.run_div::<i64, u64>(),
            isa::Instruction::I64RemS => self.run_rem::<i64, i64>(),
            isa::Instruction::I64RemU => self.run_rem::<i64, u64>(),
            isa::Instruction::I64And => self.run_and::<i64>(),
            isa::Instruction::I64Or => self.run_or::<i64>(),
            isa::Instruction::I64Xor => self.run_xor::<i64>(),
            isa::Instruction::I64Shl => self.run_shl::<i64>(0x3F),
            isa::Instruction::I64ShrS => self.run_shr::<i64, i64>(0x3F),
            isa::Instruction::I64ShrU => self.run_shr::<i64, u64>(0x3F),
            isa::Instruction::I64Rotl => self.run_rotl::<i64>(),
            isa::Instruction::I64Rotr => self.run_rotr::<i64>(),

            isa::Instruction::F32Abs => self.run_abs::<F32>(),
            isa::Instruction::F32Neg => self.run_neg::<F32>(),
            isa::Instruction::F32Ceil => self.run_ceil::<F32>(),
            isa::Instruction::F32Floor => self.run_floor::<F32>(),
            isa::Instruction::F32Trunc => self.run_trunc::<F32>(),
            isa::Instruction::F32Nearest => self.run_nearest::<F32>(),
            isa::Instruction::F32Sqrt => self.run_sqrt::<F32>(),
            isa::Instruction::F32Add => self.run_add::<F32>(),
            isa::Instruction::F32Sub => self.run_sub::<F32>(),
            isa::Instruction::F32Mul => self.run_mul::<F32>(),
            isa::Instruction::F32Div => self.run_div::<F32, F32>(),
            isa::Instruction::F32Min => self.run_min::<F32>(),
            isa::Instruction::F32Max => self.run_max::<F32>(),
            isa::Instruction::F32Copysign => self.run_copysign::<F32>(),

            isa::Instruction::F64Abs => self.run_abs::<F64>(),
            isa::Instruction::F64Neg => self.run_neg::<F64>(),
            isa::Instruction::F64Ceil => self.run_ceil::<F64>(),
            isa::Instruction::F64Floor => self.run_floor::<F64>(),
            isa::Instruction::F64Trunc => self.run_trunc::<F64>(),
            isa::Instruction::F64Nearest => self.run_nearest::<F64>(),
            isa::Instruction::F64Sqrt => self.run_sqrt::<F64>(),
            isa::Instruction::F64Add => self.run_add::<F64>(),
            isa::Instruction::F64Sub => self.run_sub::<F64>(),
            isa::Instruction::F64Mul => self.run_mul::<F64>(),
            isa::Instruction::F64Div => self.run_div::<F64, F64>(),
            isa::Instruction::F64Min => self.run_min::<F64>(),
            isa::Instruction::F64Max => self.run_max::<F64>(),
            isa::Instruction::F64Copysign => self.run_copysign::<F64>(),

            isa::Instruction::I32WrapI64 => self.run_wrap::<i64, i32>(),
            isa::Instruction::I32TruncSF32 => self.run_trunc_to_int::<F32, i32, i32>(),
            isa::Instruction::I32TruncUF32 => self.run_trunc_to_int::<F32, u32, i32>(),
            isa::Instruction::I32TruncSF64 => self.run_trunc_to_int::<F64, i32, i32>(),
            isa::Instruction::I32TruncUF64 => self.run_trunc_to_int::<F64, u32, i32>(),
            isa::Instruction::I64ExtendSI32 => self.run_extend::<i32, i64, i64>(),
            isa::Instruction::I64ExtendUI32 => self.run_extend::<u32, u64, i64>(),
            isa::Instruction::I64TruncSF32 => self.run_trunc_to_int::<F32, i64, i64>(),
            isa::Instruction::I64TruncUF32 => self.run_trunc_to_int::<F32, u64, i64>(),
            isa::Instruction::I64TruncSF64 => self.run_trunc_to_int::<F64, i64, i64>(),
            isa::Instruction::I64TruncUF64 => self.run_trunc_to_int::<F64, u64, i64>(),
            isa::Instruction::F32ConvertSI32 => self.run_extend::<i32, F32, F32>(),
            isa::Instruction::F32ConvertUI32 => self.run_extend::<u32, F32, F32>(),
            isa::Instruction::F32ConvertSI64 => self.run_wrap::<i64, F32>(),
            isa::Instruction::F32ConvertUI64 => self.run_wrap::<u64, F32>(),
            isa::Instruction::F32DemoteF64 => self.run_wrap::<F64, F32>(),
            isa::Instruction::F64ConvertSI32 => self.run_extend::<i32, F64, F64>(),
            isa::Instruction::F64ConvertUI32 => self.run_extend::<u32, F64, F64>(),
            isa::Instruction::F64ConvertSI64 => self.run_extend::<i64, F64, F64>(),
            isa::Instruction::F64ConvertUI64 => self.run_extend::<u64, F64, F64>(),
            isa::Instruction::F64PromoteF32 => self.run_extend::<F32, F64, F64>(),

            isa::Instruction::I32ReinterpretF32 => self.run_reinterpret::<F32, i32>(),
            isa::Instruction::I64ReinterpretF64 => self.run_reinterpret::<F64, i64>(),
            isa::Instruction::F32ReinterpretI32 => self.run_reinterpret::<i32, F32>(),
            isa::Instruction::F64ReinterpretI64 => self.run_reinterpret::<i64, F64>(),

            #[cfg(feature = "sign_ext")]
            isa::Instruction::I32Extend8S => self.run_iextend::<i8, i32>(),
            #[cfg(feature = "sign_ext")]
            isa::Instruction::I32Extend16S => self.run_iextend::<i16, i32>(),
            #[cfg(feature = "sign_ext")]
            isa::Instruction::I64Extend8S => self.run_iextend::<i8, i64>(),
            #[cfg(feature = "sign_ext")]
            isa::Instruction::I64Extend16S => self.run_iextend::<i16, i64>(),
            #[cfg(feature = "sign_ext")]
            isa::Instruction::I64Extend32S => self.run_iextend::<i32, i64>(),
        }
    }

    fn run_unreachable(
        &mut self,
        _context: &mut FunctionContext,
    ) -> Result<InstructionOutcome, TrapCode> {
        Err(TrapCode::Unreachable)
    }

    fn run_br(
        &mut self,
        _context: &mut FunctionContext,
        target: isa::Target,
    ) -> Result<InstructionOutcome, TrapCode> {
        Ok(InstructionOutcome::Branch(target))
    }

    fn run_br_nez(&mut self, target: isa::Target) -> Result<InstructionOutcome, TrapCode> {
        let condition = self.value_stack.pop_as();
        if condition {
            Ok(InstructionOutcome::Branch(target))
        } else {
            Ok(InstructionOutcome::RunNextInstruction)
        }
    }

    fn run_br_eqz(&mut self, target: isa::Target) -> Result<InstructionOutcome, TrapCode> {
        let condition = self.value_stack.pop_as();
        if condition {
            Ok(InstructionOutcome::RunNextInstruction)
        } else {
            Ok(InstructionOutcome::Branch(target))
        }
    }

    fn run_br_table(&mut self, targets: isa::BrTargets) -> Result<InstructionOutcome, TrapCode> {
        let index: u32 = self.value_stack.pop_as();

        let dst = targets.get(index);

        Ok(InstructionOutcome::Branch(dst))
    }

    fn run_return(&mut self, drop_keep: isa::DropKeep) -> Result<InstructionOutcome, TrapCode> {
        Ok(InstructionOutcome::Return(drop_keep))
    }

    fn run_call(
        &mut self,
        context: &mut FunctionContext,
        func_idx: u32,
    ) -> Result<InstructionOutcome, TrapCode> {
        let func = context
            .module()
            .func_by_index(func_idx)
            .expect("Due to validation func should exist");
        Ok(InstructionOutcome::ExecuteCall(func))
    }

    fn run_call_indirect(
        &mut self,
        context: &mut FunctionContext,
        signature_idx: u32,
        table_idx: u32,
    ) -> Result<InstructionOutcome, TrapCode> {
        let table_func_idx: u32 = self.value_stack.pop_as();
        #[cfg(feature = "call_indirect_overlong")]
        let table = context
            .module()
            .table_by_index(table_idx)
            .ok_or(TrapCode::TableAccessOutOfBounds)?;
        #[cfg(not(feature = "call_indirect_overlong"))]
        let table = context
            .module()
            .table_by_index(DEFAULT_TABLE_INDEX)
            .ok_or(TrapCode::TableAccessOutOfBounds)?;
        let func_ref = table
            .get(table_func_idx)
            .map_err(|_| TrapCode::TableAccessOutOfBounds)?
            .ok_or(TrapCode::ElemUninitialized)?;

        {
            let actual_signature = func_ref.signature();
            let expected_signature = context
                .module()
                .signature_by_index(signature_idx)
                .expect("Due to validation type should exist");

            if &*expected_signature != actual_signature {
                return Err(TrapCode::UnexpectedSignature);
            }
        }

        Ok(InstructionOutcome::ExecuteCall(func_ref))
    }

    fn run_drop(&mut self) -> Result<InstructionOutcome, TrapCode> {
        let _ = self.value_stack.pop();
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_select(&mut self) -> Result<InstructionOutcome, TrapCode> {
        let (left, mid, right) = self.value_stack.pop_triple();

        let condition = bool::from_value_internal(right);
        let val = if condition { left } else { mid };
        self.value_stack.push(val)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_get_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapCode> {
        let val = *self.value_stack.pick_mut(index as usize);
        self.value_stack.push(val)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_set_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapCode> {
        let val = self.value_stack.pop();
        *self.value_stack.pick_mut(index as usize) = val;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_tee_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapCode> {
        let val = *self.value_stack.top();
        *self.value_stack.pick_mut(index as usize) = val;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_get_global(
        &mut self,
        context: &mut FunctionContext,
        index: u32,
    ) -> Result<InstructionOutcome, TrapCode> {
        let global = context
            .module()
            .global_by_index(index)
            .expect("Due to validation global should exist");
        let val = global.get();
        self.value_stack.push(val.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_set_global(
        &mut self,
        context: &mut FunctionContext,
        index: u32,
    ) -> Result<InstructionOutcome, TrapCode> {
        let val = self.value_stack.pop();
        let global = context
            .module()
            .global_by_index(index)
            .expect("Due to validation global should exist");
        global
            .set(val.with_type(global.value_type()))
            .expect("Due to validation set to a global should succeed");
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_load<T>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: LittleEndianConvert,
    {
        let raw_address = self.value_stack.pop_as();
        let address = effective_address(offset, raw_address)?;
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let n: T = m
            .get_value(address)
            .map_err(|_| TrapCode::MemoryAccessOutOfBounds)?;
        self.value_stack.push(n.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_load_extend<T, U>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapCode>
    where
        T: ExtendInto<U>,
        ValueInternal: From<U>,
        T: LittleEndianConvert,
    {
        let raw_address = self.value_stack.pop_as();
        let address = effective_address(offset, raw_address)?;
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let v: T = m
            .get_value(address)
            .map_err(|_| TrapCode::MemoryAccessOutOfBounds)?;
        let stack_value: U = v.extend_into();
        self.value_stack
            .push(stack_value.into())
            .map(|_| InstructionOutcome::RunNextInstruction)
    }

    fn run_store<T>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal,
        T: LittleEndianConvert,
    {
        let stack_value = self.value_stack.pop_as::<T>();
        let raw_address = self.value_stack.pop_as::<u32>();
        let address = effective_address(offset, raw_address)?;

        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        m.set_value(address, stack_value)
            .map_err(|_| TrapCode::MemoryAccessOutOfBounds)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_store_wrap<T, U>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal,
        T: WrapInto<U>,
        U: LittleEndianConvert,
    {
        let stack_value: T = <_>::from_value_internal(self.value_stack.pop());
        let stack_value = stack_value.wrap_into();
        let raw_address = self.value_stack.pop_as::<u32>();
        let address = effective_address(offset, raw_address)?;
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        m.set_value(address, stack_value)
            .map_err(|_| TrapCode::MemoryAccessOutOfBounds)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_current_memory(
        &mut self,
        context: &mut FunctionContext,
    ) -> Result<InstructionOutcome, TrapCode> {
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let s = m.current_size().0;
        self.value_stack.push(ValueInternal(s as _))?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_grow_memory(
        &mut self,
        context: &mut FunctionContext,
    ) -> Result<InstructionOutcome, TrapCode> {
        let pages: u32 = self.value_stack.pop_as();
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let m = match m.grow(Pages(pages as usize)) {
            Ok(Pages(new_size)) => new_size as u32,
            // `memory.grow` signals failure by returning -1 (`u32::MAX`).
            Err(_) => u32::MAX,
        };
        self.value_stack.push(ValueInternal(m as _))?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_const(&mut self, val: RuntimeValue) -> Result<InstructionOutcome, TrapCode> {
        self.value_stack
            .push(val.into())
            .map(|_| InstructionOutcome::RunNextInstruction)
    }

    fn run_relop<T, F>(&mut self, f: F) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal,
        F: FnOnce(T, T) -> bool,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = if f(left, right) {
            ValueInternal(1)
        } else {
            ValueInternal(0)
        };
        self.value_stack.push(v)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_eqz<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal,
        T: PartialEq<T> + Default,
    {
        let v = self.value_stack.pop_as::<T>();
        let v = ValueInternal(if v == Default::default() { 1 } else { 0 });
        self.value_stack.push(v)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_eq<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal + PartialEq<T>,
    {
        self.run_relop(|left: T, right: T| left == right)
    }

    fn run_ne<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal + PartialEq<T>,
    {
        self.run_relop(|left: T, right: T| left != right)
    }

    fn run_lt<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left < right)
    }

    fn run_gt<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left > right)
    }

    fn run_lte<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left <= right)
    }

    fn run_gte<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        T: FromValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left >= right)
    }

    fn run_unop<T, U, F>(&mut self, f: F) -> Result<InstructionOutcome, TrapCode>
    where
        F: FnOnce(T) -> U,
        T: FromValueInternal,
        ValueInternal: From<U>,
    {
        let v = self.value_stack.pop_as::<T>();
        let v = f(v);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_clz<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Integer<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.leading_zeros())
    }

    fn run_ctz<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Integer<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.trailing_zeros())
    }

    fn run_popcnt<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Integer<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.count_ones())
    }

    fn run_add<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: ArithmeticOps<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.add(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_sub<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: ArithmeticOps<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.sub(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_mul<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: ArithmeticOps<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.mul(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_div<T, U>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: TransmuteInto<U> + FromValueInternal,
        U: ArithmeticOps<U> + TransmuteInto<T>,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let (left, right) = (left.transmute_into(), right.transmute_into());
        let v = left.div(right)?;
        let v = v.transmute_into();
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_rem<T, U>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: TransmuteInto<U> + FromValueInternal,
        U: Integer<U> + TransmuteInto<T>,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let (left, right) = (left.transmute_into(), right.transmute_into());
        let v = left.rem(right)?;
        let v = v.transmute_into();
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_and<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<<T as ops::BitAnd>::Output>,
        T: ops::BitAnd<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.bitand(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_or<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<<T as ops::BitOr>::Output>,
        T: ops::BitOr<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.bitor(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_xor<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<<T as ops::BitXor>::Output>,
        T: ops::BitXor<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.bitxor(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_shl<T>(&mut self, mask: T) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<<T as ops::Shl<T>>::Output>,
        T: ops::Shl<T> + ops::BitAnd<T, Output = T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.shl(right & mask);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_shr<T, U>(&mut self, mask: U) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: TransmuteInto<U> + FromValueInternal,
        U: ops::Shr<U> + ops::BitAnd<U, Output = U>,
        <U as ops::Shr<U>>::Output: TransmuteInto<T>,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let (left, right) = (left.transmute_into(), right.transmute_into());
        let v = left.shr(right & mask);
        let v = v.transmute_into();
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_rotl<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Integer<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.rotl(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_rotr<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Integer<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.rotr(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_abs<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.abs())
    }

    fn run_neg<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<<T as ops::Neg>::Output>,
        T: ops::Neg + FromValueInternal,
    {
        self.run_unop(|v: T| v.neg())
    }

    fn run_ceil<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.ceil())
    }

    fn run_floor<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.floor())
    }

    fn run_trunc<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.trunc())
    }

    fn run_nearest<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.nearest())
    }

    fn run_sqrt<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        self.run_unop(|v: T| v.sqrt())
    }

    fn run_min<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.min(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_max<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.max(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_copysign<T>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<T>,
        T: Float<T> + FromValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.copysign(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_wrap<T, U>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<U>,
        T: WrapInto<U> + FromValueInternal,
    {
        self.run_unop(|v: T| v.wrap_into())
    }

    fn run_trunc_to_int<T, U, V>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<V>,
        T: TryTruncateInto<U, TrapCode> + FromValueInternal,
        U: TransmuteInto<V>,
    {
        let v = self.value_stack.pop_as::<T>();

        // Propagate both the truncation trap and a potential push error,
        // instead of silently discarding the push result.
        let v = v.try_truncate_into()?.transmute_into();
        self.value_stack.push(v.into())?;

        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_extend<T, U, V>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<V>,
        T: ExtendInto<U> + FromValueInternal,
        U: TransmuteInto<V>,
    {
        let v = self.value_stack.pop_as::<T>();

        let v = v.extend_into().transmute_into();
        self.value_stack.push(v.into())?;

        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_reinterpret<T, U>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<U>,
        T: FromValueInternal,
        T: TransmuteInto<U>,
    {
        let v = self.value_stack.pop_as::<T>();

        let v = v.transmute_into();
        self.value_stack.push(v.into())?;

        Ok(InstructionOutcome::RunNextInstruction)
    }

    #[cfg(feature = "sign_ext")]
    fn run_iextend<T, U>(&mut self) -> Result<InstructionOutcome, TrapCode>
    where
        ValueInternal: From<U>,
        U: SignExtendFrom<T> + FromValueInternal,
    {
        let v = self.value_stack.pop_as::<U>();

        let v = v.sign_extend_from();
        self.value_stack.push(v.into())?;

        Ok(InstructionOutcome::RunNextInstruction)
    }
}

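/// Execution context of a single function frame.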
struct FunctionContext {
    /// Whether the locals of this frame have been pushed onto the value stack.
    pub is_initialized: bool,
    /// The function this context belongs to.
    pub function: FuncRef,
    pub module: ModuleRef,
    pub memory: Option<MemoryRef>,
    /// Position of the current instruction within the function body.
    pub position: u32,
}

impl FunctionContext {
    pub fn new(function: FuncRef) -> Self {
        let module = match function.as_internal() {
            FuncInstanceInternal::Internal { module, .. } => {
                module.upgrade().expect("module deallocated")
            }
            FuncInstanceInternal::Host { .. } => panic!(
                "Host functions can't be called as internally defined functions; \
                 Thus FunctionContext can be created only with internally defined functions; qed"
            ),
        };
        let memory = module.memory_by_index(DEFAULT_MEMORY_INDEX);
        FunctionContext {
            is_initialized: false,
            function,
            module: ModuleRef(module),
            memory,
            position: 0,
        }
    }

    pub fn is_initialized(&self) -> bool {
        self.is_initialized
    }

    pub fn initialize(
        &mut self,
        locals: &[Local],
        value_stack: &mut ValueStack,
    ) -> Result<(), TrapCode> {
        debug_assert!(!self.is_initialized);

        let num_locals = locals.iter().map(|l| l.count() as usize).sum();

        value_stack.extend(num_locals)?;

        self.is_initialized = true;
        Ok(())
    }

    pub fn module(&self) -> ModuleRef {
        self.module.clone()
    }

    pub fn memory(&self) -> Option<&MemoryRef> {
        self.memory.as_ref()
    }
}

impl fmt::Debug for FunctionContext {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "FunctionContext")
    }
}

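/// Computes the effective address `offset + address`, trapping on overflow.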
fn effective_address(offset: u32, address: u32) -> Result<u32, TrapCode> {
    match offset.checked_add(address) {
        None => Err(TrapCode::MemoryAccessOutOfBounds),
        Some(address) => Ok(address),
    }
}

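/// Pops the arguments of a host call off the caller's value stack and stores
/// them, typed according to `signature`, in `host_args`.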
fn prepare_function_args(
    signature: &Signature,
    caller_stack: &mut ValueStack,
    host_args: &mut Vec<RuntimeValue>,
) {
    let req_args = signature.params();
    let len_args = req_args.len();
    let stack_args = caller_stack.pop_slice(len_args);
    assert_eq!(len_args, stack_args.len());
    host_args.clear();
    let prepared_args = req_args
        .iter()
        .zip(stack_args)
        .map(|(req_arg, stack_arg)| stack_arg.with_type(*req_arg));
    host_args.extend(prepared_args);
}

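/// Checks that `args` matches the arity and parameter types of `signature`.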
pub fn check_function_args(signature: &Signature, args: &[RuntimeValue]) -> Result<(), Trap> {
    if signature.params().len() != args.len() {
        return Err(TrapCode::UnexpectedSignature.into());
    }

    if signature
        .params()
        .iter()
        .zip(args.iter().map(|param_value| param_value.value_type()))
        .any(|(expected_type, actual_type)| &actual_type != expected_type)
    {
        return Err(TrapCode::UnexpectedSignature.into());
    }

    Ok(())
}

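/// Fixed-capacity stack of untyped values.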
struct ValueStack {
    buf: Box<[ValueInternal]>,
    /// Stack pointer: index of the next free slot in `buf`.
    sp: usize,
}

impl core::fmt::Debug for ValueStack {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ValueStack")
            .field("entries", &&self.buf[..self.sp])
            .field("stack_ptr", &self.sp)
            .finish()
    }
}

impl ValueStack {
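    /// Applies `drop_keep`: optionally copies the top value down past the
    /// dropped region, then drops `drop` values from the stack.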
    #[inline]
    fn drop_keep(&mut self, drop_keep: isa::DropKeep) {
        if drop_keep.keep == isa::Keep::Single {
            let top = *self.top();
            *self.pick_mut(drop_keep.drop as usize + 1) = top;
        }

        let cur_stack_len = self.len();
        self.sp = cur_stack_len - drop_keep.drop as usize;
    }

    #[inline]
    fn pop_as<T>(&mut self) -> T
    where
        T: FromValueInternal,
    {
        let value = self.pop();

        T::from_value_internal(value)
    }

    #[inline]
    fn pop_pair_as<T>(&mut self) -> (T, T)
    where
        T: FromValueInternal,
    {
        let right = self.pop_as();
        let left = self.pop_as();
        (left, right)
    }

    #[inline]
    fn pop_triple(&mut self) -> (ValueInternal, ValueInternal, ValueInternal) {
        let right = self.pop();
        let mid = self.pop();
        let left = self.pop();
        (left, mid, right)
    }

    #[inline]
    fn top(&self) -> &ValueInternal {
        self.pick(1)
    }

    fn pick(&self, depth: usize) -> &ValueInternal {
        &self.buf[self.sp - depth]
    }

    #[inline]
    fn pick_mut(&mut self, depth: usize) -> &mut ValueInternal {
        &mut self.buf[self.sp - depth]
    }

    #[inline]
    fn pop(&mut self) -> ValueInternal {
        self.sp -= 1;
        self.buf[self.sp]
    }

    #[inline]
    fn push(&mut self, value: ValueInternal) -> Result<(), TrapCode> {
        let cell = self.buf.get_mut(self.sp).ok_or(TrapCode::StackOverflow)?;
        *cell = value;
        self.sp += 1;
        Ok(())
    }

    fn extend(&mut self, len: usize) -> Result<(), TrapCode> {
        let cells = self
            .buf
            .get_mut(self.sp..self.sp + len)
            .ok_or(TrapCode::StackOverflow)?;
        for cell in cells {
            *cell = Default::default();
        }
        self.sp += len;
        Ok(())
    }

    #[inline]
    fn len(&self) -> usize {
        self.sp
    }

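    /// Pops the top `depth` values and returns them as a slice, ordered
    /// bottom to top.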
    pub fn pop_slice(&mut self, depth: usize) -> &[ValueInternal] {
        self.sp -= depth;
        let start = self.sp;
        let end = self.sp + depth;
        &self.buf[start..end]
    }
}

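/// Call stack of function frames with a fixed depth limit.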
struct CallStack {
    buf: Vec<FunctionContext>,
    limit: usize,
}

impl CallStack {
    fn push(&mut self, ctx: FunctionContext) {
        self.buf.push(ctx);
    }

    fn pop(&mut self) -> Option<FunctionContext> {
        self.buf.pop()
    }

    fn is_empty(&self) -> bool {
        self.buf.is_empty()
    }

    fn is_full(&self) -> bool {
        self.buf.len() + 1 >= self.limit
    }
}

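/// A cache of allocated stack buffers that allows reusing them across
/// invocations.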
pub struct StackRecycler {
    value_stack_buf: Option<Box<[ValueInternal]>>,
    value_stack_limit: usize,
    call_stack_buf: Option<Vec<FunctionContext>>,
    call_stack_limit: usize,
}

impl StackRecycler {
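    /// Creates a recycler with the given value stack limit (in bytes) and
    /// call stack limit (in frames).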
    pub fn with_limits(value_stack_limit: usize, call_stack_limit: usize) -> Self {
        Self {
            value_stack_buf: None,
            value_stack_limit,
            call_stack_buf: None,
            call_stack_limit,
        }
    }

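    /// Zeroes any values left in the cached value stack buffer so that they
    /// cannot leak into future invocations.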
    pub fn clear(&mut self) {
        if let Some(buf) = &mut self.value_stack_buf {
            for cell in buf.iter_mut() {
                *cell = ValueInternal(0);
            }
        }
    }

    fn recreate_value_stack(this: &mut Option<&mut Self>) -> ValueStack {
        // The value stack limit is given in bytes; convert it to a number of
        // `ValueInternal` slots.
        let limit = this
            .as_ref()
            .map_or(DEFAULT_VALUE_STACK_LIMIT, |this| this.value_stack_limit)
            / ::core::mem::size_of::<ValueInternal>();

        let buf = this
            .as_mut()
            .and_then(|this| this.value_stack_buf.take())
            .unwrap_or_else(|| {
                let mut buf = Vec::new();
                buf.reserve_exact(limit);
                buf.resize(limit, ValueInternal(0));
                buf.into_boxed_slice()
            });

        ValueStack { buf, sp: 0 }
    }

    fn recreate_call_stack(this: &mut Option<&mut Self>) -> CallStack {
        let limit = this
            .as_ref()
            .map_or(DEFAULT_CALL_STACK_LIMIT, |this| this.call_stack_limit);

        let buf = this
            .as_mut()
            .and_then(|this| this.call_stack_buf.take())
            .unwrap_or_default();

        CallStack { buf, limit }
    }

    pub(crate) fn recycle(&mut self, mut interpreter: Interpreter) {
        interpreter.call_stack.buf.clear();

        self.value_stack_buf = Some(interpreter.value_stack.buf);
        self.call_stack_buf = Some(interpreter.call_stack.buf);
    }
}

impl Default for StackRecycler {
    fn default() -> Self {
        Self::with_limits(DEFAULT_VALUE_STACK_LIMIT, DEFAULT_CALL_STACK_LIMIT)
    }
}