1#![deny(missing_docs)]
11
12use anyhow::{Context, Result, bail, ensure};
13use std::collections::{BTreeMap, HashMap};
14use walrus::ir::*;
15use walrus::*;
16
/// A small WebAssembly interpreter that executes functions of a walrus
/// [`Module`] directly from its IR.
///
/// Linear memory is modelled sparsely: only 4-byte cells that were actually
/// touched are materialised, faulted in from the module's active data
/// segments on first read.
pub struct Interpreter {
    /// Current value of every module global, keyed by id.
    pub globals: BTreeMap<GlobalId, Value>,
    /// Sparse linear memories: per memory, a map from (4-byte-aligned)
    /// address to the 4-byte cell stored there.
    pub mem: BTreeMap<MemoryId, BTreeMap<usize, [u8; 4]>>,
    /// The operand stack shared by all call frames.
    pub stack: Vec<Value>,
    /// Function tables used by `call_indirect`, keyed by table id.
    pub tables: BTreeMap<TableId, Vec<FunctionId>>,
    // Host implementations of imported functions, keyed by import name.
    functions: HashMap<String, Box<dyn FnMut(&mut Interpreter, &[Value]) -> Result<Vec<Value>>>>,
    // Optional hook invoked before every instruction is evaluated.
    interrupt_handler: Option<
        Box<dyn FnMut(&mut Interpreter, &Instr, (FunctionId, InstrSeqId, usize)) -> Result<()>>,
    >,
    // Optional hook invoked for every memory load/store, with the memory,
    // address, value and access direction.
    interrupt_handler_mem: Option<
        Box<
            dyn FnMut(
                &mut Interpreter,
                &Instr,
                (FunctionId, InstrSeqId, usize),
                (MemoryId, u64, Value, MemoryAccessType),
            ) -> Result<()>,
        >,
    >,
    // Active data segments per memory as (offset, bytes) pairs, used to
    // lazily initialise memory cells on first access.
    init_memories: BTreeMap<MemoryId, Box<[(usize, Box<[u8]>)]>>,
    /// Current size of each memory, initialised from the module's declared
    /// `initial` size and increased by `memory.grow`.
    pub memory_size: BTreeMap<MemoryId, usize>,
}
63
/// Direction of a memory access reported to the memory interrupt handler.
pub enum MemoryAccessType {
    /// The access was a read (`load`).
    Load,
    /// The access was a write (`store`).
    Store,
}
71
72impl Interpreter {
73 pub fn new(module: &Module) -> Result<Self> {
75 let mut globals = BTreeMap::new();
76 module
77 .globals
78 .iter()
79 .map(|global| {
80 if let GlobalKind::Local(ConstExpr::Value(v)) = global.kind {
81 globals.insert(global.id(), v);
82 Ok(())
83 } else {
84 bail!("global is not a local constant");
85 }
86 })
87 .collect::<Result<Vec<_>>>()?;
88
89 let init_memories = module
90 .data
91 .iter()
92 .filter_map(|data| match &data.kind {
93 walrus::DataKind::Active { memory, offset } => {
94 if let ConstExpr::Value(v) = offset {
95 if let ir::Value::I32(offset) = v {
96 Some((
97 *memory,
98 (*offset as usize, data.value.clone().into_boxed_slice()),
99 ))
100 } else {
101 log::warn!("Data segment {:?} is not i32", offset);
102 None
103 }
104 } else {
105 log::warn!("Data segment {:?} is not a value", offset);
106 None
107 }
108 }
109 _ => {
110 log::warn!("Data segment passive is not supported");
111 None
112 }
113 })
114 .fold(BTreeMap::new(), |mut acc, (memory, (offset, value))| {
115 acc.entry(memory)
116 .or_insert_with(Vec::new)
117 .push((offset, value));
118 acc
119 })
120 .into_iter()
121 .map(|(memory, values)| (memory, values.into_boxed_slice()))
122 .collect::<BTreeMap<_, _>>();
123
124 let mem = module
125 .memories
126 .iter()
127 .map(|mem| (mem.id(), BTreeMap::new()))
128 .collect::<BTreeMap<_, _>>();
129
130 let memory_size = module
131 .memories
132 .iter()
133 .map(|mem| (mem.id(), mem.initial as usize))
134 .collect::<BTreeMap<_, _>>();
135
136 let tables = module
137 .tables
138 .iter()
139 .map(|table| {
140 (table.id(), {
141 let mut vec = Vec::with_capacity(table.initial as usize);
142 for elem_id in &table.elem_segments {
143 let elem = module.elements.get(*elem_id);
144 match &elem.kind {
145 walrus::ElementKind::Active { offset, .. } => {
146 let offset = match offset {
147 ConstExpr::Value(ir::Value::I32(offset)) => *offset as usize,
148 _ => {
149 log::warn!("Table segment {:?} is not i32", offset);
150 continue;
151 }
152 };
153 let items = match &elem.items {
154 ElementItems::Functions(ids) => ids,
155 ElementItems::Expressions(..) => todo!(),
156 };
157
158 if offset > vec.len() {
159 vec.resize(offset, items[0]);
160 } else if offset < vec.len() {
161 vec.truncate(offset);
162 }
163 vec.extend(items);
164 }
165 walrus::ElementKind::Passive => {
166 log::warn!("Table segment {:?} is passive", elem_id);
167 }
168 ElementKind::Declared => {
169 todo!("Declared table segments are not supported")
170 }
171 }
172 }
173 vec
174 })
175 })
176 .collect::<BTreeMap<_, _>>();
177
178 Ok(Self {
179 globals,
180 mem,
181 stack: Vec::with_capacity(256),
182 functions: HashMap::new(),
183 tables,
184 interrupt_handler: None,
185 interrupt_handler_mem: None,
186 init_memories,
187 memory_size,
188 })
189 }
190
191 pub fn mem_set_i32(&mut self, id: MemoryId, address: u64, value: i32) -> Result<()> {
193 let value = u32::to_le_bytes(value as u32);
194 Ok(self.mem_set(id, address, value))
195 }
196
197 pub fn add_function(
200 &mut self,
201 name: impl AsRef<str>,
202 func: impl FnMut(&mut Interpreter, &[Value]) -> Result<Vec<Value>> + 'static,
203 ) {
204 self.functions
205 .insert(name.as_ref().to_string(), Box::new(func));
206 }
207
208 pub fn set_interrupt_handler(
210 &mut self,
211 handler: impl FnMut(&mut Interpreter, &Instr, (FunctionId, InstrSeqId, usize)) -> Result<()>
212 + 'static,
213 ) {
214 self.interrupt_handler = Some(Box::new(handler));
215 }
216
217 pub fn set_interrupt_handler_mem(
220 &mut self,
221 handler: impl FnMut(
222 &mut Interpreter,
223 &Instr,
224 (FunctionId, InstrSeqId, usize),
225 (MemoryId, u64, Value, MemoryAccessType),
226 ) -> Result<()>
227 + 'static,
228 ) {
229 self.interrupt_handler_mem = Some(Box::new(handler));
230 }
231
232 pub fn call_interrupt_handler(
234 &mut self,
235 instr: &Instr,
236 id: (FunctionId, InstrSeqId, usize),
237 ) -> Result<()> {
238 let mut interrupt_handler = self.interrupt_handler.take();
239 if let Some(ref mut handler) = interrupt_handler {
240 handler(self, instr, id)?;
241 }
242
243 self.interrupt_handler = interrupt_handler;
244
245 Ok(())
246 }
247
248 pub fn call_interrupt_handler_mem(
250 &mut self,
251 instr: &Instr,
252 id: (FunctionId, InstrSeqId, usize),
253 mem: (MemoryId, u64, Value, MemoryAccessType),
254 ) -> Result<()> {
255 let mut interrupt_handler = self.interrupt_handler_mem.take();
256 if let Some(ref mut handler) = interrupt_handler {
257 handler(self, instr, id, mem)?;
258 }
259
260 self.interrupt_handler_mem = interrupt_handler;
261
262 Ok(())
263 }
264
265 #[allow(unused)]
266 fn mem_get(&mut self, id: MemoryId, address: u64) -> [u8; 4] {
267 let address = address as usize;
268
269 let mem = self.mem.get_mut(&id).unwrap();
270 *mem.entry(address).or_insert_with(|| {
271 self.init_memories
272 .get(&id)
273 .unwrap()
274 .iter()
275 .find(|(offset, data)| offset <= &address && address < offset + data.len())
276 .map(|(offset, value)| {
277 let offset = address - offset;
278 let mut arr = [0; 4];
279 arr.copy_from_slice(&value[offset..offset + 4]);
280 arr
281 })
282 .unwrap_or([0; 4])
283 })
284 }
285
286 #[allow(unused)]
287 fn mem_set(&mut self, id: MemoryId, address: u64, value: [u8; 4]) {
288 let address = address as usize;
289 let mem = self.mem.get_mut(&id).unwrap();
290 mem.insert(address, value);
291 }
292
293 #[allow(unused)]
294 fn mem_set_u8(&mut self, id: MemoryId, address: u64, value: u8) {
295 let address = address as usize;
296 let mem = self.mem.get_mut(&id).unwrap();
297 if let Some(entry) = mem.get_mut(&address) {
298 entry[0] = value;
299 } else {
300 let mut entry = [0; 4];
301 entry[0] = value;
302 mem.insert(address, entry);
303 }
304 }
305
306 #[allow(unused)]
307 fn stack_pop(&mut self) -> Result<Value> {
308 self.stack.pop().with_context(|| "stack underflow")
309 }
310
311 #[allow(unused)]
312 fn stack_push(&mut self, value: Value) {
313 self.stack.push(value);
314 }
315
316 #[allow(unused)]
317 fn stack_extend(&mut self, values: Vec<Value>) {
318 self.stack.extend(values);
319 }
320
321 #[allow(unused)]
322 fn stack_tee(&self) -> Value {
323 self.stack.last().cloned().unwrap()
324 }
325
326 pub fn call(&mut self, id: FunctionId, module: &Module, args: &[Value]) -> Result<Vec<Value>> {
328 let func = module.funcs.get(id);
329 log::debug!("starting a call of {:?} {:?}", id, func.name);
330 log::debug!("arguments {:?}", args);
331 let local = match &func.kind {
332 walrus::FunctionKind::Local(l) => l,
333 walrus::FunctionKind::Import(import) => {
334 let id = import.import;
335 let func = module.imports.get(id);
336 let name = func.name.clone();
337 let mut func = self
338 .functions
339 .remove(&name)
340 .with_context(|| format!("function {name} not found"))?;
341
342 let ret = func(self, args);
343
344 self.functions.insert(name, func);
345
346 return ret;
347 }
348 _ => bail!("function is not local"),
349 };
350
351 let entry = local.entry_block();
352 let block = local.block(entry);
353
354 assert_eq!(local.args.len(), args.len());
355 let mut locals = BTreeMap::new();
356 for (arg, val) in local.args.iter().zip(args) {
357 locals.insert(*arg, *val);
358 }
359
360 let mut frame = Frame {
361 module,
362 interp: self,
363 locals,
364 local_func: local,
365 };
366
367 for (i, (instr, _)) in block.instrs.iter().enumerate() {
368 match frame.eval(instr, (id, entry, i)) {
369 Ok(BlockRet::Success) => {}
370 Ok(BlockRet::Break { .. }) => unreachable!(),
371 Ok(BlockRet::Return) => {
372 let ty = match block.ty {
373 InstrSeqType::Simple(val_type) => {
374 val_type.map(|v| vec![v]).unwrap_or_default()
375 }
376 InstrSeqType::MultiValue(id) => {
377 let ty = module.types.get(id);
378 ty.results().iter().map(|v| *v).collect()
379 }
380 };
381 let ret = self.stack.split_off(self.stack.len() - ty.len());
382 self.stack.clear();
383
384 return Ok(ret);
385 }
386 Err(err) => {
387 if let Some(name) = &module.funcs.get(id).name {
388 bail!("{name}: {err}")
389 } else {
390 bail!("{err}")
391 }
392 }
393 }
394 }
395 Ok(self.stack.clone())
396 }
397}
398
/// Execution state for one interpreted function activation.
struct Frame<'a> {
    // The module being interpreted (for type, local and function lookups).
    module: &'a Module,
    // Shared interpreter state: operand stack, memories, globals, hooks.
    interp: &'a mut Interpreter,
    // The local function whose instruction sequences are being executed.
    local_func: &'a LocalFunction,
    // Current values of this activation's locals, keyed by id.
    locals: BTreeMap<LocalId, Value>,
}
405
/// Evaluate the instruction sequence `$seq` and immediately propagate its
/// result as the result of the enclosing `eval` call.
macro_rules! block {
    ($self:ident, $id:ident, $seq:expr) => {{
        return $self.block($id, $seq);
    }};
}
415
416impl Frame<'_> {
417 fn eval(&mut self, instr: &Instr, place: (FunctionId, InstrSeqId, usize)) -> Result<BlockRet> {
418 use walrus::ir::*;
419
420 fn as_u32(v: i32) -> u32 {
421 #[allow(unnecessary_transmutes)]
422 unsafe {
423 core::mem::transmute(v)
424 }
425 }
426
427 fn as_i32(v: u32) -> i32 {
428 #[allow(unnecessary_transmutes)]
429 unsafe {
430 core::mem::transmute(v)
431 }
432 }
433
434 let id = place.0;
435
436 self.interp.call_interrupt_handler(instr, place)?;
437
438 match instr {
439 Instr::Const(Const { value }) => self.interp.stack_push(*value),
440 Instr::LocalGet(e) => {
441 self.interp
442 .stack_push(self.locals.get(&e.local).cloned().unwrap_or_else(|| {
443 let ty = self.module.locals.get(e.local).ty();
444 match ty {
445 ValType::I32 => Value::I32(0),
446 ValType::I64 => Value::I64(0),
447 ValType::F32 => Value::F32(0.0),
448 ValType::F64 => Value::F64(0.0),
449 _ => unreachable!(),
450 }
451 }))
452 }
453
454 Instr::LocalSet(e) => {
455 let val = self.interp.stack_pop()?;
456 self.locals.insert(e.local, val);
457 }
458 Instr::LocalTee(e) => {
459 let val = self.interp.stack_tee();
460 self.locals.insert(e.local, val);
461 }
462
463 Instr::GlobalGet(e) => self
464 .interp
465 .stack_push(self.interp.globals.get(&e.global).cloned().unwrap()),
466 Instr::GlobalSet(e) => {
467 let val = self.interp.stack_pop()?;
468 *self.interp.globals.get_mut(&e.global).unwrap() = val;
469 }
470
471 Instr::Binop(e) => {
474 let rhs = self.interp.stack_pop()?;
475 let lhs = self.interp.stack_pop()?;
476 match (rhs, lhs) {
477 (Value::I32(rhs), Value::I32(lhs)) => {
478 self.interp.stack_push(Value::I32(match e.op {
479 BinaryOp::I32Sub => lhs - rhs,
480 BinaryOp::I32Add => lhs + rhs,
481 BinaryOp::I32Mul => lhs * rhs,
482 BinaryOp::I32DivU => as_i32(as_u32(lhs) / as_u32(rhs)),
483 BinaryOp::I32DivS => lhs / rhs,
484 BinaryOp::I32RemU => as_i32(as_u32(lhs) % as_u32(rhs)),
485 BinaryOp::I32RemS => lhs % rhs,
486 BinaryOp::I32And => lhs & rhs,
487 BinaryOp::I32Or => lhs | rhs,
488 BinaryOp::I32Xor => lhs ^ rhs,
489 BinaryOp::I32Shl => lhs << rhs,
490 BinaryOp::I32ShrU => as_i32(as_u32(lhs) >> as_u32(rhs)),
491 BinaryOp::I32ShrS => lhs >> rhs,
492 BinaryOp::I32Rotl => lhs.rotate_left(rhs as u32),
493 BinaryOp::I32Rotr => lhs.rotate_right(rhs as u32),
494 BinaryOp::I32Eq => (lhs == rhs) as i32,
495 BinaryOp::I32Ne => (lhs != rhs) as i32,
496 BinaryOp::I32LtU => (as_u32(lhs) < as_u32(rhs)) as i32,
497 BinaryOp::I32LtS => (lhs < rhs) as i32,
498 BinaryOp::I32GtU => (as_u32(lhs) > as_u32(rhs)) as i32,
499 BinaryOp::I32GtS => (lhs > rhs) as i32,
500 BinaryOp::I32LeU => (as_u32(lhs) <= as_u32(rhs)) as i32,
501 BinaryOp::I32LeS => (lhs <= rhs) as i32,
502 BinaryOp::I32GeU => (as_u32(lhs) >= as_u32(rhs)) as i32,
503 BinaryOp::I32GeS => (lhs >= rhs) as i32,
504 op => bail!("invalid binary op {:?}", op),
505 }));
506 }
507 (Value::I64(rhs), Value::I64(lhs)) => match e.op {
508 BinaryOp::I64Sub => self.interp.stack_push(Value::I64(lhs - rhs)),
509 BinaryOp::I64Add => self.interp.stack_push(Value::I64(lhs + rhs)),
510 BinaryOp::I64Eq => {
511 self.interp.stack_push(Value::I32((lhs == rhs) as i32));
512 }
513 BinaryOp::I64Or => self.interp.stack_push(Value::I64(lhs | rhs)),
514 op => bail!("invalid binary op {:?}", op),
515 },
516 v => bail!("invalid types for binary op: {v:?}\ne: {e:?}"),
517 }
518 }
519
520 Instr::Unop(e) => {
521 let val = self.interp.stack_pop()?;
522 match val {
523 Value::I32(val) => {
524 self.interp.stack_push(match e.op {
525 UnaryOp::I32Clz => Value::I32(val.leading_zeros() as i32),
526 UnaryOp::I32Ctz => Value::I32(val.trailing_zeros() as i32),
527 UnaryOp::I32Popcnt => Value::I32(val.count_ones() as i32),
528 UnaryOp::I32Eqz => Value::I32((val == 0) as i32),
529 UnaryOp::I64ExtendUI32 => Value::I64((val as u32) as i64),
532 op => bail!("invalid unary op {:?}", op),
533 });
534 }
535 Value::I64(val) => {
536 self.interp.stack_push(Value::I32(match e.op {
537 UnaryOp::I64Eqz => (val == 0) as i32,
538 op => bail!("invalid unary op {:?}", op),
539 }));
540 }
541 _ => bail!("invalid types for unary op"),
542 }
543 }
544
545 Instr::Load(e) => {
549 let address = self.interp.stack_pop()?;
550 let address = if let Value::I32(address) = address {
551 address
552 } else {
553 bail!("invalid address type for load");
554 };
555 let address = address as u64 + e.arg.offset;
556 ensure!(
557 address > 0,
558 "Read a negative address value from the stack. Did we run out of memory?"
559 );
560 ensure!(address % 4 == 0);
561 let value = self.interp.mem_get(e.memory, address);
562 let value = match e.kind {
563 LoadKind::I32 { .. } => Value::I32(u32::from_le_bytes(value) as i32),
564 LoadKind::I64 { .. } => {
565 let next_value = self.interp.mem_get(e.memory, address + 4);
566 let value =
567 u64::from_le_bytes([value, next_value].concat().try_into().unwrap());
568 Value::I64(value as i64)
569 }
570 LoadKind::F32 => todo!(),
571 LoadKind::F64 => todo!(),
572 LoadKind::I32_8 { kind } => {
573 let value = i32::from_le_bytes([value[0], 0, 0, 0]);
574 let value = match kind {
575 ExtendedLoad::SignExtend => value,
576 ExtendedLoad::ZeroExtend => value as u32 as i32,
577 ExtendedLoad::ZeroExtendAtomic => value as u32 as i32,
578 };
579 Value::I32(value)
580 }
581 variant => bail!("no support for this load kind: {variant:?}"),
582 };
583
584 self.interp.call_interrupt_handler_mem(
585 instr,
586 place,
587 (e.memory, address, value, MemoryAccessType::Load),
588 )?;
589 self.interp.stack_push(value);
590 }
591 Instr::Store(e) => {
592 let value = self.interp.stack_pop()?;
593 let address = self.interp.stack_pop()?;
594 let address = if let Value::I32(address) = address {
595 address
596 } else {
597 bail!("invalid address type for load");
598 };
599 let address = address as u64 + e.arg.offset;
600 ensure!(
601 address > 0,
602 "Read a negative address value from the stack. Did we run out of memory?"
603 );
604 ensure!(address % 4 == 0);
605 match e.kind {
606 StoreKind::I32 { .. } => {
607 let value = if let Value::I32(value) = value {
608 value
609 } else {
610 bail!("invalid value type for store");
611 };
612 let v = u32::to_le_bytes(value as u32);
613 self.interp.call_interrupt_handler_mem(
614 instr,
615 place,
616 (
617 e.memory,
618 address,
619 Value::I32(value),
620 MemoryAccessType::Store,
621 ),
622 )?;
623 self.interp.mem_set(e.memory, address, v);
624 }
625 StoreKind::I32_8 { .. } => {
628 let value = if let Value::I32(value) = value {
629 value
630 } else {
631 bail!("invalid value type for store");
632 };
633 let v = u32::to_le_bytes(value as u32)[0];
634 self.interp.call_interrupt_handler_mem(
635 instr,
636 place,
637 (
638 e.memory,
639 address,
640 Value::I32(value),
641 MemoryAccessType::Store,
642 ),
643 )?;
644 self.interp.mem_set_u8(e.memory, address, v);
645 }
646 StoreKind::I64 { .. } => {
647 let v = if let Value::I64(value) = value {
648 value
649 } else {
650 bail!("invalid value type for store");
651 };
652 let v = u64::to_le_bytes(v as u64);
653 let v = v.chunks(4).map(|chunk| {
654 let mut arr = [0; 4];
655 arr.copy_from_slice(chunk);
656 arr
657 });
658 let mut i = 0;
659 for chunk in v {
660 self.interp.mem_set(e.memory, address + i, chunk);
661 i += 4;
662 }
663 self.interp.call_interrupt_handler_mem(
664 instr,
665 place,
666 (e.memory, address, value, MemoryAccessType::Store),
667 )?;
668 }
669 StoreKind::F32 => todo!(),
670 StoreKind::F64 => todo!(),
671 v => bail!("no support for this store kind: {v:?}"),
672 };
673 }
674
675 Instr::Return(_) => {
676 log::debug!("return");
677
678 return Ok(BlockRet::Return);
679 }
680
681 Instr::Drop(_) => {
682 log::debug!("drop");
683 self.interp.stack_pop()?;
684 }
685
686 Instr::Call(Call { func }) => {
687 let func = *func;
688
689 let ty = self.module.types.get(self.module.funcs.get(func).ty());
690
691 let args = self
692 .interp
693 .stack
694 .split_off(self.interp.stack.len() - ty.params().len());
695
696 let ret = self.interp.call(func, self.module, &args)?;
697 self.interp.stack.extend(ret);
698 }
699
700 Instr::CallIndirect(CallIndirect {
701 ty,
702 table: table_id,
703 }) => {
704 log::debug!("call_indirect");
705
706 let func_index = self.interp.stack_pop()?;
707 let table = self.interp.tables.get(table_id).unwrap();
708 let func_index = if let Value::I32(func_index) = func_index {
709 func_index as usize
710 } else {
711 bail!("invalid function index type for call_indirect");
712 };
713
714 ensure!(
715 func_index < table.len(),
716 "function index out of bounds for call_indirect"
717 );
718
719 let func_id = table[func_index];
720 let ty = self.module.types.get(*ty);
721 let args = self
722 .interp
723 .stack
724 .split_off(self.interp.stack.len() - ty.params().len());
725
726 let ret = self.interp.call(func_id, self.module, &args)?;
727 self.interp.stack.extend(ret);
728 }
729
730 Instr::Block(b) => {
731 log::debug!("block");
732
733 block!(self, id, b.seq);
734 }
735
736 Instr::Loop(l) => {
737 log::debug!("loop");
738
739 loop {
740 match self.block(id, l.seq)? {
741 BlockRet::Success => return Ok(BlockRet::Success),
742 BlockRet::Break { break_point } => {
743 if break_point != l.seq {
744 return Ok(BlockRet::Break { break_point });
745 }
746 }
747 BlockRet::Return => return Ok(BlockRet::Return),
748 }
749 }
750 }
751
752 Instr::BrIf(i) => {
753 log::debug!("br_if");
754 let val = self.interp.stack_pop()?;
755 let val = if let Value::I32(val) = val {
756 val
757 } else {
758 bail!("invalid value type for br_if");
759 };
760 if val != 0 {
761 return Ok(BlockRet::Break {
762 break_point: i.block,
763 });
764 }
765 }
766
767 Instr::Br(i) => {
768 log::debug!("br");
769 return Ok(BlockRet::Break {
770 break_point: i.block,
771 });
772 }
773
774 Instr::IfElse(i) => {
775 log::debug!("if_else");
776 let val = self.interp.stack_pop()?;
777 let val = if let Value::I32(val) = val {
778 val
779 } else {
780 bail!("invalid value type for if_else");
781 };
782 if val != 0 {
783 block!(self, id, i.consequent);
784 } else {
785 block!(self, id, i.alternative);
786 }
787 }
788
789 Instr::Select(i) => {
790 log::debug!("select");
791
792 if i.ty.is_some() {
793 todo!("select with type");
794 }
795
796 let val = self.interp.stack_pop()?;
797 let val = if let Value::I32(val) = val {
798 val
799 } else {
800 bail!("invalid value type for select");
801 };
802 let b = self.interp.stack_pop()?;
803 let a = self.interp.stack_pop()?;
804 if val != 0 {
805 self.interp.stack_push(a);
806 } else {
807 self.interp.stack_push(b);
808 }
809 }
810
811 Instr::MemorySize(i) => {
812 log::debug!("memory size");
813 let id = i.memory;
814 let size = self.interp.memory_size.get(&id).unwrap();
815 self.interp.stack_push(Value::I32(*size as i32));
816 }
817
818 Instr::MemoryGrow(i) => {
819 log::debug!("memory grow");
820 let id = i.memory;
821 let size = *self.interp.memory_size.get(&id).unwrap();
822 let val = self.interp.stack_pop()?;
823 let val = if let Value::I32(val) = val {
824 val
825 } else {
826 bail!("invalid value type for memory grow");
827 };
828 if val < 0 {
829 bail!("invalid value for memory grow");
830 }
831 self.interp.memory_size.insert(id, size + val as usize);
832 }
833
834 Instr::Unreachable(_) => {
835 log::debug!("unreachable");
836 bail!("unreachable");
837 }
838
839 s => bail!("unknown instruction {:?}", s),
849 }
850
851 Ok(BlockRet::Success)
852 }
853
854 fn block(&mut self, function_id: FunctionId, instr_sec_id: InstrSeqId) -> Result<BlockRet> {
855 let unwind_stack_height = self.interp.stack.len();
856 let block = self.local_func.block(instr_sec_id);
857 for (i, (instr, _)) in block.instrs.iter().enumerate() {
858 let ret = self.eval(instr, (function_id, instr_sec_id, i))?;
859 if let BlockRet::Break { break_point } = ret {
860 if break_point == break_point {
861 let block_ty = match block.ty {
862 InstrSeqType::Simple(val_type) => {
863 val_type.map(|v| vec![v]).unwrap_or_default()
864 }
865 InstrSeqType::MultiValue(id) => {
866 let ty = self.module.types.get(id);
867 ty.results().iter().map(|v| *v).collect()
868 }
869 };
870
871 let ret = self
872 .interp
873 .stack
874 .split_off(self.interp.stack.len() - block_ty.len());
875
876 self.interp.stack.truncate(unwind_stack_height);
877
878 self.interp.stack_extend(ret);
879
880 return Ok(BlockRet::Success);
881 }
882
883 return Ok(ret);
884 }
885 }
886 Ok(BlockRet::Success)
887 }
888}
889
/// How control leaves an instruction sequence.
enum BlockRet {
    // The sequence ran to completion (or a break landed on it).
    Success,
    // A `br`/`br_if` targeting the sequence `break_point` is unwinding.
    Break { break_point: InstrSeqId },
    // A function-level `return` is unwinding out of every sequence.
    Return,
}