1extern crate core;
6
7use crate::host::{HostArgs, HostFunctionCallback};
8use crate::memory::ExecutionMode::NormalExecution;
9use crate::memory::{Memory, MemoryDebug};
10use crate::VmState::Normal;
11use fixed32::Fp;
12use std::error::Error;
13use std::fmt::{Display, Formatter};
14use std::mem::discriminant;
15use std::ptr;
16use std::str::FromStr;
17use swamp_vm_types::opcode::OpCode;
18use swamp_vm_types::{BinaryInstruction, InstructionPosition};
19
// Runtime support modules: collection backends (grid, map, sparse, vec),
// string handling, ranges, and the host-call interface.
mod grid;
pub mod host;
pub mod map_open;
pub mod memory;
pub mod prelude;
mod range;
mod sparse;
mod string;
mod vec;
29
/// Combines two octets into a `u16`, low byte first (`$lsb` is bits 0..8,
/// `$msb` is bits 8..16).
///
/// NOTE(review): expansion is identical to `u16_from_u8s!` below — one of the
/// two names could likely be retired once callers are migrated.
#[macro_export]
macro_rules! u8s_to_u16 {
    ($lsb:expr, $msb:expr) => {
        (($msb as u16) << 8) | ($lsb as u16)
    };
}
40
/// Combines two octets (low byte first) into a `u16` and reinterprets the bit
/// pattern as a signed `i16`.
#[macro_export]
macro_rules! i16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        ((($msb as u16) << 8) | ($lsb as u16)) as i16
    };
}
51
/// Combines four octets into a `u32`, little-endian style: `$lsb` is the
/// lowest byte, `$msb3` the highest.
#[macro_export]
macro_rules! u32_from_u8s {
    ($lsb:expr, $msb:expr, $msb2:expr, $msb3:expr) => {
        (($msb3 as u32) << 24) | (($msb2 as u32) << 16) | (($msb as u32) << 8) | ($lsb as u32)
    };
}
58
/// Combines two octets into a `u16`, low byte first.
///
/// NOTE(review): duplicate of `u8s_to_u16!` above; both are exported, so
/// removing either would break external callers — consolidate deliberately.
#[macro_export]
macro_rules! u16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        (($msb as u16) << 8) | ($lsb as u16)
    };
}
65
/// Reads the 32-bit contents of register `$reg_idx` from the VM's register
/// file. `$reg_idx` is widened to `usize` for indexing.
#[macro_export]
macro_rules! get_reg {
    ($vm:expr, $reg_idx:expr) => {
        $vm.registers[$reg_idx as usize]
    };
}
72
/// Writes `$value` into register `$reg_idx`.
///
/// `$value` is cast with `as u32`, so narrower types (incl. `bool`) are
/// zero/sign-extended per Rust `as` rules and wider types are truncated.
#[macro_export]
macro_rules! set_reg {
    ($vm:expr, $reg_idx:expr, $value:expr) => {
        $vm.registers[$reg_idx as usize] = $value as u32;
    };
}
84
// Opcode handler signatures, one per operand count (0 through 8 operand
// octets). The dispatch loop decodes the instruction's operand bytes and
// calls the matching arity.
type Handler0 = fn(&mut Vm);
type Handler1 = fn(&mut Vm, u8);
type Handler2 = fn(&mut Vm, u8, u8);
type Handler3 = fn(&mut Vm, u8, u8, u8);
type Handler4 = fn(&mut Vm, u8, u8, u8, u8);
type Handler5 = fn(&mut Vm, u8, u8, u8, u8, u8);
type Handler6 = fn(&mut Vm, u8, u8, u8, u8, u8, u8);
type Handler7 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8);
type Handler8 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8, u8);
94
/// A dispatch-table entry: an opcode handler tagged with how many operand
/// octets it consumes. `Copy` so the 256-entry table can be array-initialized.
#[derive(Copy, Clone)]
enum HandlerType {
    Args0(Handler0),
    Args1(Handler1),
    Args2(Handler2),
    Args3(Handler3),
    Args4(Handler4),
    Args5(Handler5),
    Args6(Handler6),
    Args7(Handler7),
    Args8(Handler8),
}
107
/// Execution statistics collected when `debug_stats_enabled` is on.
#[derive(Debug, Default)]
pub struct Debug {
    // Total instructions dispatched so far.
    pub opcodes_executed: usize,
    // Current nesting depth of the call stack.
    pub call_depth: usize,
    // Deepest call nesting observed.
    pub max_call_depth: usize,
    // Highest stack offset observed.
    pub max_stack_offset: usize,
}
115
/// Book-keeping pushed onto `Vm::call_stack` for each active function call.
pub struct CallFrame {
    /// Instruction index the callee returns to.
    pub return_address: usize,
    /// Caller's saved frame offset.
    pub previous_frame_offset: usize,
    /// Caller's saved stack offset.
    pub previous_stack_offset: usize,
}
121
122type RegContents = u32;
123
/// Runtime fault codes raised by opcode handlers; carried in `VmState::Trap`.
///
/// Payload-carrying variants record diagnostic context. Note that
/// `is_sort_of_equal` compares by discriminant only, ignoring payloads —
/// useful when matching against a decoded code with zeroed payloads.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum TrapCode {
    /// Execution was intentionally stopped by the test harness.
    StoppedByTestHarness,
    /// Vec index outside the live element range.
    VecBoundsFail {
        encountered: usize,
        element_count: usize,
    },
    MapOutOfSpace,
    MapEntryNotFound,
    MapEntryNotFoundAndCouldNotBeCreated,
    MapEntryNotFoundForRemoval,
    /// `TrapOnLessThan` fired; carries the two compared values.
    LessThanTrap {
        a: u32,
        b: u32,
    },
    SparseOutOfSpace,
    SparseRemoveFailed,
    SparseGetFailed,
    MapCouldNotBeCopied,
    OverlappingMemoryCopy,
    MemoryCorruption,
    /// Push past a vec's fixed capacity.
    VecOutOfCapacity {
        encountered: u16,
        capacity: u16,
    },
    VecEmpty,
    VecNeverInitialized,
    /// Grid access with x outside `0..width`.
    GridBoundsXFail {
        x: u32,
        width: u16,
    },
    /// Grid access with y outside `0..height`.
    GridBoundsYFail {
        y: u32,
        height: u16,
    },
    GridBoundsFail,
    InvalidUtf8Sequence,
    UnalignedAccess,
    ReverseRangeNotAllowedHere,
}
164
impl TrapCode {
    /// Compares two trap codes by variant only, ignoring any payload fields —
    /// e.g. any two `VecBoundsFail` values are "sort of equal".
    pub fn is_sort_of_equal(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}
170
impl TryFrom<u8> for TrapCode {
    type Error = ();

    /// Decodes the one-octet operand of the `Trap` opcode.
    ///
    /// Payload-carrying variants come back with zeroed payloads; compare with
    /// `is_sort_of_equal` rather than `==`.
    ///
    /// NOTE(review): only codes 0..=11 decode here, so the later variants
    /// (`MemoryCorruption`, `VecOutOfCapacity`, …) cannot round-trip through a
    /// trap octet — confirm the numbering against the trap emitter before
    /// extending this table.
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        let code = match value {
            0 => Self::StoppedByTestHarness,
            1 => Self::VecBoundsFail {
                encountered: 0,
                element_count: 0,
            },
            2 => Self::MapOutOfSpace,
            3 => Self::MapEntryNotFound,
            4 => Self::MapEntryNotFoundAndCouldNotBeCreated,
            5 => Self::MapEntryNotFoundForRemoval,
            6 => Self::LessThanTrap { a: 0, b: 0 },
            7 => Self::SparseOutOfSpace,
            8 => Self::SparseRemoveFailed,
            9 => Self::SparseGetFailed,
            10 => Self::MapCouldNotBeCopied,
            11 => Self::OverlappingMemoryCopy,
            _ => return Err(()),
        };
        Ok(code)
    }
}
196
/// Error returned by `TrapCode::from_str` when the string matches no variant.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseTrapCodeError;
199
200impl Display for ParseTrapCodeError {
201 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
202 write!(f, "Unable to parse string into a valid TrapCode")
203 }
204}
205impl Error for ParseTrapCodeError {}
impl FromStr for TrapCode {
    type Err = ParseTrapCodeError;

    /// Parses a snake_case trap name (as used by the test harness) into a
    /// `TrapCode`. Payload-carrying variants come back with zeroed payloads;
    /// compare with `is_sort_of_equal`.
    ///
    /// NOTE(review): several variants (`VecEmpty`, `MemoryCorruption`, the
    /// grid bounds codes, …) have no string form here — confirm whether they
    /// should be nameable by tests.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let code = match s {
            "stopped_by_test_harness" => Self::StoppedByTestHarness,
            "vec_bounds_fail" => Self::VecBoundsFail {
                encountered: 0,
                element_count: 0,
            },
            "vec_out_of_capacity" => Self::VecOutOfCapacity {
                encountered: 0,
                capacity: 0,
            },
            "reverse_range_not_allowed_here" => Self::ReverseRangeNotAllowedHere,
            "map_out_of_space" => Self::MapOutOfSpace,
            "map_entry_not_found" => Self::MapEntryNotFound,
            "map_entry_or_create_failed" => Self::MapEntryNotFoundAndCouldNotBeCreated,
            "map_entry_remove_failed" => Self::MapEntryNotFoundForRemoval,
            "less_than_trap" => Self::LessThanTrap { a: 0, b: 0 },
            "sparse_out_of_space" => Self::SparseOutOfSpace,
            "sparse_remove_failed" => Self::SparseRemoveFailed,
            "sparse_get_failed" => Self::SparseGetFailed,
            "map_could_not_be_copied" => Self::MapCouldNotBeCopied,
            "overlapping_memory_copy" => Self::OverlappingMemoryCopy,
            _ => return Err(ParseTrapCodeError),
        };

        Ok(code)
    }
}
237
238impl Display for TrapCode {
239 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
240 write!(f, "trap {self:?}")
241 }
242}
243
/// Overall VM execution state, inspected by the embedder after a run.
#[derive(Eq, Debug, PartialEq)]
pub enum VmState {
    /// Running (or ran to completion) without incident.
    Normal,
    /// A `Panic` opcode fired; carries the panic message.
    Panic(String),
    /// A `Trap` opcode fired; carries the decoded trap code.
    Trap(TrapCode),
    /// A halt opcode stopped execution.
    Halt,
    /// A `Step` opcode yielded control back to the embedder.
    Step,
}
252
253pub struct Vm {
254 memory: Memory,
256
257 pc: usize, instructions: Vec<BinaryInstruction>, execution_complete: bool, call_stack: Vec<CallFrame>, handlers: [HandlerType; 256],
266
267 pub registers: [u32; 256], pub debug: Debug,
271 pub debug_stats_enabled: bool,
272 pub debug_opcodes_enabled: bool,
273 pub debug_operations_enabled: bool,
274 pub state: VmState,
275}
276
impl Vm {
    /// True once the run loop has stopped (halt, trap, panic, or step).
    #[must_use]
    pub const fn is_execution_complete(&self) -> bool {
        self.execution_complete
    }
}
283
// Allocation alignment, in bytes.
const ALIGNMENT: usize = 8;
// Low bits that must be clear in an aligned address.
const ALIGNMENT_REST: usize = ALIGNMENT - 1;
// Mask that rounds an address down to an aligned boundary.
// NOTE(review): none of these are referenced in the visible part of the file.
const ALIGNMENT_MASK: usize = !ALIGNMENT_REST;
287
/// Construction parameters for `Vm::new`.
pub struct VmSetup {
    // Bytes reserved for the stack region.
    pub stack_memory_size: usize,
    // Bytes reserved for the heap region.
    pub heap_memory_size: usize,
    // Constant pool copied into the bottom of memory.
    pub constant_memory: Vec<u8>,
    pub debug_stats_enabled: bool,
    pub debug_opcodes_enabled: bool,
    pub debug_operations_enabled: bool,
}
296
297impl Vm {
    /// Builds a VM with the given bytecode, allocates its memory per `setup`,
    /// and populates the 256-entry opcode dispatch table. Any opcode without
    /// an explicit registration falls through to `execute_unimplemented`.
    #[allow(clippy::too_many_lines)]
    pub fn new(instructions: Vec<BinaryInstruction>, setup: VmSetup) -> Self {
        let memory = Memory::new(
            &setup.constant_memory,
            setup.stack_memory_size,
            setup.heap_memory_size,
        );

        // NOTE(review): this sanity check runs *after* Memory::new has already
        // consumed the same sizes — consider asserting before construction.
        assert!(
            setup.constant_memory.len() < setup.stack_memory_size / 2,
            "too much constant memory"
        );

        let mut vm = Self {
            memory,
            pc: 0,
            instructions,
            execution_complete: false,
            call_stack: vec![],
            // Default every opcode slot to the "unimplemented" handler.
            handlers: [const { HandlerType::Args0(Self::execute_unimplemented) }; 256],
            registers: [const { 0 }; 256],
            debug: Debug {
                opcodes_executed: 0,
                call_depth: 0,
                max_call_depth: 0,
                max_stack_offset: 0,
            },
            debug_stats_enabled: setup.debug_stats_enabled,
            debug_opcodes_enabled: setup.debug_opcodes_enabled,
            debug_operations_enabled: setup.debug_operations_enabled,
            state: Normal,
        };

        // --- Frame register spill/restore ---
        vm.handlers[OpCode::StRegToFrame as usize] =
            HandlerType::Args6(Self::execute_st_regs_to_frame);
        vm.handlers[OpCode::StRegToFrameUsingMask as usize] =
            HandlerType::Args5(Self::execute_st_regs_to_frame_using_mask);

        // --- Stores through a base pointer + offset ---
        vm.handlers[OpCode::St32UsingPtrWithOffset as usize] =
            HandlerType::Args6(Self::execute_stw_using_base_ptr_and_offset);
        vm.handlers[OpCode::St16UsingPtrWithOffset as usize] =
            HandlerType::Args6(Self::execute_sth_using_base_ptr_and_offset);
        vm.handlers[OpCode::St8UsingPtrWithOffset as usize] =
            HandlerType::Args6(Self::execute_stb_using_base_ptr_and_offset);

        vm.handlers[OpCode::LdRegFromFrameRange as usize] =
            HandlerType::Args6(Self::execute_ld_regs_from_frame);
        vm.handlers[OpCode::LdRegFromFrameUsingMask as usize] =
            HandlerType::Args5(Self::execute_ld_regs_from_frame_using_mask);

        // --- Loads through a base pointer + offset ---
        vm.handlers[OpCode::Ld32FromPointerWithOffset as usize] =
            HandlerType::Args6(Self::execute_ldw_from_base_ptr_and_offset);
        vm.handlers[OpCode::Ld16FromPointerWithOffset as usize] =
            HandlerType::Args6(Self::execute_ldh_from_base_ptr_and_offset);
        vm.handlers[OpCode::Ld8FromPointerWithOffset as usize] =
            HandlerType::Args6(Self::execute_ldb_from_base_ptr_and_offset);

        // --- Immediate moves and register moves ---
        vm.handlers[OpCode::Mov8FromImmediateValue as usize] =
            HandlerType::Args2(Self::execute_mov_8);
        vm.handlers[OpCode::Mov16FromImmediateValue as usize] =
            HandlerType::Args3(Self::execute_mov_16);
        vm.handlers[OpCode::Mov32FromImmediateValue as usize] =
            HandlerType::Args5(Self::execute_mov_32);

        vm.handlers[OpCode::MovReg as usize] = HandlerType::Args2(Self::execute_mov_reg);
        vm.handlers[OpCode::LdPtrFromEffectiveFrameAddress as usize] =
            HandlerType::Args5(Self::execute_lea);

        vm.handlers[OpCode::Ld32FromAbsoluteAddress as usize] =
            HandlerType::Args5(Self::execute_ldw_from_absolute_address);

        vm.handlers[OpCode::Ld8FromAbsoluteAddress as usize] =
            HandlerType::Args5(Self::execute_ldb_from_absolute_address);

        // --- Bulk memory ---
        vm.handlers[OpCode::BlockCopy as usize] =
            HandlerType::Args6(Self::execute_mov_mem_with_immediate_size);

        vm.handlers[OpCode::FrameMemClr as usize] =
            HandlerType::Args8(Self::execute_frame_memory_clear);

        // --- Comparisons ---
        vm.handlers[OpCode::LtI32 as usize] = HandlerType::Args3(Self::execute_lt_i32);
        vm.handlers[OpCode::LeI32 as usize] = HandlerType::Args3(Self::execute_le_i32);
        vm.handlers[OpCode::GtI32 as usize] = HandlerType::Args3(Self::execute_gt_i32);
        vm.handlers[OpCode::GeI32 as usize] = HandlerType::Args3(Self::execute_ge_i32);

        vm.handlers[OpCode::GeU32 as usize] = HandlerType::Args3(Self::execute_ge_u32);
        vm.handlers[OpCode::LtU32 as usize] = HandlerType::Args3(Self::execute_lt_u32);

        vm.handlers[OpCode::CmpReg as usize] = HandlerType::Args3(Self::execute_cmp_reg);
        vm.handlers[OpCode::CmpBlock as usize] = HandlerType::Args5(Self::execute_cmp_block);

        vm.handlers[OpCode::Eq8Imm as usize] = HandlerType::Args3(Self::execute_eq_8_imm);
        vm.handlers[OpCode::TrapOnLessThan as usize] =
            HandlerType::Args2(Self::execute_trap_on_less_than);

        vm.handlers[OpCode::MovEqualToZero as usize] =
            HandlerType::Args2(Self::execute_move_equal_to_zero);

        // --- Branches ---
        vm.handlers[OpCode::BFalse as usize] = HandlerType::Args3(Self::execute_branch_if_false);
        vm.handlers[OpCode::BTrue as usize] = HandlerType::Args3(Self::execute_branch_if_true);

        vm.handlers[OpCode::B as usize] = HandlerType::Args2(Self::execute_b);

        // --- Integer arithmetic ---
        vm.handlers[OpCode::AddU32 as usize] = HandlerType::Args3(Self::execute_add_u32);
        vm.handlers[OpCode::AddU32Imm as usize] = HandlerType::Args6(Self::execute_add_u32_imm);
        vm.handlers[OpCode::MulU32 as usize] = HandlerType::Args3(Self::execute_mul_u32);
        vm.handlers[OpCode::SubU32 as usize] = HandlerType::Args3(Self::execute_sub_u32);

        vm.handlers[OpCode::NegI32 as usize] = HandlerType::Args2(Self::execute_neg_i32);
        vm.handlers[OpCode::ModI32 as usize] = HandlerType::Args3(Self::execute_mod_i32);
        vm.handlers[OpCode::DivI32 as usize] = HandlerType::Args3(Self::execute_div_i32);

        // --- Fixed-point arithmetic ---
        vm.handlers[OpCode::DivF32 as usize] = HandlerType::Args3(Self::execute_div_f32);
        vm.handlers[OpCode::MulF32 as usize] = HandlerType::Args3(Self::execute_mul_f32);

        // --- Calls and control ---
        vm.handlers[OpCode::Call as usize] = HandlerType::Args4(Self::execute_call);
        vm.handlers[OpCode::Enter as usize] = HandlerType::Args4(Self::execute_enter);
        vm.handlers[OpCode::Ret as usize] = HandlerType::Args0(Self::execute_ret);

        vm.handlers[OpCode::Hlt as usize] = HandlerType::Args0(Self::execute_hlt);
        vm.handlers[OpCode::UserHalt as usize] = HandlerType::Args0(Self::execute_user_halt);
        vm.handlers[OpCode::Step as usize] = HandlerType::Args0(Self::execute_step);
        vm.handlers[OpCode::Trap as usize] = HandlerType::Args1(Self::execute_trap);
        vm.handlers[OpCode::Panic as usize] = HandlerType::Args1(Self::execute_panic);

        // --- Strings ---
        vm.handlers[OpCode::CodepointToString as usize] =
            HandlerType::Args2(Self::execute_codepoint_to_string);

        vm.handlers[OpCode::BoolToString as usize] =
            HandlerType::Args2(Self::execute_bool_to_string);

        vm.handlers[OpCode::StringAppend as usize] =
            HandlerType::Args3(Self::execute_string_append);

        vm.handlers[OpCode::StringCmp as usize] = HandlerType::Args3(Self::execute_string_cmp);
        vm.handlers[OpCode::StringToString as usize] =
            HandlerType::Args2(Self::execute_string_to_string);

        vm.handlers[OpCode::StringIterInit as usize] =
            HandlerType::Args2(Self::execute_string_iter_init);
        vm.handlers[OpCode::StringIterNext as usize] =
            HandlerType::Args4(Self::execute_string_iter_next);
        vm.handlers[OpCode::StringIterNextPair as usize] =
            HandlerType::Args5(Self::execute_string_iter_next_pair);

        vm.handlers[OpCode::ByteToString as usize] =
            HandlerType::Args2(Self::execute_byte_to_string);

        // --- Int intrinsics ---
        vm.handlers[OpCode::IntToRnd as usize] =
            HandlerType::Args2(Self::execute_pseudo_random_i32);
        vm.handlers[OpCode::IntMin as usize] = HandlerType::Args3(Self::execute_min_i32);
        vm.handlers[OpCode::IntMax as usize] = HandlerType::Args3(Self::execute_max_i32);
        vm.handlers[OpCode::IntClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);

        vm.handlers[OpCode::IntAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);

        vm.handlers[OpCode::IntToString as usize] = HandlerType::Args2(Self::execute_i32_to_string);
        vm.handlers[OpCode::IntToFloat as usize] = HandlerType::Args2(Self::execute_i32_to_f32);

        // --- Float (fixed-point) intrinsics ---
        // NOTE(review): min/max/clamp/abs reuse the i32 handlers; comparing
        // fixed-point raw bits as i32 preserves ordering — presumably
        // intentional, but worth confirming.
        vm.handlers[OpCode::FloatPseudoRandom as usize] =
            HandlerType::Args2(Self::execute_pseudo_random_i32);
        vm.handlers[OpCode::FloatMin as usize] = HandlerType::Args3(Self::execute_min_i32);
        vm.handlers[OpCode::FloatMax as usize] = HandlerType::Args3(Self::execute_max_i32);
        vm.handlers[OpCode::FloatClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);

        vm.handlers[OpCode::FloatRound as usize] = HandlerType::Args2(Self::execute_f32_round);
        vm.handlers[OpCode::FloatFloor as usize] = HandlerType::Args2(Self::execute_f32_floor);
        vm.handlers[OpCode::FloatSqrt as usize] = HandlerType::Args2(Self::execute_f32_sqrt);
        vm.handlers[OpCode::FloatSign as usize] = HandlerType::Args2(Self::execute_f32_sign);
        vm.handlers[OpCode::FloatAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);
        vm.handlers[OpCode::FloatSin as usize] = HandlerType::Args2(Self::execute_f32_sin);
        vm.handlers[OpCode::FloatCos as usize] = HandlerType::Args2(Self::execute_f32_cos);
        vm.handlers[OpCode::FloatAsin as usize] = HandlerType::Args2(Self::execute_f32_asin);
        vm.handlers[OpCode::FloatAcos as usize] = HandlerType::Args2(Self::execute_f32_acos);
        vm.handlers[OpCode::FloatToString as usize] =
            HandlerType::Args2(Self::execute_f32_to_string);
        // NOTE(review): FloatPseudoRandom is registered a second time here —
        // harmless (same handler) but redundant with the registration above.
        vm.handlers[OpCode::FloatPseudoRandom as usize] =
            HandlerType::Args2(Self::execute_pseudo_random_i32);

        // --- Ranges ---
        vm.handlers[OpCode::RangeInit as usize] = HandlerType::Args4(Self::execute_range_init);
        vm.handlers[OpCode::RangeIterInit as usize] =
            HandlerType::Args2(Self::execute_range_iter_init);
        vm.handlers[OpCode::RangeIterNext as usize] =
            HandlerType::Args4(Self::execute_range_iter_next);

        // --- Arrays and vecs ---
        vm.handlers[OpCode::ArrayInitWithLenAndCapacity as usize] =
            HandlerType::Args7(Self::execute_array_init);

        vm.handlers[OpCode::VecInit as usize] = HandlerType::Args7(Self::execute_vec_init);
        vm.handlers[OpCode::VecCopy as usize] = HandlerType::Args2(Self::execute_vec_copy);
        vm.handlers[OpCode::VecCopyRange as usize] = HandlerType::Args3(Self::execute_vec_copy_range);
        vm.handlers[OpCode::VecCmp as usize] = HandlerType::Args3(Self::execute_vec_cmp);
        vm.handlers[OpCode::VecIterInit as usize] = HandlerType::Args2(Self::execute_vec_iter_init);
        vm.handlers[OpCode::VecIterNext as usize] = HandlerType::Args4(Self::execute_vec_iter_next);
        vm.handlers[OpCode::VecIterNextPair as usize] =
            HandlerType::Args5(Self::execute_vec_iter_next_pair);
        vm.handlers[OpCode::VecPushAddr as usize] = HandlerType::Args2(Self::execute_vec_push_addr);
        vm.handlers[OpCode::VecGet as usize] = HandlerType::Args3(Self::execute_vec_get);
        vm.handlers[OpCode::VecPop as usize] = HandlerType::Args2(Self::execute_vec_pop);
        vm.handlers[OpCode::VecRemoveIndex as usize] =
            HandlerType::Args2(Self::execute_vec_remove_index);

        // --- Maps (open addressing) ---
        vm.handlers[OpCode::MapInitWithCapacityAndKeyAndTupleSizeAddr as usize] =
            HandlerType::Args7(Self::execute_map_open_addressing_init);
        vm.handlers[OpCode::MapIterInit as usize] = HandlerType::Args2(Self::execute_map_iter_init);
        vm.handlers[OpCode::MapIterNext as usize] = HandlerType::Args4(Self::execute_map_iter_next);
        vm.handlers[OpCode::MapIterNextPair as usize] =
            HandlerType::Args5(Self::execute_map_iter_next_pair);
        vm.handlers[OpCode::MapGetEntryLocation as usize] =
            HandlerType::Args3(Self::execute_map_open_addressing_get_entry_location);
        vm.handlers[OpCode::MapGetOrReserveEntryLocation as usize] =
            HandlerType::Args3(Self::execute_map_open_addressing_get_or_reserve_entry);
        vm.handlers[OpCode::MapHas as usize] =
            HandlerType::Args3(Self::execute_map_open_addressing_has);
        vm.handlers[OpCode::MapRemove as usize] =
            HandlerType::Args2(Self::execute_map_open_addressing_remove);
        vm.handlers[OpCode::MapOverwrite as usize] =
            HandlerType::Args2(Self::execute_map_overwrite);

        // --- Sparse collections ---
        vm.handlers[OpCode::SparseInit as usize] = HandlerType::Args7(Self::execute_sparse_init);
        vm.handlers[OpCode::SparseAddGiveEntryAddress as usize] =
            HandlerType::Args7(Self::execute_sparse_add_get_entry_addr);
        vm.handlers[OpCode::SparseRemove as usize] =
            HandlerType::Args2(Self::execute_sparse_remove);
        vm.handlers[OpCode::SparseGetEntryAddr as usize] =
            HandlerType::Args5(Self::execute_sparse_get_entry_addr);
        vm.handlers[OpCode::SparseIsAlive as usize] =
            HandlerType::Args3(Self::execute_sparse_is_alive);

        vm.handlers[OpCode::SparseIterInit as usize] =
            HandlerType::Args2(Self::execute_sparse_iter_init);
        vm.handlers[OpCode::SparseIterNext as usize] =
            HandlerType::Args4(Self::execute_sparse_iter_next);
        vm.handlers[OpCode::SparseIterNextPair as usize] =
            HandlerType::Args5(Self::execute_sparse_iter_next_pair);

        // --- Grids ---
        vm.handlers[OpCode::GridInit as usize] = HandlerType::Args6(Self::execute_grid_init);
        vm.handlers[OpCode::GridGetEntryAddr as usize] =
            HandlerType::Args6(Self::execute_grid_get_entry_addr);

        vm
    }
    /// Read-only view of the VM's memory subsystem.
    #[must_use]
    pub const fn memory(&self) -> &Memory {
        &self.memory
    }
585
    /// Mutable access to the VM's memory subsystem.
    pub fn memory_mut(&mut self) -> &mut Memory {
        &mut self.memory
    }
589
    /// Executes exactly one instruction and returns `true` while execution
    /// should continue (i.e. `execution_complete` has not been set).
    ///
    /// `HostCall` is special-cased because it needs the host callback; every
    /// other opcode dispatches through the `handlers` table with its decoded
    /// operand octets.
    pub fn step(&mut self, host_function_callback: &mut dyn HostFunctionCallback) -> bool {
        let instruction = &self.instructions[self.pc];
        let opcode = instruction.opcode;

        // Sanity-check memory layout invariants while running normally.
        if self.memory.execution_mode == NormalExecution {
            assert!(self.memory.stack_offset >= self.memory.constant_memory_size);
            assert!(self.memory.stack_offset <= self.memory.heap_start);
        }

        // Advance pc *before* dispatch so branch/call handlers can overwrite it.
        self.pc += 1;
        if opcode == OpCode::HostCall as u8 {
            self.execute_host_call(
                instruction.operands[0],
                instruction.operands[1],
                instruction.operands[2],
                host_function_callback,
            );
        } else {
            // Unpack exactly as many operand octets as the handler's arity.
            match self.handlers[opcode as usize] {
                HandlerType::Args0(handler) => handler(self),
                HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                HandlerType::Args2(handler) => {
                    handler(self, instruction.operands[0], instruction.operands[1]);
                }
                HandlerType::Args3(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                ),
                HandlerType::Args4(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                ),
                HandlerType::Args5(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                ),
                HandlerType::Args6(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                ),
                HandlerType::Args7(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                ),
                HandlerType::Args8(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                    instruction.operands[7],
                ),
            }
        }

        !self.execution_complete
    }
671
    /// Runs instructions in a tight loop until a handler sets
    /// `execution_complete` (halt, step, trap, or panic).
    ///
    /// The dispatch body mirrors `step`; the extra `debug_vm`-gated blocks
    /// print register snapshots / opcode traces and count executed opcodes.
    #[allow(clippy::too_many_lines)]
    pub fn execute_internal(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execution_complete = false;

        while !self.execution_complete {
            let instruction = &self.instructions[self.pc];
            let opcode = instruction.opcode;

            #[cfg(feature = "debug_vm")]
            if self.debug_opcodes_enabled {
                // Dump a fixed set of interesting registers before each opcode.
                let regs = [0, 1, 2, 3, 4, 128, 129, 130];

                for reg in regs {
                    print!(
                        "{}",
                        tinter::bright_black(&format!("{reg:02X}: {:08X}, ", self.registers[reg]))
                    );
                }
                println!();

                let operands = instruction.operands;
                print!("> {:04X}: ", self.pc);
                self.debug_opcode(opcode, &operands);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_stats_enabled {
                self.debug.opcodes_executed += 1;
            }

            // Advance pc before dispatch so branch/call handlers can overwrite it.
            self.pc += 1;
            if opcode == OpCode::HostCall as u8 {
                self.execute_host_call(
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    host_function_callback,
                );
            } else {
                match self.handlers[opcode as usize] {
                    HandlerType::Args0(handler) => handler(self),
                    HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                    HandlerType::Args2(handler) => {
                        handler(self, instruction.operands[0], instruction.operands[1]);
                    }
                    HandlerType::Args3(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                    ),
                    HandlerType::Args4(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                    ),
                    HandlerType::Args5(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                    ),
                    HandlerType::Args6(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                    ),
                    HandlerType::Args7(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                    ),
                    HandlerType::Args8(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                        instruction.operands[7],
                    ),
                }
            }
        }
    }
773
774 pub const fn set_return_register_address(&mut self, r0_addr: u32) {
775 set_reg!(self, 0, r0_addr);
776 }
777
778 pub fn set_register_pointer_addr_for_parameter(&mut self, register: u8, addr: u32) {
779 assert!((1..=6).contains(®ister), "not a parameter register");
780 set_reg!(self, register, addr);
781 }
782
    /// Moves both the stack pointer and the frame pointer to `addr`.
    pub fn set_stack_start(&mut self, addr: usize) {
        if self.debug_operations_enabled {
            eprintln!("vm: set stack start and frame to: 0x{addr:08X}");
        }
        self.memory.set_stack_and_frame(addr);
    }
789
    /// Continues execution from the current `pc` without resetting any state
    /// (unlike `execute_from_ip`, which clears the call stack first).
    pub fn resume(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execute_internal(host_function_callback);
    }
793
    /// Starts a fresh run at instruction position `ip`: clears the call stack,
    /// resets the memory offset, then runs to completion.
    pub fn execute_from_ip(
        &mut self,
        ip: &InstructionPosition,
        host_function_callback: &mut dyn HostFunctionCallback,
    ) {
        self.pc = ip.0 as usize;
        if self.debug_operations_enabled {
            eprintln!(
                "starting up the vm, normal_stack_start: {:08X} SP:{:08X} FP:{:08X}",
                self.memory.stack_start, self.memory.stack_offset, self.memory.frame_offset
            );
        }

        self.call_stack.clear();
        self.memory.reset_offset();

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            eprintln!(
                "start executing --------- frame {:X} heap: {:X}",
                self.memory.frame_offset, self.memory.heap_alloc_offset
            );
        }

        self.execute_internal(host_function_callback);
    }
820
    /// Sets the program counter to the given instruction position.
    pub const fn set_pc(&mut self, pc: &InstructionPosition) {
        self.pc = pc.0 as usize;
    }
824
    /// Current program counter (index of the next instruction).
    pub const fn pc(&self) -> usize {
        self.pc
    }
828
    /// Current frame-pointer offset into VM memory.
    pub fn fp(&self) -> usize {
        self.memory.frame_offset
    }
832
    /// Current stack-pointer offset into VM memory.
    pub fn sp(&self) -> usize {
        self.memory.stack_offset
    }
836
    /// The active call frames, innermost last.
    pub fn call_stack(&self) -> &[CallFrame] {
        &self.call_stack
    }
840
    /// Fallback handler installed for every opcode slot without a real
    /// implementation: dumps diagnostics and aborts the VM with a panic.
    fn execute_unimplemented(&mut self) {
        // pc was already advanced by the dispatch loop, so the offending
        // instruction sits at pc - 1.
        let unknown_opcode = OpCode::from(self.instructions[self.pc - 1].opcode);
        eprintln!("error: opcode not implemented: {unknown_opcode} {unknown_opcode:?}");
        eprintln!("VM runtime halted.");
        self.debug_output();
        panic!("unknown OPCODE! {unknown_opcode} {unknown_opcode:?}");
    }
848
    /// Raw byte view of memory starting at the current frame pointer.
    ///
    /// NOTE(review): the slice length is the *total* `memory_size` measured
    /// from `frame_ptr`; when the frame pointer is offset from the base this
    /// looks like it can extend past the end of the allocation — confirm.
    pub fn frame_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.frame_ptr(), self.memory.memory_size) }
    }
852
    /// Raw byte view of all VM memory.
    ///
    /// NOTE(review): despite the name this starts at heap offset 0 and spans
    /// `memory_size` — presumably the whole arena; confirm against `Memory`.
    pub fn heap_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), self.memory.memory_size) }
    }
856
    /// Raw byte view of the constant pool at the bottom of VM memory.
    pub fn constant_memory(&self) -> &[u8] {
        unsafe {
            std::slice::from_raw_parts(
                self.memory.get_heap_ptr(0),
                self.memory.constant_memory_size,
            )
        }
    }
865
    /// Raw byte view of VM memory from the base up to (exclusive) `offset`.
    pub fn all_memory_up_to(&self, offset: usize) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), offset) }
    }
869
    /// Size in bytes of the constant pool.
    pub fn constant_size(&self) -> usize {
        self.memory.constant_memory_size
    }
    /// The currently loaded bytecode.
    #[must_use]
    #[allow(clippy::missing_const_for_fn)]
    pub fn instructions(&self) -> &[BinaryInstruction] {
        &self.instructions
    }
    /// Full reset: memory, allocator, program counter, and call stack.
    /// The loaded bytecode is kept.
    pub fn reset(&mut self) {
        self.memory.reset();
        self.memory.reset_allocator();

        self.pc = 0;
        self.execution_complete = false;
        self.call_stack.clear();
    }
886
    /// Rewinds the allocator, stack/frame pointers, and call stack so the next
    /// run starts just above the constant pool. Does not touch `pc`.
    pub fn reset_stack_and_heap_to_constant_limit(&mut self) {
        self.memory.reset_allocator();
        self.memory.reset_stack_and_fp();
        self.reset_call_stack();
        self.execution_complete = false;
    }
894
    /// Drops all active call frames.
    pub fn reset_call_stack(&mut self) {
        self.call_stack.clear();
    }
899
    /// Clears all collected execution and memory statistics.
    pub fn reset_debug(&mut self) {
        self.debug = Debug::default();
        self.memory.debug = MemoryDebug {
            max_heap_alloc_offset: 0,
        }
    }
906
    /// Current frame-pointer offset (same value as `fp`).
    #[must_use]
    pub fn frame_offset(&self) -> usize {
        self.memory.frame_offset
    }
911
912 pub fn load_bytecode(&mut self, instructions: Vec<BinaryInstruction>) {
913 self.instructions = instructions;
914 self.pc = 0;
915 self.execution_complete = false;
916 }
917
    /// Loads a 32-bit immediate (four operand octets) into `dst_reg`.
    ///
    /// NOTE(review): assumes `Self::u8s_to_32` (defined elsewhere) combines
    /// the octets low-byte-first like `u32_from_u8s!` — confirm.
    #[inline]
    fn execute_mov_32(&mut self, dst_reg: u8, a: u8, b: u8, c: u8, d: u8) {
        set_reg!(self, dst_reg, Self::u8s_to_32(a, b, c, d));
    }
922
923 #[inline]
924 fn execute_mov_16(&mut self, dst_reg: u8, a: u8, b: u8) {
925 set_reg!(self, dst_reg, u16_from_u8s!(a, b));
926 }
927
928 #[inline]
929 fn execute_mov_8(&mut self, dst_reg: u8, octet: u8) {
930 set_reg!(self, dst_reg, octet);
931 }
932
933 #[inline]
935 fn execute_mul_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
936 let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
937 let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);
938 set_reg!(self, dst_reg, (lhs * rhs).inner());
939 }
940
    /// Fixed-point division: `dst = lhs / rhs` on raw `Fp` register bits.
    ///
    /// NOTE(review): no zero-divisor guard here; behavior for `rhs == 0`
    /// depends on `Fp`'s `Div` impl — confirm it fails as intended.
    #[inline]
    fn execute_div_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);

        set_reg!(self, dst_reg, (lhs / rhs).inner());
    }
948
949 #[inline]
950 fn execute_f32_round(&mut self, dst_reg: u8, val_reg: u8) {
951 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
952
953 let int_val: i16 = val.round().into();
954 set_reg!(self, dst_reg, int_val);
955 }
956
957 #[inline]
958 fn execute_f32_floor(&mut self, dst_reg: u8, val_reg: u8) {
959 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
960
961 let floored: i16 = val.floor().into();
962 set_reg!(self, dst_reg, floored);
963 }
964
965 #[inline]
966 fn execute_f32_sqrt(&mut self, dst_reg: u8, val_reg: u8) {
967 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
968
969 set_reg!(self, dst_reg, val.sqrt().inner());
970 }
971
972 #[inline]
973 fn execute_f32_sin(&mut self, dst_reg: u8, val_reg: u8) {
974 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
975
976 set_reg!(self, dst_reg, val.sin().inner());
977 }
978
979 #[inline]
980 fn execute_f32_asin(&mut self, dst_reg: u8, val_reg: u8) {
981 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
982
983 set_reg!(self, dst_reg, val.asin().inner());
984 }
985
986 #[inline]
987 fn execute_f32_cos(&mut self, dst_reg: u8, val_reg: u8) {
988 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
989
990 set_reg!(self, dst_reg, val.cos().inner());
991 }
992
993 #[inline]
994 fn execute_f32_acos(&mut self, dst_reg: u8, val_reg: u8) {
995 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
996
997 set_reg!(self, dst_reg, val.acos().inner());
998 }
999
    /// Formats the fixed-point value in `val_reg` and stores the resulting
    /// VM string in `dst_reg` via `create_string` (defined elsewhere).
    #[inline]
    fn execute_f32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        self.create_string(dst_reg, &val.to_string())
    }
1015
1016 #[inline]
1017 fn execute_f32_sign(&mut self, dst_reg: u8, val_reg: u8) {
1018 let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1019 set_reg!(
1021 self,
1022 dst_reg,
1023 Fp::from(if val < 0 {
1024 -1
1025 } else if val > 0 {
1026 1
1027 } else {
1028 0
1029 })
1030 .inner()
1031 );
1032 }
1033
1034 #[inline]
1035 fn execute_neg_i32(&mut self, dst_reg: u8, val_reg: u8) {
1036 let val = get_reg!(self, val_reg) as i32;
1037 set_reg!(self, dst_reg, -val);
1038 }
1039
1040 #[inline]
1041 const fn execute_add_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1042 let lhs = get_reg!(self, lhs_reg);
1043 let rhs = get_reg!(self, rhs_reg);
1044
1045 set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
1046 }
1047
1048 #[inline]
1049 fn execute_add_u32_imm(
1050 &mut self,
1051 dst_reg: u8,
1052 lhs_reg: u8,
1053 rhs_1: u8,
1054 rhs_2: u8,
1055 rhs_3: u8,
1056 rhs_4: u8,
1057 ) {
1058 let lhs = get_reg!(self, lhs_reg);
1059 let rhs = u32_from_u8s!(rhs_1, rhs_2, rhs_3, rhs_4);
1060
1061 set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
1062 }
1063
1064 #[inline]
1065 const fn execute_mul_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1066 let lhs = get_reg!(self, lhs_reg);
1067 let rhs = get_reg!(self, rhs_reg);
1068
1069 set_reg!(self, dst_reg, lhs.wrapping_mul(rhs));
1070 }
1071
1072 #[inline]
1073 const fn execute_sub_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1074 let lhs = get_reg!(self, lhs_reg);
1075 let rhs = get_reg!(self, rhs_reg);
1076
1077 set_reg!(self, dst_reg, lhs.wrapping_sub(rhs));
1078 }
1079
1080 #[inline]
1084 fn execute_mod_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1085 let lhs = get_reg!(self, lhs_reg) as i32;
1086 let rhs = get_reg!(self, rhs_reg) as i32;
1087
1088 let result = ((lhs % rhs) + rhs) % rhs;
1089 set_reg!(self, dst_reg, result);
1090 }
1091
1092 #[inline]
1093 fn execute_div_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1094 let lhs = get_reg!(self, lhs_reg) as i32;
1095 let rhs = get_reg!(self, rhs_reg) as i32;
1096
1097 let result_option = lhs.checked_div(rhs);
1098
1099 match result_option {
1100 Some(result) => {
1101 set_reg!(self, dst_reg, result);
1102 }
1103 None => {
1104 panic!(
1105 "VM Runtime Error: Signed 32-bit integer overflow during DIV_I32 (R{dst_reg} = R{lhs_reg} - R{rhs_reg})"
1106 );
1107 }
1108 }
1109 }
1110
1111 #[inline]
1112 fn execute_lt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1113 let lhs = get_reg!(self, lhs_reg) as i32;
1114 let rhs = get_reg!(self, rhs_reg) as i32;
1115 set_reg!(self, dest_bool_reg, lhs < rhs);
1116 }
1117
1118 #[inline]
1119 fn execute_le_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1120 let lhs = get_reg!(self, lhs_reg) as i32;
1121 let rhs = get_reg!(self, rhs_reg) as i32;
1122 set_reg!(self, dest_bool_reg, lhs <= rhs);
1123 }
1124
1125 #[inline]
1126 fn execute_gt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1127 let lhs = get_reg!(self, lhs_reg) as i32;
1128 let rhs = get_reg!(self, rhs_reg) as i32;
1129 set_reg!(self, dest_bool_reg, lhs > rhs);
1130 }
1131
1132 #[inline]
1133 fn execute_ge_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1134 let lhs = get_reg!(self, lhs_reg) as i32;
1135 let rhs = get_reg!(self, rhs_reg) as i32;
1136
1137 set_reg!(self, dest_bool_reg, lhs >= rhs);
1138 }
1139
1140 #[inline]
1141 fn execute_ge_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1142 let lhs = get_reg!(self, lhs_reg);
1143 let rhs = get_reg!(self, rhs_reg);
1144
1145 set_reg!(self, dest_bool_reg, lhs >= rhs);
1146 }
1147
1148 #[inline]
1149 fn execute_lt_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1150 let lhs = get_reg!(self, lhs_reg);
1151 let rhs = get_reg!(self, rhs_reg);
1152
1153 set_reg!(self, dest_bool_reg, lhs < rhs);
1154 }
1155
1156 #[inline]
1157 fn execute_pseudo_random_i32(&mut self, dst_reg: u8, src_reg: u8) {
1158 let src = get_reg!(self, src_reg);
1159 set_reg!(self, dst_reg, squirrel_prng::squirrel_noise5(src, 0) as i32);
1160 }
1161
1162 #[inline]
1163 fn execute_i32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1164 let val = get_reg!(self, val_reg) as i32;
1165
1166 self.create_string(dst_reg, &val.to_string());
1167 }
1168
1169 #[inline]
1170 fn execute_bool_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1171 let val = get_reg!(self, val_reg) != 0;
1172
1173 self.create_string(dst_reg, &val.to_string());
1174 }
1175
1176 const HEX_DIGITS: &'static [u8; 16] = b"0123456789ABCDEF";
1177
1178 #[inline]
1179 const fn byte_to_prefixed_hex(val: u8, dst: &mut [u8; 4]) {
1180 dst[0] = b'0';
1181 dst[1] = b'x';
1182 dst[2] = Self::HEX_DIGITS[(val >> 4) as usize];
1183 dst[3] = Self::HEX_DIGITS[(val & 0x0F) as usize];
1184 }
1185
1186 #[inline]
1187 fn execute_byte_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1188 let val = get_reg!(self, val_reg);
1189 debug_assert!(val <= 0xff, "byte out of range");
1190
1191 let mut buf = [0u8; 4];
1192 Self::byte_to_prefixed_hex(val as u8, &mut buf);
1193
1194 let s = unsafe { std::str::from_utf8_unchecked(&buf) };
1196
1197 self.create_string(dst_reg, s);
1198 }
1199
1200 #[inline]
1201 fn execute_codepoint_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1202 let char_raw = get_reg!(self, val_reg);
1203 let char = std::char::from_u32(char_raw).unwrap();
1204 self.create_string(dst_reg, &char.to_string());
1205 }
1206
    #[inline]
    fn execute_i32_to_f32(&mut self, float_dest_reg: u8, int_source_reg: u8) {
        // Convert a signed integer register into fixed-point form.
        let int_source = get_reg!(self, int_source_reg) as i32;
        // NOTE(review): the value is truncated to i16 before conversion —
        // presumably because the fixed-point whole part is 16 bits wide, but
        // inputs outside i16 range silently wrap. Confirm this is intended.
        set_reg!(self, float_dest_reg, Fp::from(int_source as i16).inner());
    }
1212
1213 #[inline]
1214 fn execute_abs_i32(&mut self, dst_reg: u8, val_reg: u8) {
1215 let val = get_reg!(self, val_reg) as i32;
1216 set_reg!(self, dst_reg, if val < 0 { -val } else { val });
1217 }
1218
1219 #[inline]
1220 fn execute_min_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1221 let lhs = get_reg!(self, lhs_reg) as i32;
1222 let rhs = get_reg!(self, rhs_reg) as i32;
1223
1224 set_reg!(self, dst_reg, if lhs < rhs { lhs } else { rhs });
1225 }
1226
1227 #[inline]
1228 fn execute_max_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1229 let lhs = get_reg!(self, lhs_reg) as i32;
1230 let rhs = get_reg!(self, rhs_reg) as i32;
1231
1232 set_reg!(self, dst_reg, if lhs > rhs { lhs } else { rhs });
1233 }
1234
1235 #[inline]
1236 fn execute_clamp_i32(&mut self, dst_reg: u8, val_reg: u8, min_reg: u8, max_reg: u8) {
1237 let val = get_reg!(self, val_reg) as i32;
1238 let min_val = get_reg!(self, min_reg) as i32;
1239 let max_val = get_reg!(self, max_reg) as i32;
1240
1241 set_reg!(
1242 self,
1243 dst_reg,
1244 if val < min_val {
1245 min_val
1246 } else if val > max_val {
1247 max_val
1248 } else {
1249 val
1250 }
1251 );
1252 }
1253
1254 #[inline]
1256 fn execute_cmp_reg(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1257 set_reg!(
1258 self,
1259 dest_bool_reg,
1260 self.registers[lhs_reg as usize] == self.registers[rhs_reg as usize]
1261 );
1262 }
1263
1264 #[inline]
1266 fn execute_eq_8_imm(&mut self, dest_bool_reg: u8, val_reg: u8, octet: u8) {
1267 let compare = get_reg!(self, val_reg);
1268 set_reg!(self, dest_bool_reg, compare == octet as u32);
1269 }
1270
1271 #[inline]
1272 fn execute_trap_on_less_than(&mut self, a_reg: u8, b_reg: u8) {
1273 let a = get_reg!(self, a_reg);
1274 let b = get_reg!(self, b_reg);
1275 if a < b {
1276 self.internal_trap(TrapCode::LessThanTrap { a, b })
1277 }
1278 }
1279
1280 #[inline]
1281 fn execute_move_equal_to_zero(&mut self, dst_reg: u8, src_reg: u8) {
1282 set_reg!(self, dst_reg, get_reg!(self, src_reg) == 0);
1283 }
1284
1285 #[inline]
1286 const fn execute_branch_if_false(
1287 &mut self,
1288 test_reg: u8,
1289 branch_offset_0: u8,
1290 branch_offset_1: u8,
1291 ) {
1292 if get_reg!(self, test_reg) == 0 {
1293 self.pc =
1294 (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
1295 }
1296 }
1297
1298 #[inline]
1299 const fn execute_branch_if_true(
1300 &mut self,
1301 test_reg: u8,
1302 branch_offset_0: u8,
1303 branch_offset_1: u8,
1304 ) {
1305 if get_reg!(self, test_reg) != 0 {
1306 self.pc =
1307 (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
1308 }
1309 }
1310
1311 #[inline]
1312 fn execute_b(&mut self, branch_offset_0: u8, branch_offset_1: u8) {
1313 self.pc =
1314 (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
1315 }
1316
    #[inline]
    fn execute_hlt(&mut self) {
        // HLT: stop the dispatch loop. `self.state` is left untouched, so this
        // is the "normal completion" stop (contrast `execute_user_halt`).
        self.execution_complete = true;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }

    #[inline]
    fn execute_user_halt(&mut self) {
        // Program-requested halt: stop execution and record the `Halt` state.
        self.execution_complete = true;
        self.state = VmState::Halt;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }

    #[inline]
    fn execute_step(&mut self) {
        // Stop the dispatch loop with the `Step` state — presumably so the
        // host can resume execution later (single-stepping); confirm caller use.
        self.execution_complete = true;
        self.state = VmState::Step;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1345
1346 #[inline]
1347 fn execute_trap(&mut self, trap_code: u8) {
1348 self.internal_trap(TrapCode::try_from(trap_code).unwrap());
1349 }
1350
    // Stop execution and record `trap_code` in the VM state. Also used directly
    // by handlers (e.g. overlapping-copy detection), not only by the TRAP opcode.
    pub fn internal_trap(&mut self, trap_code: TrapCode) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("vm trap: '{trap_code}'");
        }
        self.state = VmState::Trap(trap_code);

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1365
    #[inline]
    fn execute_panic(&mut self, panic_reason_reg: u8) {
        // PANIC: stop execution and capture the panic message. The register
        // holds the heap address of a VM string containing the reason.
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }

        let heap_addr = get_reg!(self, panic_reason_reg);
        let str = self.read_string(heap_addr, &self.memory);

        #[cfg(feature = "debug_vm")]
        if self.debug_stats_enabled {
            eprintln!("panic: {str}");
        }

        // The message is copied out of VM memory into the state enum.
        self.state = VmState::Panic(str.to_string());
    }
1385
    // Print aggregate execution statistics (opcode count, current and maximum
    // call depth) to stderr. Called from the cfg(debug_vm) paths above.
    fn debug_output(&self) {
        eprintln!(
            "total opcodes executed: {}, call_stack_depth: {}, max_call_depth:{}",
            self.debug.opcodes_executed, self.debug.call_depth, self.debug.max_call_depth
        );
    }
1392
1393 #[inline]
1394 fn execute_mov_reg(&mut self, dst_reg: u8, src_reg: u8) {
1395 self.registers[dst_reg as usize] = self.registers[src_reg as usize];
1396 }
1397
    #[inline]
    fn execute_st_regs_to_frame(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        start_reg: u8,
        count: u8,
    ) {
        // Spill `count` consecutive registers, starting at `start_reg`, into
        // the current frame at the 32-bit little-endian `frame_offset` operand.
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );
        let const_reg_ptr = &self.registers[start_reg as usize] as *const u32;
        let target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        // NOTE(review): assumes `start_reg + count` stays inside the register
        // file and the frame range is in bounds — neither is checked here;
        // confirm the code generator guarantees this.
        unsafe {
            ptr::copy_nonoverlapping(const_reg_ptr, target_ptr, count as usize);
        }
    }
1420
    #[inline]
    fn execute_st_regs_to_frame_using_mask(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        reg_mask: u8,
    ) {
        // Spill a subset of r0..r7 — bit N of `reg_mask` selects register N —
        // into consecutive frame slots starting at `frame_offset`. The target
        // slots are packed: only selected registers advance the target pointer.
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );

        let mut target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        let mut const_reg_ptr = &self.registers[0usize] as *const u32;
        let mut mask = reg_mask;
        for _ in 0..8 {
            if (mask & 0x1) != 0 {
                unsafe {
                    ptr::write(target_ptr, *const_reg_ptr);
                    target_ptr = target_ptr.add(1);
                }
            }
            mask >>= 1;
            // The register pointer advances every iteration, selected or not.
            unsafe {
                const_reg_ptr = const_reg_ptr.add(1);
            }
        }
    }
1453
1454 #[inline]
1455 fn execute_stw_using_base_ptr_and_offset(
1456 &mut self,
1457 base_ptr_reg: u8,
1458 offset_0: u8,
1459 offset_1: u8,
1460 offset_2: u8,
1461 offset_3: u8,
1462 src_reg: u8,
1463 ) {
1464 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1465 let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u32;
1467 let value_to_copy = get_reg!(self, src_reg);
1468
1469 unsafe {
1470 ptr::write(ptr_to_write_to, value_to_copy);
1471 }
1472 }
1473
1474 #[inline]
1475 fn execute_sth_using_base_ptr_and_offset(
1476 &mut self,
1477 base_ptr_reg: u8,
1478 offset_0: u8,
1479 offset_1: u8,
1480 offset_2: u8,
1481 offset_3: u8,
1482 src_reg: u8,
1483 ) {
1484 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1485 let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u16;
1487 let value_to_copy = get_reg!(self, src_reg) as u16;
1488
1489 unsafe {
1490 ptr::write(ptr_to_write_to, value_to_copy);
1491 }
1492 }
1493 #[inline]
1494 fn execute_stb_using_base_ptr_and_offset(
1495 &mut self,
1496 base_ptr_reg: u8,
1497 offset_0: u8,
1498 offset_1: u8,
1499 offset_2: u8,
1500 offset_3: u8,
1501 src_reg: u8,
1502 ) {
1503 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1504 let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset);
1506 let value_to_copy = get_reg!(self, src_reg) as u8;
1507
1508 unsafe {
1509 ptr::write(ptr_to_write_to, value_to_copy);
1510 }
1511 }
1512
1513 #[inline]
1514 pub fn execute_ldb_from_base_ptr_and_offset(
1515 &mut self,
1516 dst_reg: u8,
1517 base_ptr_reg: u8,
1518 offset_0: u8,
1519 offset_1: u8,
1520 offset_2: u8,
1521 offset_3: u8,
1522 ) {
1523 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1524 let ptr_to_read_from = self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset);
1525 unsafe {
1526 set_reg!(self, dst_reg, *ptr_to_read_from);
1527 }
1528 }
1529
1530 #[inline]
1531 pub fn execute_ldw_from_base_ptr_and_offset(
1532 &mut self,
1533 dst_reg: u8,
1534 base_ptr_reg: u8,
1535 offset_0: u8,
1536 offset_1: u8,
1537 offset_2: u8,
1538 offset_3: u8,
1539 ) {
1540 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1541 let ptr_to_read_from =
1542 self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u32;
1543 unsafe {
1544 set_reg!(self, dst_reg, *ptr_to_read_from);
1545 }
1546 }
1547
1548 #[inline]
1549 fn execute_ldw_from_absolute_address(
1550 &mut self,
1551 dst_reg: u8,
1552 addr_0: u8,
1553 addr_1: u8,
1554 addr_2: u8,
1555 addr_3: u8,
1556 ) {
1557 let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);
1558
1559 let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize) as *const u32;
1560
1561 unsafe {
1562 set_reg!(self, dst_reg, *ptr_to_read_from);
1563 }
1564 }
1565
1566 #[inline]
1567 fn execute_ldb_from_absolute_address(
1568 &mut self,
1569 dst_reg: u8,
1570 addr_0: u8,
1571 addr_1: u8,
1572 addr_2: u8,
1573 addr_3: u8,
1574 ) {
1575 let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);
1576
1577 let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize);
1578
1579 unsafe {
1580 set_reg!(self, dst_reg, *ptr_to_read_from);
1581 }
1582 }
1583
1584 #[inline]
1585 pub fn execute_ldh_from_base_ptr_and_offset(
1586 &mut self,
1587 dst_reg: u8,
1588 base_ptr_reg: u8,
1589 offset_0: u8,
1590 offset_1: u8,
1591 offset_2: u8,
1592 offset_3: u8,
1593 ) {
1594 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1595 let ptr_to_read_from =
1596 self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u16;
1597 unsafe {
1598 set_reg!(self, dst_reg, *ptr_to_read_from);
1599 }
1600 }
1601
    #[inline]
    pub fn execute_ld_regs_from_frame(
        &mut self,
        start_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        count: u8,
    ) {
        // Restore `count` consecutive registers, starting at `start_reg`, from
        // the current frame at the 32-bit little-endian `offset` operand.
        // Mirror image of `execute_st_regs_to_frame`.
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let target_reg_ptr = &mut self.registers[start_reg as usize] as *mut u32;
        let source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
        // NOTE(review): assumes `start_reg + count` stays inside the register
        // file and the frame range is in bounds — neither is checked here;
        // confirm the code generator guarantees this.
        unsafe {
            ptr::copy_nonoverlapping(source_frame_start, target_reg_ptr, count as usize);
        }
    }
1619
    #[inline]
    pub fn execute_ld_regs_from_frame_using_mask(
        &mut self,
        reg_mask: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        // Restore a subset of r0..r7 — bit N of `reg_mask` selects register N —
        // from consecutive (packed) frame slots starting at `offset`. Mirror
        // image of `execute_st_regs_to_frame_using_mask`.
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let mut target_reg_ptr = &mut self.registers[0usize] as *mut u32;
        let mut source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
        let mut mask = reg_mask;
        for _ in 0..8 {
            if mask & 0x01 != 0 {
                unsafe {
                    ptr::write(target_reg_ptr, *source_frame_start);
                    // Only selected registers consume a (packed) frame slot.
                    source_frame_start = source_frame_start.add(1);
                }
            }
            mask >>= 1;
            // The register pointer advances every iteration, selected or not.
            unsafe {
                target_reg_ptr = target_reg_ptr.add(1);
            }
        }
    }
1646
1647 #[inline]
1648 fn execute_lea(&mut self, dst_reg: u8, offset_0: u8, offset_1: u8, offset_2: u8, offset_3: u8) {
1649 let current_fp_addr = self.memory.frame_offset as u32;
1650 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1651 set_reg!(self, dst_reg, current_fp_addr + offset);
1652 }
1653
    #[inline]
    pub fn execute_frame_memory_clear(
        &mut self,
        dst_pointer_0: u8,
        dst_pointer_1: u8,
        dst_pointer_2: u8,
        dst_pointer_3: u8,
        memory_size_0: u8,
        memory_size_1: u8,
        memory_size_2: u8,
        memory_size_3: u8,
    ) {
        // Zero-fill `total_bytes` bytes of the current frame starting at
        // `frame_offset` (both decoded from 32-bit little-endian operands).
        let frame_offset =
            u32_from_u8s!(dst_pointer_0, dst_pointer_1, dst_pointer_2, dst_pointer_3);
        let total_bytes = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);

        // NOTE(review): this compares a frame-relative offset against the total
        // memory size rather than the frame's end, and `frame_offset + total_bytes`
        // can wrap in u32 — confirm the intended bound.
        assert!(
            frame_offset + total_bytes < self.memory.memory_size as u32,
            "trying to overwrite memory!"
        );
        let dst_ptr = self.memory.get_frame_ptr(frame_offset);

        unsafe {
            ptr::write_bytes(dst_ptr, 0, total_bytes as usize);
        }
    }
1680
1681 #[inline]
1682 fn execute_mov_mem_with_immediate_size(
1683 &mut self,
1684 dst_pointer_reg: u8,
1685 src_pointer_reg: u8,
1686 memory_size_0: u8,
1687 memory_size_1: u8,
1688 memory_size_2: u8,
1689 memory_size_3: u8,
1690 ) {
1691 let dest_addr = get_reg!(self, dst_pointer_reg);
1692 let src_addr = get_reg!(self, src_pointer_reg);
1693 let memory_size = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);
1694 assert!(
1695 src_addr + memory_size < self.memory.memory_size as u32,
1696 "trying to overwrite memory"
1697 );
1698
1699 let dest_end = dest_addr + memory_size;
1701 let src_end = src_addr + memory_size;
1702
1703 if dest_addr < src_end && src_addr < dest_end {
1704 return self.internal_trap(TrapCode::OverlappingMemoryCopy);
1705 }
1706
1707 #[cfg(feature = "debug_vm")]
1708 if self.debug_operations_enabled {
1709 eprintln!(
1710 "{:04X}> BLKCPY Size={:08X} \n \
1711 DST_ADDR=0x{:08X}\n \
1712 SRC_ADDR=0x{:08X}",
1713 self.pc - 1,
1714 memory_size,
1715 dest_addr,
1716 src_addr,
1717 );
1718 }
1719
1720 let dst_ptr = self.memory.get_heap_ptr(dest_addr as usize);
1721 let src_ptr = self.memory.get_heap_const_ptr(src_addr as usize);
1722
1723 unsafe {
1724 ptr::copy_nonoverlapping(src_ptr, dst_ptr, memory_size as usize);
1725 }
1726 }
1727
    #[inline]
    fn execute_cmp_block(
        &mut self,
        dest_bool_reg: u8,
        src_addr_reg_a: u8,
        src_addr_reg_b: u8,
        size_lower: u8,
        size_upper: u8,
    ) {
        // Byte-wise equality of two memory blocks of `size` bytes (16-bit
        // immediate); writes 1/0 into `dest_bool_reg`.
        let size = u16_from_u8s!(size_lower, size_upper) as usize;

        let arc_addr_a = get_reg!(self, src_addr_reg_a);
        let src_addr_b = get_reg!(self, src_addr_reg_b);

        let src_ptr_a = self.memory.get_heap_const_ptr(arc_addr_a as usize);
        let src_ptr_b = self.memory.get_heap_const_ptr(src_addr_b as usize);

        // NOTE(review): assumes both `addr + size` ranges lie inside VM memory;
        // no bounds check is performed here — confirm callers guarantee it.
        unsafe {
            let slice_a = std::slice::from_raw_parts(src_ptr_a, size);
            let slice_b = std::slice::from_raw_parts(src_ptr_b, size);

            set_reg!(self, dest_bool_reg, slice_a == slice_b);
        }
    }
1752
    // Pretty-print one decoded opcode and its operands to stderr. The number of
    // operands shown is taken from the arity of the handler registered for this
    // opcode in `self.handlers`.
    #[cfg(feature = "debug_vm")]
    pub fn debug_opcode(&self, opcode: u8, operands: &[u8; 8]) {
        eprintln!(
            "{:8} {}",
            OpCode::from(opcode),
            match self.handlers[opcode as usize] {
                HandlerType::Args0(_) => String::new(),
                HandlerType::Args1(_) => format!("{:04X}", operands[0]),
                HandlerType::Args2(_) => format!("{:04X}, {:04X}", operands[0], operands[1]),
                HandlerType::Args3(_) => format!(
                    "{:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2]
                ),
                HandlerType::Args4(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2], operands[3]
                ),
                HandlerType::Args5(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2], operands[3], operands[4],
                ),
                HandlerType::Args6(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2], operands[3], operands[4], operands[5],
                ),
                HandlerType::Args7(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0],
                    operands[1],
                    operands[2],
                    operands[3],
                    operands[4],
                    operands[5],
                    operands[6],
                ),
                HandlerType::Args8(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0],
                    operands[1],
                    operands[2],
                    operands[3],
                    operands[4],
                    operands[5],
                    operands[6],
                    operands[7],
                ),
            }
        );
    }
1802
    // CALL: push a return frame (return address = pc + 1, plus the caller's
    // frame/stack offsets for restoration by RET) and jump to the absolute
    // 32-bit little-endian target address.
    fn execute_call(
        &mut self,
        absolute_pc_a: u8,
        absolute_pc_b: u8,
        absolute_pc_c: u8,
        absolute_pc_d: u8,
    ) {
        let absolute_pc = u32_from_u8s!(absolute_pc_a, absolute_pc_b, absolute_pc_c, absolute_pc_d);
        let return_info = CallFrame {
            return_address: self.pc + 1,
            previous_frame_offset: self.memory.frame_offset,
            previous_stack_offset: self.memory.stack_offset,
        };

        self.call_stack.push(return_info);
        self.pc = absolute_pc as usize;

        // Track call-depth statistics (debug builds only).
        #[cfg(feature = "debug_vm")]
        if self.debug_stats_enabled {
            self.debug.call_depth += 1;
            if self.debug.call_depth > self.debug.max_call_depth {
                self.debug.max_call_depth = self.debug.call_depth;
            }
        }
    }
1829
    #[inline]
    fn execute_host_call(
        &mut self,
        function_id_lower: u8,
        function_id_upper: u8,
        register_count: u8,
        callback: &mut dyn HostFunctionCallback,
    ) {
        // Invoke a host (native) function identified by a 16-bit id, handing it
        // a view over VM memory and the first `register_count + 1` registers.
        let heap = self.memory();

        let function_id = u8s_to_u16!(function_id_lower, function_id_upper);

        // NOTE(review): `HostArgs::new` receives raw pointers into VM memory
        // and the register file; the host must not retain them beyond this
        // call — confirm `HostArgs` documents/enforces that contract.
        unsafe {
            let host_args = HostArgs::new(
                function_id,
                heap.memory,
                heap.memory_size,
                heap.stack_offset,
                self.registers.as_mut_ptr(),
                register_count as usize + 1,
            );

            callback.dispatch_host_call(host_args);
        }
    }
1855
1856 #[allow(clippy::missing_const_for_fn)]
1857 #[inline(always)]
1858 fn execute_enter(
1859 &mut self,
1860 frame_size_0: u8,
1861 frame_size_1: u8,
1862 frame_size_2: u8,
1863 frame_size_3: u8,
1864 ) {
1865 let frame_size = u32_from_u8s!(frame_size_0, frame_size_1, frame_size_2, frame_size_3);
1866 self.memory.set_fp_from_sp(); self.memory.inc_sp(frame_size as usize);
1868 #[cfg(feature = "debug_vm")]
1869 if self.debug_stats_enabled
1870 && self.memory.stack_offset > self.debug.max_stack_offset {
1871 self.debug.max_stack_offset = self.memory.stack_offset - self.memory.stack_start;
1872 }
1873 }
1874
1875 #[inline]
1876 fn execute_ret(&mut self) {
1877 let call_frame = self.call_stack.pop().unwrap();
1878
1879 self.memory.pop(
1880 call_frame.previous_frame_offset,
1881 call_frame.previous_stack_offset,
1882 );
1883
1884 self.pc = call_frame.return_address;
1886 self.pc -= 1; #[cfg(feature = "debug_vm")]
1891 if self.debug_stats_enabled {
1892 self.debug.call_depth -= 1;
1893 }
1894 }
1895
1896 #[inline]
1897 const fn u8s_to_32(a: u8, b: u8, c: u8, d: u8) -> u32 {
1898 u32::from_le_bytes([a, b, c, d])
1899 }
1900
1901 #[inline]
1902 pub fn get_const_ptr_from_reg(&self, reg: u8) -> *const u8 {
1903 let ptr_addr = get_reg!(self, reg);
1904 self.memory.get_heap_const_ptr(ptr_addr as usize)
1905 }
1906
1907 #[inline]
1908 pub fn get_const_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *const u8 {
1909 let ptr_addr = get_reg!(self, reg) + offset;
1910 self.memory.get_heap_const_ptr(ptr_addr as usize)
1911 }
1912
1913 #[inline]
1914 pub fn get_ptr_from_reg(&self, reg: u8) -> *mut u8 {
1915 let ptr_addr = get_reg!(self, reg);
1916 self.memory.get_heap_ptr(ptr_addr as usize)
1917 }
1918
1919 #[inline]
1920 pub fn get_ptr_and_addr_from_reg(&self, reg: u8) -> (*mut u8, u32) {
1921 let ptr_addr = get_reg!(self, reg);
1922 (self.memory.get_heap_ptr(ptr_addr as usize), ptr_addr)
1923 }
1924
1925 #[inline]
1926 pub fn get_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *mut u8 {
1927 let ptr_addr = get_reg!(self, reg) + offset;
1928 self.memory.get_heap_ptr(ptr_addr as usize)
1929 }
1930}