swamp_vm/
lib.rs

1/*
2 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
3 * Licensed under the MIT License. See LICENSE in the project root for license information.
4 */
5extern crate core;
6
7use crate::VmState::Normal;
8use crate::host::{HostArgs, HostFunctionCallback};
9use crate::memory::ExecutionMode::NormalExecution;
10use crate::memory::Memory;
11use fixed32::Fp;
12use std::error::Error;
13use std::fmt::{Display, Formatter};
14use std::ptr;
15use std::str::FromStr;
16use swamp_vm_types::opcode::OpCode;
17use swamp_vm_types::{BinaryInstruction, InstructionPosition};
18
19mod grid;
20pub mod host;
21pub mod map_open;
22pub mod memory;
23pub mod prelude;
24mod range;
25mod sparse;
26mod string;
27mod vec;
28
/// Assembles a `u16` from a little-endian byte pair: `$lsb` fills the low
/// byte and `$msb` the high byte.
///
/// Both operands are widened with `as u16` before any shifting, so the
/// 8-bit left shift cannot overflow the operand type.
#[macro_export]
macro_rules! u8s_to_u16 {
    ($lsb:expr, $msb:expr) => {
        ($lsb as u16) | (($msb as u16) << 8)
    };
}
39
/// Reassembles a little-endian byte pair into an `i16`.
///
/// The bytes are first combined into a `u16` (widened before shifting so
/// nothing overflows), then reinterpreted as `i16`, preserving the sign bit
/// carried in the most significant byte.
#[macro_export]
macro_rules! i16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        (($lsb as u16) | (($msb as u16) << 8)) as i16
    };
}
50
/// Assembles a `u32` from four little-endian bytes: `$lsb` is the lowest
/// byte, `$msb3` the highest. Each byte is widened to `u32` before its
/// shift so no intermediate overflows.
#[macro_export]
macro_rules! u32_from_u8s {
    ($lsb:expr, $msb:expr, $msb2:expr, $msb3:expr) => {
        ($lsb as u32)
            | (($msb as u32) << 8)
            | (($msb2 as u32) << 16)
            | (($msb3 as u32) << 24)
    };
}
57
/// Assembles a `u16` from a little-endian byte pair.
///
/// NOTE(review): this expands to exactly the same expression as
/// `u8s_to_u16!`; both are `#[macro_export]`ed and may have external
/// callers, so both are kept. Consider consolidating on one name.
#[macro_export]
macro_rules! u16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        ($lsb as u16) | (($msb as u16) << 8)
    };
}
64
#[macro_export]
macro_rules! get_reg {
    // Reads the raw u32 contents of register `$reg_idx` from `$vm`.
    // `$vm` is anything with a `registers` array field; the index is
    // widened with `as usize`, so any unsigned byte register index works.
    ($vm:expr, $reg_idx:expr) => {
        $vm.registers[$reg_idx as usize]
    };
}
71
#[macro_export]
macro_rules! set_reg {
    // Stores a value into a register, converting it to u32
    // $vm:expr is the VM state (e.g., `&mut self`)
    // $reg_idx:expr is the destination register index (e.g., `dst_reg`)
    // $value:expr is the value to store (must be convertible to u32)
    // NOTE: `as u32` truncates wider values silently; callers are expected
    // to pass register-sized values.
    ($vm:expr, $reg_idx:expr, $value:expr) => {
        // Use `as u32` to convert the value to the register's storage type
        $vm.registers[$reg_idx as usize] = $value as u32;
    };
}
83
// Instruction-handler signatures, one per operand count (0..=8 raw `u8`
// operands). The dispatch table stores these through `HandlerType`, which
// tags each pointer with its arity so the dispatch loops know how many
// operand bytes to pass.
type Handler0 = fn(&mut Vm);
type Handler1 = fn(&mut Vm, u8);
type Handler2 = fn(&mut Vm, u8, u8);
type Handler3 = fn(&mut Vm, u8, u8, u8);
type Handler4 = fn(&mut Vm, u8, u8, u8, u8);
type Handler5 = fn(&mut Vm, u8, u8, u8, u8, u8);
type Handler6 = fn(&mut Vm, u8, u8, u8, u8, u8, u8);
type Handler7 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8);
type Handler8 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8, u8);
93
/// One entry of the opcode dispatch table: the handler function pointer
/// tagged with how many `u8` operands it consumes (see `Handler0`..`Handler8`).
/// `Copy` so the dispatch loop can read an entry out of the table cheaply.
#[derive(Copy, Clone)]
enum HandlerType {
    Args0(Handler0),
    Args1(Handler1),
    Args2(Handler2),
    Args3(Handler3),
    Args4(Handler4),
    Args5(Handler5),
    Args6(Handler6),
    Args7(Handler7),
    Args8(Handler8),
}
106
/// Execution statistics, updated only when the corresponding debug flags on
/// `Vm` are enabled (see `debug_stats_enabled` and the `debug_vm` feature).
#[derive(Debug, Default)]
pub struct Debug {
    pub opcodes_executed: usize, // Total instructions dispatched so far
    pub call_depth: usize,       // Current call-nesting depth
    pub max_call_depth: usize,   // High-water mark of `call_depth`
}
113
/// Bookkeeping pushed onto `Vm::call_stack` for each active function call,
/// restored when the callee returns.
pub struct CallFrame {
    pub return_address: usize,        // Instruction to return to
    pub previous_frame_offset: usize, // Previous frame position
    pub previous_stack_offset: usize, // Saved stack offset to restore on return — original comment said "Size of this frame", which contradicts the name; TODO confirm
}
119
// Storage width of a single VM register.
// NOTE(review): `Vm::registers` is declared as `[u32; 256]` directly rather
// than `[RegContents; 256]`; this alias appears unreferenced in the visible
// part of the file.
type RegContents = u32;
121
/// Fatal runtime errors the VM can raise; carried by `VmState::Trap`.
///
/// Some variants carry diagnostic payloads (indices, capacities). Decoding
/// from a raw trap byte (`TryFrom<u8>`) cannot recover those payloads and
/// fills them with zeros.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum TrapCode {
    /// Execution was deliberately stopped by the test harness.
    StoppedByTestHarness,
    /// Vec index out of range: the `encountered` index vs. the current
    /// `element_count`.
    VecBoundsFail {
        encountered: usize,
        element_count: usize,
    },
    /// A map has no room for another entry.
    MapOutOfSpace,
    /// Map lookup failed: key not present.
    MapEntryNotFound,
    /// Key missing and a new entry could not be reserved.
    MapEntryNotFoundAndCouldNotBeCreated,
    /// Map removal failed: key not present.
    MapEntryNotFoundForRemoval,
    /// Raised by the `TrapOnLessThan` instruction, carrying both operands.
    LessThanTrap {
        a: u32,
        b: u32,
    },
    /// A sparse collection has no room for another entry.
    SparseOutOfSpace,
    /// Removal from a sparse collection failed.
    SparseRemoveFailed,
    /// Lookup in a sparse collection failed.
    SparseGetFailed,
    /// A map could not be copied.
    MapCouldNotBeCopied,
    /// Source and destination of a block copy overlap.
    OverlappingMemoryCopy,
    /// Memory-corruption check failed.
    MemoryCorruption,
    /// Vec push exceeded its fixed capacity.
    VecOutOfCapacity {
        encountered: u16,
        capacity: u16,
    },
    /// Operation requires a non-empty vec.
    VecEmpty,
    /// A vec was used before being initialized.
    VecNeverInitialized,
    /// Grid x coordinate out of range for the grid's `width`.
    GridBoundsXFail {
        x: u32,
        width: u16,
    },
    /// Grid y coordinate out of range for the grid's `height`.
    GridBoundsYFail {
        y: u32,
        height: u16,
    },
    /// Grid access out of range.
    GridBoundsFail,
    /// A string operation encountered invalid UTF-8.
    InvalidUtf8Sequence,
}
160
impl TryFrom<u8> for TrapCode {
    type Error = ();

    /// Decodes the raw trap byte carried by a `Trap` instruction.
    ///
    /// Payload-carrying variants are reconstructed with zeroed fields,
    /// since the byte alone carries no diagnostics.
    ///
    /// NOTE(review): only codes 0..=11 are mapped; the later `TrapCode`
    /// variants (`MemoryCorruption` onward) have no byte encoding here —
    /// presumably they are not yet emitted as raw trap codes; confirm
    /// against the trap-code emitter.
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        let code = match value {
            0 => Self::StoppedByTestHarness,
            1 => Self::VecBoundsFail {
                encountered: 0,
                element_count: 0,
            }, // TODO: Fix this
            2 => Self::MapOutOfSpace,
            3 => Self::MapEntryNotFound,
            4 => Self::MapEntryNotFoundAndCouldNotBeCreated,
            5 => Self::MapEntryNotFoundForRemoval,
            6 => Self::LessThanTrap { a: 0, b: 0 },
            7 => Self::SparseOutOfSpace,
            8 => Self::SparseRemoveFailed,
            9 => Self::SparseGetFailed,
            10 => Self::MapCouldNotBeCopied,
            11 => Self::OverlappingMemoryCopy,
            // Any unmapped byte value is rejected.
            _ => return Err(()),
        };
        Ok(code)
    }
}
186
/// Error returned by `TrapCode::from_str` when the input matches no known
/// trap name.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseTrapCodeError;

impl Display for ParseTrapCodeError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Fixed message, so `write_str` suffices — no formatting machinery.
        f.write_str("Unable to parse string into a valid TrapCode")
    }
}

impl Error for ParseTrapCodeError {}
impl FromStr for TrapCode {
    type Err = ParseTrapCodeError;

    /// Parses the snake_case trap names used when traps are specified as
    /// text (e.g. by the test harness).
    ///
    /// Payload-carrying variants are reconstructed with zeroed fields,
    /// since the name alone carries no diagnostics.
    ///
    /// NOTE(review): this mirrors the `TryFrom<u8>` subset — the later
    /// `TrapCode` variants (`MemoryCorruption` onward) have no string
    /// mapping and are unparsable; confirm whether that is intentional.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let code = match s {
            "stopped_by_test_harness" => Self::StoppedByTestHarness,
            "vec_bounds_fail" => Self::VecBoundsFail {
                encountered: 0,
                element_count: 0,
            }, // TODO: FIX
            "map_out_of_space" => Self::MapOutOfSpace,
            "map_entry_not_found" => Self::MapEntryNotFound,
            "map_entry_or_create_failed" => Self::MapEntryNotFoundAndCouldNotBeCreated,
            "map_entry_remove_failed" => Self::MapEntryNotFoundForRemoval,
            "less_than_trap" => Self::LessThanTrap { a: 0, b: 0 },
            "sparse_out_of_space" => Self::SparseOutOfSpace,
            "sparse_remove_failed" => Self::SparseRemoveFailed,
            "sparse_get_failed" => Self::SparseGetFailed,
            "map_could_not_be_copied" => Self::MapCouldNotBeCopied,
            "overlapping_memory_copy" => Self::OverlappingMemoryCopy,
            // Any unknown name is rejected.
            _ => return Err(ParseTrapCodeError),
        };

        Ok(code)
    }
}
222
223impl Display for TrapCode {
224    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
225        write!(f, "trap {:?}", self)
226    }
227}
228
/// Overall run state of the VM, inspected by the host after execution
/// returns.
#[derive(Eq, Debug, PartialEq)]
pub enum VmState {
    /// Running (or finished) without incident.
    Normal,
    /// A panic occurred; carries the panic message.
    Panic(String),
    /// A trap fired; carries the decoded trap code.
    Trap(TrapCode),
    /// Execution halted. // presumably set by the UserHalt handler — TODO confirm
    Halt,
    /// Execution paused for single-stepping. // presumably set by the Step handler — TODO confirm
    Step,
}
237
/// The Swamp bytecode virtual machine: linear memory, a register file, the
/// instruction stream, and an opcode -> handler dispatch table.
pub struct Vm {
    // Memory
    memory: Memory,

    // Execution state
    pc: usize,                            // Instruction pointer
    instructions: Vec<BinaryInstruction>, // Bytecode
    execution_complete: bool,             // Flag for completion

    // Function call management
    call_stack: Vec<CallFrame>, // Track function calls

    // Dispatch table indexed by the raw opcode byte; unregistered slots
    // point at `execute_unimplemented`.
    handlers: [HandlerType; 256],

    pub registers: [u32; 256], // Normal CPUs have around 31 general purpose registers

    // TODO: Error state
    pub debug: Debug,                   // Execution statistics (see `Debug`)
    pub debug_stats_enabled: bool,      // Count executed opcodes etc.
    pub debug_opcodes_enabled: bool,    // Trace each opcode as it executes
    pub debug_operations_enabled: bool, // Log higher-level VM operations to stderr
    pub state: VmState,                 // Outcome of the last run
}
261
impl Vm {
    /// True once a halting instruction (or trap/panic handler) has marked
    /// execution as finished.
    #[must_use]
    pub const fn is_execution_complete(&self) -> bool {
        self.execution_complete
    }
}
268
// Byte alignment used for VM memory layout calculations.
const ALIGNMENT: usize = 8;
// Low bits that must be clear in an aligned value (0b111 for 8-byte alignment).
const ALIGNMENT_REST: usize = ALIGNMENT - 1;
// AND-mask that rounds an address down to the previous aligned boundary.
const ALIGNMENT_MASK: usize = !ALIGNMENT_REST;
272
/// Configuration handed to `Vm::new`: memory-region sizes, the constant
/// pool, and debug switches.
pub struct VmSetup {
    pub stack_memory_size: usize, // Bytes reserved for the stack region
    pub heap_memory_size: usize,  // Bytes reserved for the heap region
    pub constant_memory: Vec<u8>, // Constant pool; must be smaller than half the stack size
    pub debug_stats_enabled: bool,
    pub debug_opcodes_enabled: bool,
    pub debug_operations_enabled: bool,
}
281
282impl Vm {
283    #[allow(clippy::too_many_lines)]
284    pub fn new(instructions: Vec<BinaryInstruction>, setup: VmSetup) -> Self {
285        let memory = Memory::new(
286            &setup.constant_memory,
287            setup.stack_memory_size,
288            setup.heap_memory_size,
289        );
290
291        assert!(
292            setup.constant_memory.len() < setup.stack_memory_size / 2,
293            "too much constant memory"
294        );
295
296        let mut vm = Self {
297            memory, // Raw memory pointer
298            pc: 0,
299            instructions,
300            execution_complete: false,
301            call_stack: vec![],
302            handlers: [const { HandlerType::Args0(Self::execute_unimplemented) }; 256],
303            registers: [const { 0 }; 256],
304            debug: Debug {
305                opcodes_executed: 0,
306                call_depth: 0,
307                max_call_depth: 0,
308            },
309            debug_stats_enabled: setup.debug_stats_enabled,
310            debug_opcodes_enabled: setup.debug_opcodes_enabled,
311            debug_operations_enabled: setup.debug_operations_enabled,
312            state: Normal,
313        };
314
315        /*
316            TODO: @idea: Instead of storing function pointers, the instructions vector itself
317            includes the pointer to the instruction's handler code.
318
319            type HandlerPointer = fn(&mut Vm, &FixedSizeOperandBlock);
320            type FixedSizeOperandBlock = [u8; 8];
321
322            BinaryInstruction has a field for HandlerPointer.
323        */
324
325        //vm.handlers[OpCode::Alloc as usize] = HandlerType::Args3(Self::execute_alloc);
326
327        // Store
328        vm.handlers[OpCode::StRegToFrame as usize] =
329            HandlerType::Args6(Self::execute_st_regs_to_frame);
330        vm.handlers[OpCode::StRegToFrameUsingMask as usize] =
331            HandlerType::Args5(Self::execute_st_regs_to_frame_using_mask);
332
333        vm.handlers[OpCode::St32UsingPtrWithOffset as usize] =
334            HandlerType::Args6(Self::execute_stw_using_base_ptr_and_offset);
335        vm.handlers[OpCode::St16UsingPtrWithOffset as usize] =
336            HandlerType::Args6(Self::execute_sth_using_base_ptr_and_offset);
337        vm.handlers[OpCode::St8UsingPtrWithOffset as usize] =
338            HandlerType::Args6(Self::execute_stb_using_base_ptr_and_offset);
339
340        // Load
341        vm.handlers[OpCode::LdRegFromFrameRange as usize] =
342            HandlerType::Args6(Self::execute_ld_regs_from_frame);
343        vm.handlers[OpCode::LdRegFromFrameUsingMask as usize] =
344            HandlerType::Args5(Self::execute_ld_regs_from_frame_using_mask);
345
346        vm.handlers[OpCode::Ld32FromPointerWithOffset as usize] =
347            HandlerType::Args6(Self::execute_ldw_from_base_ptr_and_offset);
348        vm.handlers[OpCode::Ld16FromPointerWithOffset as usize] =
349            HandlerType::Args6(Self::execute_ldh_from_base_ptr_and_offset);
350        vm.handlers[OpCode::Ld8FromPointerWithOffset as usize] =
351            HandlerType::Args6(Self::execute_ldb_from_base_ptr_and_offset);
352
353        // Load immediate
354        vm.handlers[OpCode::Mov8FromImmediateValue as usize] =
355            HandlerType::Args2(Self::execute_mov_8);
356        vm.handlers[OpCode::Mov16FromImmediateValue as usize] =
357            HandlerType::Args3(Self::execute_mov_16);
358        vm.handlers[OpCode::Mov32FromImmediateValue as usize] =
359            HandlerType::Args5(Self::execute_mov_32);
360
361        // Copy data in frame memory
362        vm.handlers[OpCode::MovReg as usize] = HandlerType::Args2(Self::execute_mov_reg);
363        vm.handlers[OpCode::LdPtrFromEffectiveFrameAddress as usize] =
364            HandlerType::Args5(Self::execute_lea);
365
366        vm.handlers[OpCode::Ld32FromAbsoluteAddress as usize] =
367            HandlerType::Args5(Self::execute_ldw_from_absolute_address);
368
369        vm.handlers[OpCode::Ld8FromAbsoluteAddress as usize] =
370            HandlerType::Args5(Self::execute_ldb_from_absolute_address);
371
372        // Copy to and from heap
373        vm.handlers[OpCode::BlockCopyWithOffsets as usize] =
374            HandlerType::Args3(Self::execute_mov_mem_with_offsets);
375        vm.handlers[OpCode::BlockCopy as usize] =
376            HandlerType::Args6(Self::execute_mov_mem_with_immediate_size);
377        vm.handlers[OpCode::BlockCopyWithOffsetsVariableSize as usize] =
378            HandlerType::Args3(Self::execute_mov_mem_with_offsets_with_variable_size);
379
380        vm.handlers[OpCode::FrameMemClr as usize] =
381            HandlerType::Args8(Self::execute_frame_memory_clear);
382
383        // Comparisons - Int
384        vm.handlers[OpCode::LtI32 as usize] = HandlerType::Args3(Self::execute_lt_i32);
385        vm.handlers[OpCode::LeI32 as usize] = HandlerType::Args3(Self::execute_le_i32);
386        vm.handlers[OpCode::GtI32 as usize] = HandlerType::Args3(Self::execute_gt_i32);
387        vm.handlers[OpCode::GeI32 as usize] = HandlerType::Args3(Self::execute_ge_i32);
388
389        vm.handlers[OpCode::GeU32 as usize] = HandlerType::Args3(Self::execute_ge_u32);
390        vm.handlers[OpCode::LtU32 as usize] = HandlerType::Args3(Self::execute_lt_u32);
391
392        // Comparison
393        vm.handlers[OpCode::CmpReg as usize] = HandlerType::Args3(Self::execute_cmp_reg);
394        vm.handlers[OpCode::CmpBlock as usize] = HandlerType::Args5(Self::execute_cmp_block);
395
396        vm.handlers[OpCode::Eq8Imm as usize] = HandlerType::Args3(Self::execute_eq_8_imm);
397        vm.handlers[OpCode::TrapOnLessThan as usize] =
398            HandlerType::Args2(Self::execute_trap_on_less_than);
399
400        // Logical Operations
401        vm.handlers[OpCode::MovEqualToZero as usize] =
402            HandlerType::Args2(Self::execute_move_equal_to_zero);
403
404        // Conditional jumps
405        vm.handlers[OpCode::BFalse as usize] = HandlerType::Args3(Self::execute_branch_if_false);
406        vm.handlers[OpCode::BTrue as usize] = HandlerType::Args3(Self::execute_branch_if_true);
407
408        // Unconditional jump
409        vm.handlers[OpCode::B as usize] = HandlerType::Args2(Self::execute_b);
410
411        // Operators - Int
412        vm.handlers[OpCode::AddU32 as usize] = HandlerType::Args3(Self::execute_add_u32);
413        vm.handlers[OpCode::AddU32Imm as usize] = HandlerType::Args6(Self::execute_add_u32_imm);
414        vm.handlers[OpCode::MulU32 as usize] = HandlerType::Args3(Self::execute_mul_u32);
415        vm.handlers[OpCode::SubU32 as usize] = HandlerType::Args3(Self::execute_sub_u32);
416
417        vm.handlers[OpCode::NegI32 as usize] = HandlerType::Args2(Self::execute_neg_i32);
418        vm.handlers[OpCode::ModI32 as usize] = HandlerType::Args3(Self::execute_mod_i32);
419        vm.handlers[OpCode::DivI32 as usize] = HandlerType::Args3(Self::execute_div_i32);
420
421        // Operators - Float (Fixed Point)
422        vm.handlers[OpCode::DivF32 as usize] = HandlerType::Args3(Self::execute_div_f32);
423        vm.handlers[OpCode::MulF32 as usize] = HandlerType::Args3(Self::execute_mul_f32);
424
425        // Call, enter, ret
426        vm.handlers[OpCode::Call as usize] = HandlerType::Args4(Self::execute_call);
427        vm.handlers[OpCode::Enter as usize] = HandlerType::Args4(Self::execute_enter);
428        vm.handlers[OpCode::Ret as usize] = HandlerType::Args0(Self::execute_ret);
429
430        //vm.handlers[OpCode::HostCall as usize] = HandlerType::Args3(Self::execute_host_call);
431
432        // Halt - return to host
433        vm.handlers[OpCode::Hlt as usize] = HandlerType::Args0(Self::execute_hlt);
434        vm.handlers[OpCode::UserHalt as usize] = HandlerType::Args0(Self::execute_user_halt);
435        vm.handlers[OpCode::Step as usize] = HandlerType::Args0(Self::execute_step);
436        vm.handlers[OpCode::Trap as usize] = HandlerType::Args1(Self::execute_trap);
437        vm.handlers[OpCode::Panic as usize] = HandlerType::Args1(Self::execute_panic);
438
439        // Bool
440        vm.handlers[OpCode::CharToString as usize] =
441            HandlerType::Args2(Self::execute_char_to_string);
442
443        // Bool
444        vm.handlers[OpCode::BoolToString as usize] =
445            HandlerType::Args2(Self::execute_bool_to_string);
446
447        // String
448        vm.handlers[OpCode::StringAppend as usize] =
449            HandlerType::Args3(Self::execute_string_append);
450
451        vm.handlers[OpCode::StringCmp as usize] = HandlerType::Args3(Self::execute_string_cmp);
452        vm.handlers[OpCode::StringToString as usize] =
453            HandlerType::Args2(Self::execute_string_to_string);
454
455        vm.handlers[OpCode::StringIterInit as usize] =
456            HandlerType::Args2(Self::execute_string_iter_init);
457        vm.handlers[OpCode::StringIterNext as usize] =
458            HandlerType::Args4(Self::execute_string_iter_next);
459        vm.handlers[OpCode::StringIterNextPair as usize] =
460            HandlerType::Args5(Self::execute_string_iter_next_pair);
461
462        vm.handlers[OpCode::ByteToString as usize] =
463            HandlerType::Args2(Self::execute_byte_to_string);
464
465        // Int
466        vm.handlers[OpCode::IntToRnd as usize] =
467            HandlerType::Args2(Self::execute_pseudo_random_i32);
468        vm.handlers[OpCode::IntMin as usize] = HandlerType::Args3(Self::execute_min_i32);
469        vm.handlers[OpCode::IntMax as usize] = HandlerType::Args3(Self::execute_max_i32);
470        vm.handlers[OpCode::IntClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);
471
472        vm.handlers[OpCode::IntAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);
473
474        vm.handlers[OpCode::IntToString as usize] = HandlerType::Args2(Self::execute_i32_to_string);
475        vm.handlers[OpCode::IntToFloat as usize] = HandlerType::Args2(Self::execute_i32_to_f32);
476
477        // Float (Fixed Point)
478        vm.handlers[OpCode::FloatPseudoRandom as usize] =
479            HandlerType::Args2(Self::execute_pseudo_random_i32);
480        vm.handlers[OpCode::FloatMin as usize] = HandlerType::Args3(Self::execute_min_i32);
481        vm.handlers[OpCode::FloatMax as usize] = HandlerType::Args3(Self::execute_max_i32);
482        vm.handlers[OpCode::FloatClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);
483
484        vm.handlers[OpCode::FloatRound as usize] = HandlerType::Args2(Self::execute_f32_round);
485        vm.handlers[OpCode::FloatFloor as usize] = HandlerType::Args2(Self::execute_f32_floor);
486        vm.handlers[OpCode::FloatSqrt as usize] = HandlerType::Args2(Self::execute_f32_sqrt);
487        vm.handlers[OpCode::FloatSign as usize] = HandlerType::Args2(Self::execute_f32_sign);
488        vm.handlers[OpCode::FloatAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);
489        vm.handlers[OpCode::FloatSin as usize] = HandlerType::Args2(Self::execute_f32_sin);
490        vm.handlers[OpCode::FloatCos as usize] = HandlerType::Args2(Self::execute_f32_cos);
491        vm.handlers[OpCode::FloatAsin as usize] = HandlerType::Args2(Self::execute_f32_asin);
492        vm.handlers[OpCode::FloatAcos as usize] = HandlerType::Args2(Self::execute_f32_acos);
493        // vm.handlers[OpCode::FloatAtan2 as usize] = HandlerType::Args3(Self::execute_f32_atan2); // TODO:
494        vm.handlers[OpCode::FloatToString as usize] =
495            HandlerType::Args2(Self::execute_f32_to_string);
496        vm.handlers[OpCode::FloatPseudoRandom as usize] =
497            HandlerType::Args2(Self::execute_pseudo_random_i32);
498
499        // Collections ==========
500
501        // Range
502        vm.handlers[OpCode::RangeInit as usize] = HandlerType::Args4(Self::execute_range_init);
503        vm.handlers[OpCode::RangeIterInit as usize] =
504            HandlerType::Args2(Self::execute_range_iter_init);
505        vm.handlers[OpCode::RangeIterNext as usize] =
506            HandlerType::Args4(Self::execute_range_iter_next);
507
508        // Array
509        vm.handlers[OpCode::ArrayInitWithLenAndCapacity as usize] =
510            HandlerType::Args7(Self::execute_array_init);
511
512        // Vec
513        vm.handlers[OpCode::VecInit as usize] = HandlerType::Args7(Self::execute_vec_init);
514        vm.handlers[OpCode::VecCopy as usize] = HandlerType::Args2(Self::execute_vec_copy);
515        vm.handlers[OpCode::VecCmp as usize] = HandlerType::Args3(Self::execute_vec_cmp);
516        vm.handlers[OpCode::VecIterInit as usize] = HandlerType::Args2(Self::execute_vec_iter_init);
517        vm.handlers[OpCode::VecIterNext as usize] = HandlerType::Args4(Self::execute_vec_iter_next);
518        vm.handlers[OpCode::VecIterNextPair as usize] =
519            HandlerType::Args5(Self::execute_vec_iter_next_pair);
520        vm.handlers[OpCode::VecPushAddr as usize] = HandlerType::Args2(Self::execute_vec_push_addr);
521        vm.handlers[OpCode::VecGet as usize] = HandlerType::Args3(Self::execute_vec_get);
522        vm.handlers[OpCode::VecPop as usize] = HandlerType::Args2(Self::execute_vec_pop);
523        vm.handlers[OpCode::VecRemoveIndex as usize] =
524            HandlerType::Args2(Self::execute_vec_remove_index);
525
526        // Map
527        vm.handlers[OpCode::MapInitWithCapacityAndKeyAndTupleSizeAddr as usize] =
528            HandlerType::Args7(Self::execute_map_open_addressing_init);
529        vm.handlers[OpCode::MapIterInit as usize] = HandlerType::Args2(Self::execute_map_iter_init);
530        vm.handlers[OpCode::MapIterNext as usize] = HandlerType::Args4(Self::execute_map_iter_next);
531        vm.handlers[OpCode::MapIterNextPair as usize] =
532            HandlerType::Args5(Self::execute_map_iter_next_pair);
533        vm.handlers[OpCode::MapGetEntryLocation as usize] =
534            HandlerType::Args3(Self::execute_map_open_addressing_get_entry_location);
535        vm.handlers[OpCode::MapGetOrReserveEntryLocation as usize] =
536            HandlerType::Args3(Self::execute_map_open_addressing_get_or_reserve_entry);
537        vm.handlers[OpCode::MapHas as usize] =
538            HandlerType::Args3(Self::execute_map_open_addressing_has);
539        vm.handlers[OpCode::MapRemove as usize] =
540            HandlerType::Args2(Self::execute_map_open_addressing_remove);
541        vm.handlers[OpCode::MapOverwrite as usize] =
542            HandlerType::Args2(Self::execute_map_overwrite);
543
544        // Sparse
545        vm.handlers[OpCode::SparseInit as usize] = HandlerType::Args7(Self::execute_sparse_init);
546        vm.handlers[OpCode::SparseAddGiveEntryAddress as usize] =
547            HandlerType::Args7(Self::execute_sparse_add_get_entry_addr);
548        vm.handlers[OpCode::SparseRemove as usize] =
549            HandlerType::Args2(Self::execute_sparse_remove);
550        vm.handlers[OpCode::SparseGetEntryAddr as usize] =
551            HandlerType::Args5(Self::execute_sparse_get_entry_addr);
552        vm.handlers[OpCode::SparseIsAlive as usize] =
553            HandlerType::Args3(Self::execute_sparse_is_alive);
554
555        vm.handlers[OpCode::SparseIterInit as usize] =
556            HandlerType::Args2(Self::execute_sparse_iter_init);
557        vm.handlers[OpCode::SparseIterNext as usize] =
558            HandlerType::Args4(Self::execute_sparse_iter_next);
559        vm.handlers[OpCode::SparseIterNextPair as usize] =
560            HandlerType::Args5(Self::execute_sparse_iter_next_pair);
561
562        vm.handlers[OpCode::GridInit as usize] = HandlerType::Args6(Self::execute_grid_init);
563        vm.handlers[OpCode::GridGetEntryAddr as usize] =
564            HandlerType::Args6(Self::execute_grid_get_entry_addr);
565
566        vm
567    }
    /// Read-only view of the VM's memory.
    #[must_use]
    pub const fn memory(&self) -> &Memory {
        &self.memory
    }
572
    /// Mutable view of the VM's memory.
    pub fn memory_mut(&mut self) -> &mut Memory {
        &mut self.memory
    }
576
    /// Executes exactly one instruction at the current program counter.
    ///
    /// Returns `true` while execution should continue, `false` once a
    /// halting instruction has set `execution_complete`.
    pub fn step(&mut self, host_function_callback: &mut dyn HostFunctionCallback) -> bool {
        let instruction = &self.instructions[self.pc];
        let opcode = instruction.opcode;

        // Sanity-check the stack pointer stays inside its region (skipped
        // in non-normal execution modes, e.g. constant evaluation).
        if self.memory.execution_mode == NormalExecution {
            assert!(self.memory.stack_offset >= self.memory.constant_memory_size);
            assert!(self.memory.stack_offset <= self.memory.heap_start);
        }

        self.pc += 1; // IP must be added BEFORE handling the instruction

        // HostCall is dispatched outside the handler table because it needs
        // the host callback, which table handlers do not receive.
        if opcode == OpCode::HostCall as u8 {
            self.execute_host_call(
                instruction.operands[0],
                instruction.operands[1],
                instruction.operands[2],
                host_function_callback,
            );
        } else {
            // Fan out to the registered handler, passing exactly the number
            // of operand bytes its arity tag declares.
            match self.handlers[opcode as usize] {
                HandlerType::Args0(handler) => handler(self),
                HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                HandlerType::Args2(handler) => {
                    handler(self, instruction.operands[0], instruction.operands[1]);
                }
                HandlerType::Args3(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                ),
                HandlerType::Args4(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                ),
                HandlerType::Args5(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                ),
                HandlerType::Args6(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                ),
                HandlerType::Args7(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                ),
                HandlerType::Args8(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                    instruction.operands[7],
                ),
            }
        }

        !self.execution_complete
    }
658
    /// Runs the dispatch loop until an instruction sets
    /// `execution_complete` (e.g. via the `Hlt`, `UserHalt`, `Step`, `Trap`
    /// or `Panic` handlers).
    ///
    /// NOTE: this duplicates the dispatch logic in `step` so the hot loop
    /// stays free of per-call overhead.
    #[allow(clippy::too_many_lines)]
    pub fn execute_internal(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execution_complete = false;

        while !self.execution_complete {
            let instruction = &self.instructions[self.pc];
            let opcode = instruction.opcode;

            // Optional per-instruction tracing: dump a fixed set of
            // registers, then the decoded opcode (debug builds only).
            #[cfg(feature = "debug_vm")]
            if self.debug_opcodes_enabled {
                let regs = [0, 1, 2, 3, 4, 128, 129, 130];

                for reg in regs {
                    print!(
                        "{}",
                        tinter::bright_black(&format!("{reg:02X}: {:08X}, ", self.registers[reg]))
                    );
                }
                println!();

                let operands = instruction.operands;
                print!("> {:04X}: ", self.pc);
                self.debug_opcode(opcode, &operands);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_stats_enabled {
                self.debug.opcodes_executed += 1;
            }

            self.pc += 1; // IP must be added BEFORE handling the instruction

            // HostCall is dispatched outside the handler table because it
            // needs the host callback, which table handlers do not receive.
            if opcode == OpCode::HostCall as u8 {
                self.execute_host_call(
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    host_function_callback,
                );
            } else {
                // Fan out to the registered handler, passing exactly the
                // number of operand bytes its arity tag declares.
                match self.handlers[opcode as usize] {
                    HandlerType::Args0(handler) => handler(self),
                    HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                    HandlerType::Args2(handler) => {
                        handler(self, instruction.operands[0], instruction.operands[1]);
                    }
                    HandlerType::Args3(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                    ),
                    HandlerType::Args4(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                    ),
                    HandlerType::Args5(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                    ),
                    HandlerType::Args6(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                    ),
                    HandlerType::Args7(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                    ),
                    HandlerType::Args8(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                        instruction.operands[7],
                    ),
                }
            }
        }
    }
760
    /// Writes `r0_addr` into register 0, which by convention holds the
    /// address the callee writes its return value to.
    pub const fn set_return_register_address(&mut self, r0_addr: u32) {
        set_reg!(self, 0, r0_addr);
    }
764
765    pub fn set_register_pointer_addr_for_parameter(&mut self, register: u8, addr: u32) {
766        assert!(register >= 1 && register <= 6, "not a parameter register");
767        set_reg!(self, register, addr);
768    }
769
    /// Places both the stack pointer and the frame pointer at `addr`.
    pub fn set_stack_start(&mut self, addr: usize) {
        if self.debug_operations_enabled {
            eprintln!("vm: set stack start and frame to: 0x{addr:08X}");
        }
        self.memory.set_stack_and_frame(addr);
    }
776
    /// Continues execution from the current program counter without
    /// resetting any VM state (stack, frame pointer, or call stack).
    pub fn resume(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execute_internal(host_function_callback);
    }
780
    /// Starts execution at instruction position `ip` with a fresh call
    /// stack and reset memory offsets, then runs until the VM halts,
    /// traps, or completes.
    pub fn execute_from_ip(
        &mut self,
        ip: &InstructionPosition,
        host_function_callback: &mut dyn HostFunctionCallback,
    ) {
        self.pc = ip.0 as usize;
        if self.debug_operations_enabled {
            eprintln!(
                "starting up the vm, normal_stack_start: {:08X} SP:{:08X} FP:{:08X}",
                self.memory.stack_start, self.memory.stack_offset, self.memory.frame_offset
            );
        }

        // Every fresh run starts from an empty call stack and reset offsets.
        self.call_stack.clear();
        self.memory.reset_offset();

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            eprintln!(
                "start executing --------- frame {:X} heap: {:X}",
                self.memory.frame_offset, self.memory.heap_alloc_offset
            );
        }

        self.execute_internal(host_function_callback);
    }
807
    /// Sets the program counter to the given instruction position.
    pub const fn set_pc(&mut self, pc: &InstructionPosition) {
        self.pc = pc.0 as usize;
    }
811
    /// Current program counter (index into the instruction stream).
    pub const fn pc(&self) -> usize {
        self.pc
    }
815
816    pub fn fp(&self) -> usize {
817        self.memory.frame_offset
818    }
819
820    pub fn sp(&self) -> usize {
821        self.memory.stack_offset
822    }
823
    /// The active call frames, innermost frame last.
    pub fn call_stack(&self) -> &[CallFrame] {
        &self.call_stack
    }
827
    /// Handler for opcodes with no implementation. The pc has already been
    /// advanced past the fetched instruction, hence `pc - 1`.
    /// Prints diagnostics and aborts the VM via `panic!`.
    fn execute_unimplemented(&mut self) {
        let unknown_opcode = OpCode::from(self.instructions[self.pc - 1].opcode);
        eprintln!("error: opcode not implemented: {unknown_opcode} {unknown_opcode:?}");
        eprintln!("VM runtime halted.");
        self.debug_output();
        panic!("unknown OPCODE! {unknown_opcode} {unknown_opcode:?}");
    }
835
    /// View of memory starting at the current frame pointer.
    ///
    /// NOTE(review): the slice length is the full `memory_size` measured
    /// from the frame pointer, which looks like it can extend past the end
    /// of the underlying allocation whenever the frame pointer is
    /// non-zero — confirm intended.
    pub fn frame_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.frame_ptr(), self.memory.memory_size) }
    }
839
    /// View of the entire VM memory, starting at heap address 0.
    pub fn heap_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), self.memory.memory_size) }
    }
843
    /// View of the constant section, which occupies the start of VM memory.
    pub fn constant_memory(&self) -> &[u8] {
        unsafe {
            std::slice::from_raw_parts(
                self.memory.get_heap_ptr(0),
                self.memory.constant_memory_size,
            )
        }
    }
852
    /// View of VM memory from address 0 up to (but not including) `offset`.
    pub fn all_memory_up_to(&self, offset: usize) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), offset) }
    }
856
    /// Size in bytes of the constant section.
    pub fn constant_size(&self) -> usize {
        self.memory.constant_memory_size
    }
    /// The currently loaded instruction stream.
    #[must_use]
    #[allow(clippy::missing_const_for_fn)]
    pub fn instructions(&self) -> &[BinaryInstruction] {
        &self.instructions
    }
865    pub fn reset(&mut self) {
866        self.memory.reset();
867        self.memory.reset_allocator();
868
869        self.pc = 0;
870        self.execution_complete = false;
871        self.call_stack.clear();
872    }
873
    /// Rewinds the allocator and the stack/frame pointers and empties the
    /// call stack, without wiping memory contents.
    /// The program counter is deliberately left unchanged.
    pub fn reset_stack_and_heap_to_constant_limit(&mut self) {
        self.memory.reset_allocator();
        self.memory.reset_stack_and_fp();
        self.reset_call_stack();
        self.execution_complete = false;
    }
881
    /// Empties the call stack; memory is left untouched.
    pub fn reset_call_stack(&mut self) {
        self.call_stack.clear();
    }
886
    /// Clears the accumulated debug statistics (opcodes executed, call depth).
    pub fn reset_debug(&mut self) {
        self.debug = Debug::default();
    }
890
    /// Current frame pointer offset (returns the same field as `fp()`).
    #[must_use]
    pub fn frame_offset(&self) -> usize {
        self.memory.frame_offset
    }
895
    /// Replaces the instruction stream and rewinds the program counter so
    /// the next run starts from instruction 0.
    pub fn load_bytecode(&mut self, instructions: Vec<BinaryInstruction>) {
        self.instructions = instructions;
        self.pc = 0;
        self.execution_complete = false;
    }
901
    /// MOV32: loads the 32-bit immediate carried in operand bytes `a..d`
    /// (assembled by `Self::u8s_to_32`) into `dst_reg`.
    #[inline]
    fn execute_mov_32(&mut self, dst_reg: u8, a: u8, b: u8, c: u8, d: u8) {
        set_reg!(self, dst_reg, Self::u8s_to_32(a, b, c, d));
    }
906
    /// MOV16: loads a 16-bit immediate (least significant byte first)
    /// into `dst_reg`.
    #[inline]
    fn execute_mov_16(&mut self, dst_reg: u8, a: u8, b: u8) {
        set_reg!(self, dst_reg, u16_from_u8s!(a, b));
    }
911
    /// MOV8: loads an 8-bit immediate into `dst_reg`.
    #[inline]
    fn execute_mov_8(&mut self, dst_reg: u8, octet: u8) {
        set_reg!(self, dst_reg, octet);
    }
916
917    /*
918    #[inline]
919    pub fn execute_unwrap_jmp_some(&mut self, wrapped_ptr_reg: u8, jmp_ip_0: u8, jmp_ip_1: u8) {
920        get_reg!(self, wrapped_ptr_reg, Ptr => ptr_addr);
921        let ptr = self.memory.get_heap_const_ptr(ptr_addr as usize);
922        unsafe {
923            if *ptr != 0 {
924                self.ip = u8s_to_u16!(jmp_ip_0, jmp_ip_1) as usize;
925            }
926        }
927    }
928
929    #[inline]
930    pub fn execute_unwrap_jmp_none(&mut self, wrapped_ptr_reg: u8, jmp_ip_0: u8, jmp_ip_1: u8) {
931        get_reg!(self, wrapped_ptr_reg, Ptr => ptr_addr);
932        let ptr = self.memory.get_heap_const_ptr(ptr_addr as usize);
933        unsafe {
934            if *ptr == 0 {
935                self.ip = u8s_to_u16!(jmp_ip_0, jmp_ip_1) as usize;
936            }
937        }
938    }
939
940     */
941
942    /*
943    #[inline]
944    fn execute_alloc(&mut self, dst_reg: u8, size_0: u8, size_1: u8) {
945        let memory_size = u8s_to_u16!(size_0, size_1);
946        let data_ptr = self.memory.heap_allocate(memory_size as usize);
947        set_reg!(self, dst_reg, data_ptr);
948    }
949
950     */
951
952    // Fixed Point special methods
953    #[inline]
954    fn execute_mul_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
955        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
956        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);
957        set_reg!(self, dst_reg, (lhs * rhs).inner());
958    }
959
960    #[inline]
961    fn execute_div_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
962        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
963        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);
964
965        set_reg!(self, dst_reg, (lhs / rhs).inner());
966    }
967
968    #[inline]
969    fn execute_f32_round(&mut self, dst_reg: u8, val_reg: u8) {
970        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
971
972        let int_val: i16 = val.round().into();
973        set_reg!(self, dst_reg, int_val);
974    }
975
976    #[inline]
977    fn execute_f32_floor(&mut self, dst_reg: u8, val_reg: u8) {
978        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
979
980        let floored: i16 = val.floor().into();
981        set_reg!(self, dst_reg, floored);
982    }
983
984    #[inline]
985    fn execute_f32_sqrt(&mut self, dst_reg: u8, val_reg: u8) {
986        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
987
988        set_reg!(self, dst_reg, val.sqrt().inner());
989    }
990
991    #[inline]
992    fn execute_f32_sin(&mut self, dst_reg: u8, val_reg: u8) {
993        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
994
995        set_reg!(self, dst_reg, val.sin().inner());
996    }
997
998    #[inline]
999    fn execute_f32_asin(&mut self, dst_reg: u8, val_reg: u8) {
1000        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1001
1002        set_reg!(self, dst_reg, val.asin().inner());
1003    }
1004
1005    #[inline]
1006    fn execute_f32_cos(&mut self, dst_reg: u8, val_reg: u8) {
1007        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1008
1009        set_reg!(self, dst_reg, val.cos().inner());
1010    }
1011
1012    #[inline]
1013    fn execute_f32_acos(&mut self, dst_reg: u8, val_reg: u8) {
1014        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1015
1016        set_reg!(self, dst_reg, val.acos().inner());
1017    }
1018
1019    /*
1020    #[inline]
1021    fn execute_f32_atan2(&mut self, dst_reg: u8, val_reg: u8, y_reg: u8) {
1022        // TODO: Implement atan2 in fixed32
1023        todo!()
1024    }
1025
1026     */
1027
1028    #[inline]
1029    fn execute_f32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1030        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1031
1032        self.create_string(dst_reg, &val.to_string())
1033    }
1034
1035    #[inline]
1036    fn execute_f32_sign(&mut self, dst_reg: u8, val_reg: u8) {
1037        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1038        // TODO: signum() is/was incorrect in Fixed32 crate
1039        set_reg!(
1040            self,
1041            dst_reg,
1042            Fp::from(if val < 0 {
1043                -1
1044            } else if val > 0 {
1045                1
1046            } else {
1047                0
1048            })
1049            .inner()
1050        );
1051    }
1052
1053    #[inline]
1054    fn execute_neg_i32(&mut self, dst_reg: u8, val_reg: u8) {
1055        let val = get_reg!(self, val_reg) as i32;
1056        set_reg!(self, dst_reg, -val);
1057    }
1058
1059    #[inline]
1060    const fn execute_add_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1061        let lhs = get_reg!(self, lhs_reg);
1062        let rhs = get_reg!(self, rhs_reg);
1063
1064        set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
1065    }
1066
1067    #[inline]
1068    fn execute_add_u32_imm(
1069        &mut self,
1070        dst_reg: u8,
1071        lhs_reg: u8,
1072        rhs_1: u8,
1073        rhs_2: u8,
1074        rhs_3: u8,
1075        rhs_4: u8,
1076    ) {
1077        let lhs = get_reg!(self, lhs_reg);
1078        let rhs = u32_from_u8s!(rhs_1, rhs_2, rhs_3, rhs_4);
1079
1080        set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
1081    }
1082
1083    #[inline]
1084    const fn execute_mul_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1085        let lhs = get_reg!(self, lhs_reg);
1086        let rhs = get_reg!(self, rhs_reg);
1087
1088        set_reg!(self, dst_reg, lhs.wrapping_mul(rhs));
1089    }
1090
1091    #[inline]
1092    const fn execute_sub_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1093        let lhs = get_reg!(self, lhs_reg);
1094        let rhs = get_reg!(self, rhs_reg);
1095
1096        set_reg!(self, dst_reg, lhs.wrapping_sub(rhs));
1097    }
1098
1099    /// This is the mathematical modulo, *not* the remainder.
1100    /// Like how it is done in Lua and Python
1101    /// <https://en.wikipedia.org/wiki/Modulo#In_programming_languages>
1102    #[inline]
1103    fn execute_mod_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1104        let lhs = get_reg!(self, lhs_reg) as i32;
1105        let rhs = get_reg!(self, rhs_reg) as i32;
1106
1107        let result = ((lhs % rhs) + rhs) % rhs;
1108        set_reg!(self, dst_reg, result);
1109    }
1110
1111    #[inline]
1112    fn execute_div_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1113        let lhs = get_reg!(self, lhs_reg) as i32;
1114        let rhs = get_reg!(self, rhs_reg) as i32;
1115
1116        let result_option = lhs.checked_div(rhs);
1117
1118        match result_option {
1119            Some(result) => {
1120                set_reg!(self, dst_reg, result);
1121            }
1122            None => {
1123                panic!(
1124                    "VM Runtime Error: Signed 32-bit integer overflow during DIV_I32 (R{} = R{} - R{})",
1125                    dst_reg, lhs_reg, rhs_reg
1126                );
1127            }
1128        }
1129    }
1130
1131    #[inline]
1132    fn execute_lt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1133        let lhs = get_reg!(self, lhs_reg) as i32;
1134        let rhs = get_reg!(self, rhs_reg) as i32;
1135        set_reg!(self, dest_bool_reg, lhs < rhs);
1136    }
1137
1138    #[inline]
1139    fn execute_le_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1140        let lhs = get_reg!(self, lhs_reg) as i32;
1141        let rhs = get_reg!(self, rhs_reg) as i32;
1142        set_reg!(self, dest_bool_reg, lhs <= rhs);
1143    }
1144
1145    #[inline]
1146    fn execute_gt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1147        let lhs = get_reg!(self, lhs_reg) as i32;
1148        let rhs = get_reg!(self, rhs_reg) as i32;
1149        set_reg!(self, dest_bool_reg, lhs > rhs);
1150    }
1151
1152    #[inline]
1153    fn execute_ge_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1154        let lhs = get_reg!(self, lhs_reg) as i32;
1155        let rhs = get_reg!(self, rhs_reg) as i32;
1156
1157        set_reg!(self, dest_bool_reg, lhs >= rhs);
1158    }
1159
1160    #[inline]
1161    fn execute_ge_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1162        let lhs = get_reg!(self, lhs_reg);
1163        let rhs = get_reg!(self, rhs_reg);
1164
1165        set_reg!(self, dest_bool_reg, lhs >= rhs);
1166    }
1167
1168    #[inline]
1169    fn execute_lt_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1170        let lhs = get_reg!(self, lhs_reg);
1171        let rhs = get_reg!(self, rhs_reg);
1172
1173        set_reg!(self, dest_bool_reg, lhs < rhs);
1174    }
1175
    /// RND: feeds the source register through squirrel noise (seed 0) to
    /// produce a deterministic pseudo-random i32.
    #[inline]
    fn execute_pseudo_random_i32(&mut self, dst_reg: u8, src_reg: u8) {
        let src = get_reg!(self, src_reg);
        set_reg!(self, dst_reg, squirrel_prng::squirrel_noise5(src, 0) as i32);
    }
1181
1182    #[inline]
1183    fn execute_i32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1184        let val = get_reg!(self, val_reg) as i32;
1185
1186        self.create_string(dst_reg, &val.to_string());
1187    }
1188
1189    #[inline]
1190    fn execute_bool_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1191        let val = get_reg!(self, val_reg) != 0;
1192
1193        self.create_string(dst_reg, &val.to_string());
1194    }
1195
1196    const HEX_DIGITS: &'static [u8; 16] = b"0123456789ABCDEF";
1197
1198    #[inline]
1199    const fn byte_to_prefixed_hex(val: u8, dst: &mut [u8; 4]) {
1200        dst[0] = b'0';
1201        dst[1] = b'x';
1202        dst[2] = Self::HEX_DIGITS[(val >> 4) as usize];
1203        dst[3] = Self::HEX_DIGITS[(val & 0x0F) as usize];
1204    }
1205
    /// TOSTRING: renders a byte register as `0xNN` and stores a new VM
    /// string in `dst_reg`.
    #[inline]
    fn execute_byte_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        let val = get_reg!(self, val_reg);
        // Registers are 32-bit; byte opcodes should never leave a larger value.
        debug_assert!(val <= 0xff, "byte out of range");

        let mut buf = [0u8; 4];
        Self::byte_to_prefixed_hex(val as u8, &mut buf);

        // Safety: we know buf is valid ASCII
        let s = unsafe { std::str::from_utf8_unchecked(&buf) };

        self.create_string(dst_reg, s);
    }
1219
1220    #[inline]
1221    fn execute_char_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1222        let char_raw = get_reg!(self, val_reg);
1223        let char = std::char::from_u32(char_raw).unwrap();
1224        self.create_string(dst_reg, &char.to_string());
1225    }
1226
    /// TOFLOAT: converts a signed integer register value to fixed point.
    ///
    /// NOTE(review): the value is truncated to i16 before conversion, so
    /// integers outside the i16 range silently lose their upper bits —
    /// confirm this matches the language's integer range.
    #[inline]
    fn execute_i32_to_f32(&mut self, float_dest_reg: u8, int_source_reg: u8) {
        let int_source = get_reg!(self, int_source_reg) as i32;
        set_reg!(self, float_dest_reg, Fp::from(int_source as i16).inner());
    }
1232
1233    #[inline]
1234    fn execute_abs_i32(&mut self, dst_reg: u8, val_reg: u8) {
1235        let val = get_reg!(self, val_reg) as i32;
1236        set_reg!(self, dst_reg, if val < 0 { -val } else { val });
1237    }
1238
1239    #[inline]
1240    fn execute_min_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1241        let lhs = get_reg!(self, lhs_reg) as i32;
1242        let rhs = get_reg!(self, rhs_reg) as i32;
1243
1244        set_reg!(self, dst_reg, if lhs < rhs { lhs } else { rhs });
1245    }
1246
1247    #[inline]
1248    fn execute_max_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1249        let lhs = get_reg!(self, lhs_reg) as i32;
1250        let rhs = get_reg!(self, rhs_reg) as i32;
1251
1252        set_reg!(self, dst_reg, if lhs > rhs { lhs } else { rhs });
1253    }
1254
1255    #[inline]
1256    fn execute_clamp_i32(&mut self, dst_reg: u8, val_reg: u8, min_reg: u8, max_reg: u8) {
1257        let val = get_reg!(self, val_reg) as i32;
1258        let min_val = get_reg!(self, min_reg) as i32;
1259        let max_val = get_reg!(self, max_reg) as i32;
1260
1261        set_reg!(
1262            self,
1263            dst_reg,
1264            if val < min_val {
1265                min_val
1266            } else if val > max_val {
1267                max_val
1268            } else {
1269                val
1270            }
1271        );
1272    }
1273
1274    // Sort of the same as `sub`
1275    #[inline]
1276    fn execute_cmp_reg(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1277        set_reg!(
1278            self,
1279            dest_bool_reg,
1280            self.registers[lhs_reg as usize] == self.registers[rhs_reg as usize]
1281        );
1282    }
1283
1284    // Sort of the same as `sub`
1285    #[inline]
1286    fn execute_eq_8_imm(&mut self, dest_bool_reg: u8, val_reg: u8, octet: u8) {
1287        let compare = get_reg!(self, val_reg);
1288        set_reg!(self, dest_bool_reg, compare == octet as u32);
1289    }
1290
1291    #[inline]
1292    fn execute_trap_on_less_than(&mut self, a_reg: u8, b_reg: u8) {
1293        let a = get_reg!(self, a_reg);
1294        let b = get_reg!(self, b_reg);
1295        if a < b {
1296            self.internal_trap(TrapCode::LessThanTrap { a: a, b: b })
1297        }
1298    }
1299
    /// MEZ: writes `true` into `dst_reg` when the source register is zero.
    #[inline]
    fn execute_move_equal_to_zero(&mut self, dst_reg: u8, src_reg: u8) {
        set_reg!(self, dst_reg, get_reg!(self, src_reg) == 0);
    }
1304
    /// BF: adds the signed 16-bit branch offset (relative to the already
    /// advanced pc) when the test register holds zero (false).
    #[inline]
    const fn execute_branch_if_false(
        &mut self,
        test_reg: u8,
        branch_offset_0: u8,
        branch_offset_1: u8,
    ) {
        if get_reg!(self, test_reg) == 0 {
            self.pc =
                (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
        }
    }
1317
    /// BT: adds the signed 16-bit branch offset (relative to the already
    /// advanced pc) when the test register holds non-zero (true).
    #[inline]
    const fn execute_branch_if_true(
        &mut self,
        test_reg: u8,
        branch_offset_0: u8,
        branch_offset_1: u8,
    ) {
        if get_reg!(self, test_reg) != 0 {
            self.pc =
                (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
        }
    }
1330
    /// B: unconditional relative branch by a signed 16-bit offset.
    #[inline]
    fn execute_b(&mut self, branch_offset_0: u8, branch_offset_1: u8) {
        self.pc =
            (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
    }
1336
    /// HLT: marks execution complete; the VM state is left as-is.
    #[inline]
    fn execute_hlt(&mut self) {
        self.execution_complete = true;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1345
    /// HALT: stops execution and marks the VM state as halted by the program.
    #[inline]
    fn execute_user_halt(&mut self) {
        self.execution_complete = true;
        self.state = VmState::Halt;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1355
    /// STEP: stops execution and puts the VM in the `Step` state so the
    /// host can resume later (see `resume`).
    #[inline]
    fn execute_step(&mut self) {
        self.execution_complete = true;
        self.state = VmState::Step;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1365
1366    #[inline]
1367    fn execute_trap(&mut self, trap_code: u8) {
1368        self.internal_trap(TrapCode::try_from(trap_code).unwrap());
1369    }
1370
    /// Stops execution and records `trap_code` in the VM state so the host
    /// can inspect why the program trapped.
    pub fn internal_trap(&mut self, trap_code: TrapCode) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("vm trap: '{trap_code}'");
        }
        self.state = VmState::Trap(trap_code);

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1385
    /// PANIC: stops execution and records the panic message, which is read
    /// as a VM string from the heap address held in `panic_reason_reg`.
    #[inline]
    fn execute_panic(&mut self, panic_reason_reg: u8) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }

        let heap_addr = get_reg!(self, panic_reason_reg);
        let str = self.read_string(heap_addr, &self.memory);

        #[cfg(feature = "debug_vm")]
        if self.debug_stats_enabled {
            eprintln!("panic: {str}");
        }

        self.state = VmState::Panic(str.to_string());
    }
1405
    /// Prints the accumulated execution statistics to stderr.
    fn debug_output(&self) {
        eprintln!(
            "total opcodes executed: {}, call_stack_depth: {}, max_call_depth:{}",
            self.debug.opcodes_executed, self.debug.call_depth, self.debug.max_call_depth
        );
    }
1412
1413    #[inline]
1414    fn execute_mov_reg(&mut self, dst_reg: u8, src_reg: u8) {
1415        self.registers[dst_reg as usize] = self.registers[src_reg as usize];
1416    }
1417
    /// STREGS: stores `count` consecutive registers, starting at
    /// `start_reg`, into frame memory at the 32-bit frame offset encoded
    /// in the four operand bytes (least significant first).
    #[inline]
    fn execute_st_regs_to_frame(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        start_reg: u8,
        count: u8,
    ) {
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );
        let const_reg_ptr = &self.registers[start_reg as usize] as *const u32;
        let target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        // SAFETY: assumes `start_reg + count` stays within the register file
        // and the destination range lies inside frame memory — neither is
        // checked here (NOTE(review): confirm the code generator guarantees it).
        unsafe {
            ptr::copy_nonoverlapping(const_reg_ptr, target_ptr, count as usize);
        }
    }
1440
    /// STREGSMASK: stores the registers selected by `reg_mask` (bit i
    /// selects register i, low bits first) into consecutive u32 slots of
    /// frame memory at the encoded offset. Only selected registers advance
    /// the destination pointer, so the stored block is densely packed.
    #[inline]
    fn execute_st_regs_to_frame_using_mask(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        reg_mask: u8,
    ) {
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );

        let mut target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        let mut const_reg_ptr = &self.registers[0usize] as *const u32;
        let mut mask = reg_mask;
        // Walk r0..r7; an 8-bit mask cannot select beyond the eighth register.
        for _ in 0..8 {
            if (mask & 0x1) != 0 {
                unsafe {
                    ptr::write(target_ptr, *const_reg_ptr);
                    target_ptr = target_ptr.add(1);
                }
            }
            mask >>= 1;
            unsafe {
                // Advance to the next register slot unconditionally; the
                // pointer is only dereferenced when the matching bit is set.
                const_reg_ptr = const_reg_ptr.add(1);
            }
        }
    }
1473
    /// STW: stores the 32-bit value of `src_reg` to `*(base + offset)`,
    /// where `base` comes from `base_ptr_reg` and the 32-bit offset from
    /// the four operand bytes.
    #[inline]
    fn execute_stw_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u32;
        let value_to_copy = get_reg!(self, src_reg);

        // SAFETY: assumes the target address is valid VM memory and 4-byte
        // aligned — NOTE(review): alignment is not checked here; confirm the
        // code generator only emits aligned addresses.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
1493
    /// STH: stores the low 16 bits of `src_reg` to `*(base + offset)`.
    #[inline]
    fn execute_sth_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u16;
        let value_to_copy = get_reg!(self, src_reg) as u16;

        // SAFETY: assumes the target address is valid VM memory and 2-byte
        // aligned — NOTE(review): alignment is not checked here.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
    /// STB: stores the low 8 bits of `src_reg` to `*(base + offset)`.
    #[inline]
    fn execute_stb_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset);
        let value_to_copy = get_reg!(self, src_reg) as u8;

        // SAFETY: assumes the target address is valid VM memory.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
1532
    /// LDB: loads one byte from `*(base + offset)` into `dst_reg`.
    #[inline]
    pub fn execute_ldb_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from = self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset);
        // SAFETY: assumes the source address is valid VM memory.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1549
    /// LDW: loads a 32-bit value from `*(base + offset)` into `dst_reg`.
    #[inline]
    pub fn execute_ldw_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from =
            self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u32;
        // SAFETY: assumes the source address is valid VM memory and 4-byte
        // aligned — NOTE(review): alignment is not checked here.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1567
    /// LDW (absolute): loads a 32-bit value from the absolute heap address
    /// encoded in the four operand bytes into `dst_reg`.
    #[inline]
    fn execute_ldw_from_absolute_address(
        &mut self,
        dst_reg: u8,
        addr_0: u8,
        addr_1: u8,
        addr_2: u8,
        addr_3: u8,
    ) {
        let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);

        let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize) as *const u32;

        // SAFETY: assumes the encoded address is valid VM memory and 4-byte
        // aligned — NOTE(review): alignment is not checked here.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1585
    /// LDB (absolute): loads one byte from the absolute heap address
    /// encoded in the four operand bytes into `dst_reg`.
    #[inline]
    fn execute_ldb_from_absolute_address(
        &mut self,
        dst_reg: u8,
        addr_0: u8,
        addr_1: u8,
        addr_2: u8,
        addr_3: u8,
    ) {
        let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);

        let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize);

        // SAFETY: assumes the encoded address is valid VM memory.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1603
    /// LDH: loads a 16-bit value from `*(base + offset)` into `dst_reg`.
    #[inline]
    pub fn execute_ldh_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from =
            self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u16;
        // SAFETY: assumes the source address is valid VM memory and 2-byte
        // aligned — NOTE(review): alignment is not checked here.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1621
    /// LDREGS: loads `count` consecutive u32 values from frame memory at
    /// the encoded offset into registers starting at `start_reg`.
    #[inline]
    pub fn execute_ld_regs_from_frame(
        &mut self,
        start_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        count: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let target_reg_ptr = &mut self.registers[start_reg as usize] as *mut u32;
        let source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
        // SAFETY: assumes `start_reg + count` stays within the register file
        // and the source range lies inside frame memory — neither is checked
        // here (NOTE(review): confirm the code generator guarantees it).
        unsafe {
            ptr::copy_nonoverlapping(source_frame_start, target_reg_ptr, count as usize);
        }
    }
1639
    /// LDREGSMASK: the inverse of STREGSMASK — reads densely packed u32
    /// values from frame memory at the encoded offset into the registers
    /// selected by `reg_mask` (bit i selects register i, low bits first).
    /// Only selected registers advance the source pointer.
    #[inline]
    pub fn execute_ld_regs_from_frame_using_mask(
        &mut self,
        reg_mask: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let mut target_reg_ptr = &mut self.registers[0usize] as *mut u32;
        let mut source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
        let mut mask = reg_mask;
        // Walk r0..r7; an 8-bit mask cannot select beyond the eighth register.
        for _ in 0..8 {
            if mask & 0x01 != 0 {
                unsafe {
                    ptr::write(target_reg_ptr, *source_frame_start);
                    source_frame_start = source_frame_start.add(1);
                }
            }
            mask >>= 1;
            unsafe {
                // Advance to the next register slot unconditionally; the
                // pointer is only dereferenced when the matching bit is set.
                target_reg_ptr = target_reg_ptr.add(1);
            }
        }
    }
1666
    /// LEA: stores the address `fp + offset` in `dst_reg`, so later
    /// loads/stores can address frame-local data through a register.
    #[inline]
    fn execute_lea(&mut self, dst_reg: u8, offset_0: u8, offset_1: u8, offset_2: u8, offset_3: u8) {
        let current_fp_addr = self.memory.frame_offset as u32;
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        set_reg!(self, dst_reg, current_fp_addr + offset);
    }
1673
1674    #[inline]
1675    pub fn execute_frame_memory_clear(
1676        &mut self,
1677        dst_pointer_0: u8,
1678        dst_pointer_1: u8,
1679        dst_pointer_2: u8,
1680        dst_pointer_3: u8,
1681        memory_size_0: u8,
1682        memory_size_1: u8,
1683        memory_size_2: u8,
1684        memory_size_3: u8,
1685    ) {
1686        let frame_offset =
1687            u32_from_u8s!(dst_pointer_0, dst_pointer_1, dst_pointer_2, dst_pointer_3);
1688        let total_bytes = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);
1689
1690        assert!(
1691            frame_offset + total_bytes < self.memory.memory_size as u32,
1692            "trying to overwrite memory!"
1693        );
1694        let dst_ptr = self.memory.get_frame_ptr(frame_offset);
1695
1696        unsafe {
1697            ptr::write_bytes(dst_ptr, 0, total_bytes as usize);
1698        }
1699    }
1700
1701    #[inline]
1702    fn execute_mov_mem_with_offsets(
1703        &mut self,
1704        dst_pointer_reg: u8,
1705        src_pointer_reg: u8,
1706        memory_size_reg: u8,
1707    ) {
1708        let dest_addr = get_reg!(self, dst_pointer_reg);
1709        let src_addr = get_reg!(self, src_pointer_reg);
1710        let memory_size = get_reg!(self, memory_size_reg);
1711
1712        // Check for overlapping memory regions
1713        let dest_end = dest_addr + memory_size;
1714        let src_end = src_addr + memory_size;
1715
1716        if (dest_addr < src_end && src_addr < dest_end) {
1717            return self.internal_trap(TrapCode::OverlappingMemoryCopy);
1718        }
1719
1720        #[cfg(feature = "debug_vm")]
1721        if self.debug_operations_enabled {
1722            eprintln!(
1723                "{:04X}> BLKCPY Size={:08X} \n  \
1724                DST_ADDR=0x{:08X}\n  \
1725                SRC_ADDR=0x{:08X}",
1726                self.pc - 1,
1727                memory_size,
1728                dest_addr,
1729                src_addr,
1730            );
1731        }
1732
1733        let dst_ptr = self.memory.get_heap_ptr(dest_addr as usize);
1734        let src_ptr = self.memory.get_heap_const_ptr(src_addr as usize);
1735
1736        unsafe {
1737            ptr::copy_nonoverlapping(src_ptr, dst_ptr, memory_size as usize);
1738        }
1739    }
1740
1741    #[inline]
1742    fn execute_mov_mem_with_offsets_with_variable_size(
1743        &mut self,
1744        dst_pointer_reg: u8,
1745        src_pointer_reg: u8,
1746        memory_size_reg: u8,
1747    ) {
1748        let dest_addr = get_reg!(self, dst_pointer_reg);
1749        let src_addr = get_reg!(self, src_pointer_reg);
1750        let memory_size = get_reg!(self, memory_size_reg);
1751
1752        // Check for overlapping memory regions
1753        let dest_end = dest_addr + memory_size;
1754        let src_end = src_addr + memory_size;
1755
1756        if (dest_addr < src_end && src_addr < dest_end) {
1757            return self.internal_trap(TrapCode::OverlappingMemoryCopy);
1758        }
1759
1760        #[cfg(feature = "debug_vm")]
1761        if self.debug_operations_enabled {
1762            eprintln!(
1763                "{:04X}> BLKCPY Size={:08X} \n  \
1764                DST_ADDR=0x{:08X}\n  \
1765                SRC_ADDR=0x{:08X}",
1766                self.pc - 1,
1767                memory_size,
1768                dest_addr,
1769                src_addr,
1770            );
1771        }
1772
1773        let dst_ptr = self.memory.get_heap_ptr(dest_addr as usize);
1774        let src_ptr = self.memory.get_heap_const_ptr(src_addr as usize);
1775
1776        unsafe {
1777            ptr::copy_nonoverlapping(src_ptr, dst_ptr, memory_size as usize);
1778        }
1779    }
1780
1781    #[inline]
1782    fn execute_mov_mem_with_immediate_size(
1783        &mut self,
1784        dst_pointer_reg: u8,
1785        src_pointer_reg: u8,
1786        memory_size_0: u8,
1787        memory_size_1: u8,
1788        memory_size_2: u8,
1789        memory_size_3: u8,
1790    ) {
1791        let dest_addr = get_reg!(self, dst_pointer_reg);
1792        let src_addr = get_reg!(self, src_pointer_reg);
1793        let memory_size = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);
1794        assert!(
1795            src_addr + memory_size < self.memory.memory_size as u32,
1796            "trying to overwrite memory"
1797        );
1798
1799        // Check for overlapping memory regions
1800        let dest_end = dest_addr + memory_size;
1801        let src_end = src_addr + memory_size;
1802
1803        if (dest_addr < src_end && src_addr < dest_end) {
1804            return self.internal_trap(TrapCode::OverlappingMemoryCopy);
1805        }
1806
1807        #[cfg(feature = "debug_vm")]
1808        if self.debug_operations_enabled {
1809            eprintln!(
1810                "{:04X}> BLKCPY Size={:08X} \n  \
1811                DST_ADDR=0x{:08X}\n  \
1812                SRC_ADDR=0x{:08X}",
1813                self.pc - 1,
1814                memory_size,
1815                dest_addr,
1816                src_addr,
1817            );
1818        }
1819
1820        let dst_ptr = self.memory.get_heap_ptr(dest_addr as usize);
1821        let src_ptr = self.memory.get_heap_const_ptr(src_addr as usize);
1822
1823        unsafe {
1824            ptr::copy_nonoverlapping(src_ptr, dst_ptr, memory_size as usize);
1825        }
1826    }
1827
1828    #[inline]
1829    fn execute_cmp_block(
1830        &mut self,
1831        dest_bool_reg: u8,
1832        src_addr_reg_a: u8,
1833        src_addr_reg_b: u8,
1834        size_lower: u8,
1835        size_upper: u8,
1836    ) {
1837        let size = u16_from_u8s!(size_lower, size_upper) as usize;
1838
1839        let arc_addr_a = get_reg!(self, src_addr_reg_a);
1840        let src_addr_b = get_reg!(self, src_addr_reg_b);
1841
1842        let src_ptr_a = self.memory.get_heap_const_ptr(arc_addr_a as usize);
1843        let src_ptr_b = self.memory.get_heap_const_ptr(src_addr_b as usize);
1844
1845        unsafe {
1846            let slice_a = std::slice::from_raw_parts(src_ptr_a, size);
1847            let slice_b = std::slice::from_raw_parts(src_ptr_b, size);
1848
1849            set_reg!(self, dest_bool_reg, slice_a == slice_b);
1850        }
1851    }
1852
1853    #[cfg(feature = "debug_vm")]
1854    pub fn debug_opcode(&self, opcode: u8, operands: &[u8; 8]) {
1855        eprintln!(
1856            "{:8} {}",
1857            OpCode::from(opcode),
1858            match self.handlers[opcode as usize] {
1859                HandlerType::Args0(_) => String::new(),
1860                HandlerType::Args1(_) => format!("{:04X}", operands[0]),
1861                HandlerType::Args2(_) => format!("{:04X}, {:04X}", operands[0], operands[1]),
1862                HandlerType::Args3(_) => format!(
1863                    "{:04X}, {:04X}, {:04X}",
1864                    operands[0], operands[1], operands[2]
1865                ),
1866                HandlerType::Args4(_) => format!(
1867                    "{:04X}, {:04X}, {:04X}, {:04X}",
1868                    operands[0], operands[1], operands[2], operands[3]
1869                ),
1870                HandlerType::Args5(_) => format!(
1871                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1872                    operands[0], operands[1], operands[2], operands[3], operands[4],
1873                ),
1874                HandlerType::Args6(_) => format!(
1875                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1876                    operands[0], operands[1], operands[2], operands[3], operands[4], operands[5],
1877                ),
1878                HandlerType::Args7(_) => format!(
1879                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1880                    operands[0],
1881                    operands[1],
1882                    operands[2],
1883                    operands[3],
1884                    operands[4],
1885                    operands[5],
1886                    operands[6],
1887                ),
1888                HandlerType::Args8(_) => format!(
1889                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1890                    operands[0],
1891                    operands[1],
1892                    operands[2],
1893                    operands[3],
1894                    operands[4],
1895                    operands[5],
1896                    operands[6],
1897                    operands[7],
1898                ),
1899            }
1900        );
1901    }
1902
1903    fn execute_call(
1904        &mut self,
1905        absolute_pc_a: u8,
1906        absolute_pc_b: u8,
1907        absolute_pc_c: u8,
1908        absolute_pc_d: u8,
1909    ) {
1910        let absolute_pc = u32_from_u8s!(absolute_pc_a, absolute_pc_b, absolute_pc_c, absolute_pc_d);
1911        let return_info = CallFrame {
1912            return_address: self.pc + 1,
1913            previous_frame_offset: self.memory.frame_offset,
1914            previous_stack_offset: self.memory.stack_offset,
1915        };
1916
1917        //self.memory.set_fp(); // we do not modify fp
1918        self.call_stack.push(return_info);
1919        self.pc = absolute_pc as usize;
1920
1921        #[cfg(feature = "debug_vm")]
1922        if self.debug_stats_enabled {
1923            self.debug.call_depth += 1;
1924            if self.debug.call_depth > self.debug.max_call_depth {
1925                self.debug.max_call_depth = self.debug.call_depth;
1926            }
1927        }
1928    }
1929
1930    #[inline]
1931    fn execute_host_call(
1932        &mut self,
1933        function_id_lower: u8,
1934        function_id_upper: u8,
1935        register_count: u8,
1936        mut callback: &mut dyn HostFunctionCallback,
1937    ) {
1938        let heap = self.memory();
1939
1940        let function_id = u8s_to_u16!(function_id_lower, function_id_upper);
1941
1942        unsafe {
1943            let host_args = HostArgs::new(
1944                function_id,
1945                heap.memory,
1946                heap.memory_size,
1947                heap.stack_offset,
1948                self.registers.as_mut_ptr(),
1949                register_count as usize + 1,
1950            );
1951
1952            callback.dispatch_host_call(host_args);
1953        }
1954    }
1955
1956    #[allow(clippy::missing_const_for_fn)]
1957    #[inline(always)]
1958    fn execute_enter(
1959        &mut self,
1960        frame_size_0: u8,
1961        frame_size_1: u8,
1962        frame_size_2: u8,
1963        frame_size_3: u8,
1964    ) {
1965        let frame_size = u32_from_u8s!(frame_size_0, frame_size_1, frame_size_2, frame_size_3);
1966        self.memory.set_fp_from_sp(); // set the frame pointer to what sp is now
1967        self.memory.inc_sp(frame_size as usize);
1968    }
1969
1970    #[inline]
1971    fn execute_ret(&mut self) {
1972        let call_frame = self.call_stack.pop().unwrap();
1973
1974        self.memory.pop(
1975            call_frame.previous_frame_offset,
1976            call_frame.previous_stack_offset,
1977        );
1978
1979        // going back to the old instruction
1980        self.pc = call_frame.return_address;
1981        self.pc -= 1; // Adjust for automatic increment
1982
1983        // NOTE: Any return value is always at frame_offset + 0
1984
1985        #[cfg(feature = "debug_vm")]
1986        if self.debug_stats_enabled {
1987            self.debug.call_depth -= 1;
1988        }
1989    }
1990
1991    #[inline]
1992    const fn u8s_to_32(a: u8, b: u8, c: u8, d: u8) -> u32 {
1993        u32::from_le_bytes([a, b, c, d])
1994    }
1995
1996    #[inline]
1997    pub fn get_const_ptr_from_reg(&self, reg: u8) -> *const u8 {
1998        let ptr_addr = get_reg!(self, reg);
1999        self.memory.get_heap_const_ptr(ptr_addr as usize)
2000    }
2001
2002    #[inline]
2003    pub fn get_const_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *const u8 {
2004        let ptr_addr = get_reg!(self, reg) + offset;
2005        self.memory.get_heap_const_ptr(ptr_addr as usize)
2006    }
2007
2008    #[inline]
2009    pub fn get_ptr_from_reg(&self, reg: u8) -> *mut u8 {
2010        let ptr_addr = get_reg!(self, reg);
2011        self.memory.get_heap_ptr(ptr_addr as usize)
2012    }
2013
2014    #[inline]
2015    pub fn get_ptr_and_addr_from_reg(&self, reg: u8) -> (*mut u8, u32) {
2016        let ptr_addr = get_reg!(self, reg);
2017        (self.memory.get_heap_ptr(ptr_addr as usize), ptr_addr)
2018    }
2019
2020    #[inline]
2021    pub fn get_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *mut u8 {
2022        let ptr_addr = get_reg!(self, reg) + offset;
2023        self.memory.get_heap_ptr(ptr_addr as usize)
2024    }
2025}