swamp_vm/
lib.rs

1/*
2 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
3 * Licensed under the MIT License. See LICENSE in the project root for license information.
4 */
5extern crate core;
6
7use crate::VmState::Normal;
8use crate::host::{HostArgs, HostFunctionCallback};
9use crate::memory::ExecutionMode::NormalExecution;
10use crate::memory::{Memory, MemoryDebug};
11use fixed32::Fp;
12use std::error::Error;
13use std::fmt::{Display, Formatter};
14use std::mem::discriminant;
15use std::ptr;
16use std::str::FromStr;
17use swamp_vm_isa::opcode::OpCode;
18use swamp_vm_isa::{BinaryInstruction, InstructionPosition};
19
20mod grid;
21pub mod host;
22pub mod map_open;
23pub mod memory;
24pub mod prelude;
25mod range;
26mod sparse;
27mod string;
28mod vec;
29
#[macro_export]
macro_rules! u8s_to_u16 {
    ($lsb:expr, $msb:expr) => {
        // Widen each byte to u16 before shifting so the shift cannot
        // overflow: the high byte lands in bits 8..16 and the low byte in
        // bits 0..8, combined with a bitwise OR (little-endian byte order).
        (($lsb as u16) | (($msb as u16) << 8))
    };
}
40
#[macro_export]
macro_rules! i16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        // Assemble the two bytes into an unsigned 16-bit value (low byte in
        // bits 0..8, high byte in bits 8..16), then reinterpret the bit
        // pattern as a signed i16.
        ((($lsb as u16) | (($msb as u16) << 8)) as i16)
    };
}
51
#[macro_export]
macro_rules! u32_from_u8s {
    ($lsb:expr, $msb:expr, $msb2:expr, $msb3:expr) => {
        // Little-endian assembly of four bytes: each byte is widened to u32
        // first, shifted to its position, and OR-ed together.
        (($lsb as u32)
            | (($msb as u32) << 8)
            | (($msb2 as u32) << 16)
            | (($msb3 as u32) << 24))
    };
}
58
#[macro_export]
macro_rules! u16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        // Little-endian assembly of two bytes into a u16.
        // NOTE(review): this expands identically to `u8s_to_u16!` — one of
        // the two could likely be retired.
        (($lsb as u16) | (($msb as u16) << 8))
    };
}
65
#[macro_export]
macro_rules! get_reg {
    // Reads a register value from the VM's register file.
    // $vm: the VM state (anything with a `registers` array field).
    // $reg_idx: the register index; cast to usize for indexing.
    ($vm:expr, $reg_idx:expr) => {
        $vm.registers[$reg_idx as usize]
    };
}
72
#[macro_export]
macro_rules! set_reg {
    // Stores a value into a register, converting it to u32
    // $vm:expr is the VM state (e.g., `&mut self`)
    // $reg_idx:expr is the destination register index (e.g., `dst_reg`)
    // $value:expr is the value to store (must be convertible to u32)
    ($vm:expr, $reg_idx:expr, $value:expr) => {
        // Use `as u32` to convert the value to the register's storage type
        $vm.registers[$reg_idx as usize] = $value as u32
    };
}
84
// Opcode handler signatures, one per operand arity: HandlerN takes the VM
// plus N operand bytes unpacked from the instruction.
type Handler0 = fn(&mut Vm);
type Handler1 = fn(&mut Vm, u8);
type Handler2 = fn(&mut Vm, u8, u8);
type Handler3 = fn(&mut Vm, u8, u8, u8);
type Handler4 = fn(&mut Vm, u8, u8, u8, u8);
type Handler5 = fn(&mut Vm, u8, u8, u8, u8, u8);
type Handler6 = fn(&mut Vm, u8, u8, u8, u8, u8, u8);
type Handler7 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8);
type Handler8 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8, u8);
94
/// A dispatch-table entry: an opcode handler tagged by how many of the
/// instruction's operand bytes it consumes (see the dispatch `match` in
/// `Vm::step`).
#[derive(Copy, Clone)]
enum HandlerType {
    Args0(Handler0),
    Args1(Handler1),
    Args2(Handler2),
    Args3(Handler3),
    Args4(Handler4),
    Args5(Handler5),
    Args6(Handler6),
    Args7(Handler7),
    Args8(Handler8),
}
107
/// Execution statistics collected during a run (opcode counting happens only
/// while `debug_stats_enabled` is set).
#[derive(Debug, Default)]
pub struct Debug {
    // Total number of opcodes dispatched so far.
    pub opcodes_executed: usize,
    // Current call-stack depth — presumably maintained by the call/return
    // handlers, which are not visible in this chunk.
    pub call_depth: usize,
    // Deepest call depth observed during the run.
    pub max_call_depth: usize,
    // Highest stack offset observed during the run.
    pub max_stack_offset: usize,
}
115
/// Bookkeeping record kept on `Vm::call_stack` for each active function call
/// (presumably pushed by the Call/Enter handlers — not visible in this chunk).
pub struct CallFrame {
    pub return_address: usize,        // Instruction index to resume at on return
    pub previous_frame_offset: usize, // Previous frame position
    pub previous_stack_offset: usize, // NOTE(review): original comment said "Size of this frame", but the name suggests the caller's stack offset restored on return — verify against the Enter/Ret handlers
}
121
// Storage type of a single VM register.
// NOTE(review): this alias is not referenced anywhere in this chunk (the
// `registers` field uses `u32` directly) — verify it is used elsewhere.
type RegContents = u32;
123
/// Failure conditions a running program can hit; stored in
/// `VmState::Trap` when an instruction detects one. Some variants carry
/// diagnostic payloads (ignored by `is_sort_of_equal`).
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum TrapCode {
    StoppedByTestHarness,
    // Index was outside the vector's element count.
    VecBoundsFail {
        encountered: usize,
        element_count: usize,
    },
    MapOutOfSpace,
    MapEntryNotFound,
    MapEntryNotFoundAndCouldNotBeCreated,
    MapEntryNotFoundForRemoval,
    // Raised by the TrapOnLessThan instruction; carries both operands.
    LessThanTrap {
        a: u32,
        b: u32,
    },
    SparseOutOfSpace,
    SparseRemoveFailed,
    SparseGetFailed,
    MapCouldNotBeCopied,
    OverlappingMemoryCopy,
    MemoryCorruption,
    // Push/extend exceeded the vector's fixed capacity.
    VecOutOfCapacity {
        encountered: u16,
        capacity: u16,
    },
    VecEmpty,
    VecNeverInitialized,
    // Grid access outside the horizontal bounds.
    GridBoundsXFail {
        x: u32,
        width: u16,
    },
    // Grid access outside the vertical bounds.
    GridBoundsYFail {
        y: u32,
        height: u16,
    },
    GridBoundsFail,
    InvalidUtf8Sequence,
    UnalignedAccess,
    ReverseRangeNotAllowedHere,
    U8CheckFailed,
    Misaligned,
}
166
impl TrapCode {
    /// Compares only the enum variant, ignoring payload fields — e.g. two
    /// `VecBoundsFail` values with different indices are "sort of equal".
    /// Useful for test harnesses that only care which trap fired.
    pub fn is_sort_of_equal(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}
172
173impl TryFrom<u8> for TrapCode {
174    type Error = ();
175
176    fn try_from(value: u8) -> Result<Self, Self::Error> {
177        let code = match value {
178            0 => Self::StoppedByTestHarness,
179            1 => Self::VecBoundsFail {
180                encountered: 0,
181                element_count: 0,
182            }, // TODO: Fix this
183            2 => Self::MapOutOfSpace,
184            3 => Self::MapEntryNotFound,
185            4 => Self::MapEntryNotFoundAndCouldNotBeCreated,
186            5 => Self::MapEntryNotFoundForRemoval,
187            6 => Self::LessThanTrap { a: 0, b: 0 },
188            7 => Self::SparseOutOfSpace,
189            8 => Self::SparseRemoveFailed,
190            9 => Self::SparseGetFailed,
191            10 => Self::MapCouldNotBeCopied,
192            11 => Self::OverlappingMemoryCopy,
193            _ => return Err(()),
194        };
195        Ok(code)
196    }
197}
198
/// Error returned by `TrapCode::from_str` for unrecognized trap names.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseTrapCodeError;

impl Display for ParseTrapCodeError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("Unable to parse string into a valid TrapCode")
    }
}
impl Error for ParseTrapCodeError {}
208impl FromStr for TrapCode {
209    type Err = ParseTrapCodeError;
210
211    fn from_str(s: &str) -> Result<Self, Self::Err> {
212        let code = match s {
213            "stopped_by_test_harness" => Self::StoppedByTestHarness,
214            "vec_bounds_fail" => Self::VecBoundsFail {
215                encountered: 0,
216                element_count: 0,
217            }, // TODO: FIX
218            "vec_out_of_capacity" => Self::VecOutOfCapacity {
219                encountered: 0,
220                capacity: 0,
221            }, // TODO: FIX
222            "reverse_range_not_allowed_here" => Self::ReverseRangeNotAllowedHere,
223            "map_out_of_space" => Self::MapOutOfSpace,
224            "map_entry_not_found" => Self::MapEntryNotFound,
225            "map_entry_or_create_failed" => Self::MapEntryNotFoundAndCouldNotBeCreated,
226            "map_entry_remove_failed" => Self::MapEntryNotFoundForRemoval,
227            "less_than_trap" => Self::LessThanTrap { a: 0, b: 0 },
228            "sparse_out_of_space" => Self::SparseOutOfSpace,
229            "sparse_remove_failed" => Self::SparseRemoveFailed,
230            "sparse_get_failed" => Self::SparseGetFailed,
231            "map_could_not_be_copied" => Self::MapCouldNotBeCopied,
232            "overlapping_memory_copy" => Self::OverlappingMemoryCopy,
233            _ => return Err(ParseTrapCodeError),
234        };
235
236        Ok(code)
237    }
238}
239
240impl Display for TrapCode {
241    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
242        write!(f, "trap {self:?}")
243    }
244}
245
/// Overall VM status the host can inspect between runs/steps.
#[derive(Eq, Debug, PartialEq)]
pub enum VmState {
    Normal,
    // Guest code panicked with a message.
    Panic(String),
    // Guest code hit a trap condition.
    Trap(TrapCode),
    Halt,
    Step,
}
254
/// The virtual machine: linear memory, a 256-entry register file, the
/// instruction stream, and an opcode-indexed handler dispatch table.
pub struct Vm {
    // Memory
    memory: Memory,

    // Execution state
    pc: usize,                            // Instruction pointer
    instructions: Vec<BinaryInstruction>, // Bytecode
    execution_complete: bool,             // Flag for completion

    // Function call management
    call_stack: Vec<CallFrame>, // Track function calls

    // One entry per possible opcode byte; unimplemented opcodes point at a
    // fallback handler (see `Vm::new`).
    handlers: [HandlerType; 256],

    pub registers: [u32; 256], // Normal CPUs have around 31 general purpose registers

    // TODO: Error state
    pub debug: Debug,
    pub debug_stats_enabled: bool,
    pub debug_opcodes_enabled: bool,
    pub debug_operations_enabled: bool,
    pub state: VmState,
}
278
impl Vm {
    /// `true` once the VM has finished executing (the flag is set by the
    /// halt/trap/panic handlers, which are not visible in this chunk).
    #[must_use]
    pub const fn is_execution_complete(&self) -> bool {
        self.execution_complete
    }
}
285
// Memory alignment helpers: sizes/addresses are rounded to 8-byte boundaries
// (mask clears the low three bits).
// NOTE(review): none of these constants are referenced within this chunk —
// verify they are used elsewhere in the file.
const ALIGNMENT: usize = 8;
const ALIGNMENT_REST: usize = ALIGNMENT - 1;
const ALIGNMENT_MASK: usize = !ALIGNMENT_REST;
289
/// Construction parameters for [`Vm::new`].
pub struct VmSetup {
    pub stack_memory_size: usize,
    pub heap_memory_size: usize,
    // Read-only constant data. Must be smaller than half of
    // `stack_memory_size` (asserted in `Vm::new`); presumably placed below
    // the stack area — see the layout asserts in `Vm::step`.
    pub constant_memory: Vec<u8>,
    pub debug_stats_enabled: bool,
    pub debug_opcodes_enabled: bool,
    pub debug_operations_enabled: bool,
}
298
299impl Vm {
300    #[allow(clippy::too_many_lines)]
301    pub fn new(instructions: Vec<BinaryInstruction>, setup: VmSetup) -> Self {
302        let memory = Memory::new(
303            &setup.constant_memory,
304            setup.stack_memory_size,
305            setup.heap_memory_size,
306        );
307
308        assert!(
309            setup.constant_memory.len() < setup.stack_memory_size / 2,
310            "too much constant memory"
311        );
312
313        let mut vm = Self {
314            memory, // Raw memory pointer
315            pc: 0,
316            instructions,
317            execution_complete: false,
318            call_stack: vec![],
319            handlers: [const { HandlerType::Args0(Self::execute_unimplemented) }; 256],
320            registers: [const { 0 }; 256],
321            debug: Debug {
322                opcodes_executed: 0,
323                call_depth: 0,
324                max_call_depth: 0,
325                max_stack_offset: 0,
326            },
327            debug_stats_enabled: setup.debug_stats_enabled,
328            debug_opcodes_enabled: setup.debug_opcodes_enabled,
329            debug_operations_enabled: setup.debug_operations_enabled,
330            state: Normal,
331        };
332
333        /*
334            TODO: @idea: Instead of storing function pointers, the instructions vector itself
335            includes the pointer to the instruction's handler code.
336
337            type HandlerPointer = fn(&mut Vm, &FixedSizeOperandBlock);
338            type FixedSizeOperandBlock = [u8; 8];
339
340            BinaryInstruction has a field for HandlerPointer.
341        */
342
343        //vm.handlers[OpCode::Alloc as usize] = HandlerType::Args3(Self::execute_alloc);
344
345        // Store
346        vm.handlers[OpCode::StRegToFrame as usize] =
347            HandlerType::Args6(Self::execute_st_regs_to_frame);
348        vm.handlers[OpCode::StRegToFrameUsingMask as usize] =
349            HandlerType::Args5(Self::execute_st_regs_to_frame_using_mask);
350
351        vm.handlers[OpCode::St32UsingPtrWithOffset as usize] =
352            HandlerType::Args6(Self::execute_stw_using_base_ptr_and_offset);
353        vm.handlers[OpCode::St16UsingPtrWithOffset as usize] =
354            HandlerType::Args6(Self::execute_sth_using_base_ptr_and_offset);
355        vm.handlers[OpCode::St8UsingPtrWithOffset as usize] =
356            HandlerType::Args6(Self::execute_stb_using_base_ptr_and_offset);
357
358        // Load
359        vm.handlers[OpCode::LdRegFromFrameRange as usize] =
360            HandlerType::Args6(Self::execute_ld_regs_from_frame);
361        vm.handlers[OpCode::LdRegFromFrameUsingMask as usize] =
362            HandlerType::Args5(Self::execute_ld_regs_from_frame_using_mask);
363
364        vm.handlers[OpCode::Ld32FromPointerWithOffset as usize] =
365            HandlerType::Args6(Self::execute_ldw_from_base_ptr_and_offset);
366        vm.handlers[OpCode::Ld16FromPointerWithOffset as usize] =
367            HandlerType::Args6(Self::execute_ldh_from_base_ptr_and_offset);
368        vm.handlers[OpCode::Ld8FromPointerWithOffset as usize] =
369            HandlerType::Args6(Self::execute_ldb_from_base_ptr_and_offset);
370
371        // Load immediate
372        vm.handlers[OpCode::Mov8FromImmediateValue as usize] =
373            HandlerType::Args2(Self::execute_mov_8);
374        vm.handlers[OpCode::Mov16FromImmediateValue as usize] =
375            HandlerType::Args3(Self::execute_mov_16);
376        vm.handlers[OpCode::Mov32FromImmediateValue as usize] =
377            HandlerType::Args5(Self::execute_mov_32);
378
379        // Copy data in frame memory
380        vm.handlers[OpCode::MovReg as usize] = HandlerType::Args2(Self::execute_mov_reg);
381        vm.handlers[OpCode::LdPtrFromEffectiveFrameAddress as usize] =
382            HandlerType::Args5(Self::execute_lea);
383
384        vm.handlers[OpCode::Ld32FromAbsoluteAddress as usize] =
385            HandlerType::Args5(Self::execute_ldw_from_absolute_address);
386
387        vm.handlers[OpCode::Ld8FromAbsoluteAddress as usize] =
388            HandlerType::Args5(Self::execute_ldb_from_absolute_address);
389
390        // Copy to and from heap
391        vm.handlers[OpCode::BlockCopy as usize] =
392            HandlerType::Args6(Self::execute_mov_mem_with_immediate_size);
393
394        vm.handlers[OpCode::FrameMemClr as usize] =
395            HandlerType::Args8(Self::execute_frame_memory_clear);
396
397        // Comparisons - Int
398        vm.handlers[OpCode::LtI32 as usize] = HandlerType::Args3(Self::execute_lt_i32);
399        vm.handlers[OpCode::LeI32 as usize] = HandlerType::Args3(Self::execute_le_i32);
400        vm.handlers[OpCode::GtI32 as usize] = HandlerType::Args3(Self::execute_gt_i32);
401        vm.handlers[OpCode::GeI32 as usize] = HandlerType::Args3(Self::execute_ge_i32);
402
403        // Comparison u32
404        vm.handlers[OpCode::LtU32 as usize] = HandlerType::Args3(Self::execute_lt_u32);
405        vm.handlers[OpCode::LeU32 as usize] = HandlerType::Args3(Self::execute_le_u32);
406        vm.handlers[OpCode::GtU32 as usize] = HandlerType::Args3(Self::execute_gt_u32);
407        vm.handlers[OpCode::GeU32 as usize] = HandlerType::Args3(Self::execute_ge_u32);
408
409        // Comparison
410        vm.handlers[OpCode::CmpReg as usize] = HandlerType::Args3(Self::execute_cmp_reg);
411        vm.handlers[OpCode::CmpBlock as usize] = HandlerType::Args5(Self::execute_cmp_block);
412
413        vm.handlers[OpCode::Eq8Imm as usize] = HandlerType::Args3(Self::execute_eq_8_imm);
414        vm.handlers[OpCode::TrapOnLessThan as usize] =
415            HandlerType::Args2(Self::execute_trap_on_less_than);
416
417        // Logical Operations
418        vm.handlers[OpCode::MovEqualToZero as usize] =
419            HandlerType::Args2(Self::execute_move_equal_to_zero);
420
421        // Conditional jumps
422        vm.handlers[OpCode::BFalse as usize] = HandlerType::Args3(Self::execute_branch_if_false);
423        vm.handlers[OpCode::BTrue as usize] = HandlerType::Args3(Self::execute_branch_if_true);
424
425        // Unconditional jump
426        vm.handlers[OpCode::B as usize] = HandlerType::Args2(Self::execute_b);
427
428        // Operators - Int
429        vm.handlers[OpCode::AddU32 as usize] = HandlerType::Args3(Self::execute_add_u32);
430        vm.handlers[OpCode::AddU32Imm as usize] = HandlerType::Args6(Self::execute_add_u32_imm);
431        vm.handlers[OpCode::MulU32 as usize] = HandlerType::Args3(Self::execute_mul_u32);
432        vm.handlers[OpCode::SubU32 as usize] = HandlerType::Args3(Self::execute_sub_u32);
433
434        vm.handlers[OpCode::NegI32 as usize] = HandlerType::Args2(Self::execute_neg_i32);
435        vm.handlers[OpCode::ModI32 as usize] = HandlerType::Args3(Self::execute_mod_i32);
436        vm.handlers[OpCode::DivI32 as usize] = HandlerType::Args3(Self::execute_div_i32);
437
438        // Operators - Float (Fixed Point)
439        vm.handlers[OpCode::DivF32 as usize] = HandlerType::Args3(Self::execute_div_f32);
440        vm.handlers[OpCode::MulF32 as usize] = HandlerType::Args3(Self::execute_mul_f32);
441
442        // Call, enter, ret
443        vm.handlers[OpCode::Call as usize] = HandlerType::Args4(Self::execute_call);
444        vm.handlers[OpCode::Enter as usize] = HandlerType::Args4(Self::execute_enter);
445        vm.handlers[OpCode::Ret as usize] = HandlerType::Args0(Self::execute_ret);
446
447        //vm.handlers[OpCode::HostCall as usize] = HandlerType::Args3(Self::execute_host_call);
448
449        // Halt - return to host
450        vm.handlers[OpCode::Hlt as usize] = HandlerType::Args0(Self::execute_hlt);
451        vm.handlers[OpCode::UserHalt as usize] = HandlerType::Args0(Self::execute_user_halt);
452        vm.handlers[OpCode::Step as usize] = HandlerType::Args0(Self::execute_step);
453        vm.handlers[OpCode::Trap as usize] = HandlerType::Args1(Self::execute_trap);
454        vm.handlers[OpCode::Panic as usize] = HandlerType::Args1(Self::execute_panic);
455
456        vm.handlers[OpCode::CheckU8 as usize] = HandlerType::Args1(Self::execute_check_u8);
457
458        // Codepoint
459        vm.handlers[OpCode::CodepointToString as usize] =
460            HandlerType::Args2(Self::execute_codepoint_to_string);
461
462        // Bool
463        vm.handlers[OpCode::BoolToString as usize] =
464            HandlerType::Args2(Self::execute_bool_to_string);
465
466        // String
467        vm.handlers[OpCode::StringAppend as usize] =
468            HandlerType::Args3(Self::execute_string_append);
469
470        vm.handlers[OpCode::StringDuplicate as usize] =
471            HandlerType::Args2(Self::execute_string_duplicate);
472        vm.handlers[OpCode::BytesToString as usize] =
473            HandlerType::Args2(Self::execute_string_from_bytes);
474        vm.handlers[OpCode::BytesToStringStorage as usize] =
475            HandlerType::Args2(Self::execute_string_storage_from_bytes);
476
477        vm.handlers[OpCode::StringRepeat as usize] =
478            HandlerType::Args3(Self::execute_string_repeat);
479
480        vm.handlers[OpCode::StringCmp as usize] = HandlerType::Args3(Self::execute_string_cmp);
481        vm.handlers[OpCode::StringToString as usize] =
482            HandlerType::Args2(Self::execute_string_to_string);
483        vm.handlers[OpCode::StringStartsWith as usize] =
484            HandlerType::Args3(Self::execute_string_starts_with);
485        vm.handlers[OpCode::StringToInt as usize] = HandlerType::Args2(Self::execute_string_to_int);
486        vm.handlers[OpCode::StringToFloat as usize] =
487            HandlerType::Args2(Self::execute_string_to_float);
488
489        vm.handlers[OpCode::StringIterInit as usize] =
490            HandlerType::Args2(Self::execute_string_iter_init);
491        vm.handlers[OpCode::StringIterNext as usize] =
492            HandlerType::Args4(Self::execute_string_iter_next);
493        vm.handlers[OpCode::StringIterNextPair as usize] =
494            HandlerType::Args5(Self::execute_string_iter_next_pair);
495
496        vm.handlers[OpCode::ByteToString as usize] =
497            HandlerType::Args2(Self::execute_byte_to_string);
498
499        // Int
500        vm.handlers[OpCode::IntToRnd as usize] =
501            HandlerType::Args2(Self::execute_pseudo_random_i32);
502        vm.handlers[OpCode::IntMin as usize] = HandlerType::Args3(Self::execute_min_i32);
503        vm.handlers[OpCode::IntMax as usize] = HandlerType::Args3(Self::execute_max_i32);
504        vm.handlers[OpCode::IntClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);
505
506        vm.handlers[OpCode::IntAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);
507
508        vm.handlers[OpCode::IntToString as usize] = HandlerType::Args2(Self::execute_i32_to_string);
509        vm.handlers[OpCode::IntToFloat as usize] = HandlerType::Args2(Self::execute_i32_to_f32);
510
511        // Float (Fixed Point)
512        vm.handlers[OpCode::FloatPseudoRandom as usize] =
513            HandlerType::Args2(Self::execute_pseudo_random_i32);
514        vm.handlers[OpCode::FloatMin as usize] = HandlerType::Args3(Self::execute_min_i32);
515        vm.handlers[OpCode::FloatMax as usize] = HandlerType::Args3(Self::execute_max_i32);
516        vm.handlers[OpCode::FloatClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);
517
518        vm.handlers[OpCode::FloatRound as usize] = HandlerType::Args2(Self::execute_f32_round);
519        vm.handlers[OpCode::FloatFloor as usize] = HandlerType::Args2(Self::execute_f32_floor);
520        vm.handlers[OpCode::FloatSqrt as usize] = HandlerType::Args2(Self::execute_f32_sqrt);
521        vm.handlers[OpCode::FloatSign as usize] = HandlerType::Args2(Self::execute_f32_sign);
522        vm.handlers[OpCode::FloatAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);
523        vm.handlers[OpCode::FloatSin as usize] = HandlerType::Args2(Self::execute_f32_sin);
524        vm.handlers[OpCode::FloatCos as usize] = HandlerType::Args2(Self::execute_f32_cos);
525        vm.handlers[OpCode::FloatAsin as usize] = HandlerType::Args2(Self::execute_f32_asin);
526        vm.handlers[OpCode::FloatAcos as usize] = HandlerType::Args2(Self::execute_f32_acos);
527        // vm.handlers[OpCode::FloatAtan2 as usize] = HandlerType::Args3(Self::execute_f32_atan2); // TODO:
528        vm.handlers[OpCode::FloatToString as usize] =
529            HandlerType::Args2(Self::execute_f32_to_string);
530        vm.handlers[OpCode::FloatPseudoRandom as usize] =
531            HandlerType::Args2(Self::execute_pseudo_random_i32);
532
533        // Collections ==========
534
535        // Range
536        vm.handlers[OpCode::RangeInit as usize] = HandlerType::Args4(Self::execute_range_init);
537        vm.handlers[OpCode::RangeIterInit as usize] =
538            HandlerType::Args2(Self::execute_range_iter_init);
539        vm.handlers[OpCode::RangeIterNext as usize] =
540            HandlerType::Args4(Self::execute_range_iter_next);
541
542        // Array
543        vm.handlers[OpCode::ArrayInitWithLenAndCapacity as usize] =
544            HandlerType::Args7(Self::execute_array_init);
545
546        // Vec
547        vm.handlers[OpCode::VecInit as usize] = HandlerType::Args7(Self::execute_vec_init);
548        vm.handlers[OpCode::VecCopy as usize] = HandlerType::Args2(Self::execute_vec_copy);
549        vm.handlers[OpCode::VecCopyRange as usize] =
550            HandlerType::Args3(Self::execute_vec_copy_range);
551        vm.handlers[OpCode::VecCmp as usize] = HandlerType::Args3(Self::execute_vec_cmp);
552        vm.handlers[OpCode::VecIterInit as usize] = HandlerType::Args2(Self::execute_vec_iter_init);
553        vm.handlers[OpCode::VecIterNext as usize] = HandlerType::Args4(Self::execute_vec_iter_next);
554        vm.handlers[OpCode::VecIterNextPair as usize] =
555            HandlerType::Args5(Self::execute_vec_iter_next_pair);
556        vm.handlers[OpCode::VecPushAddr as usize] = HandlerType::Args2(Self::execute_vec_push_addr);
557        vm.handlers[OpCode::VecExtend as usize] = HandlerType::Args2(Self::execute_vec_extend);
558        vm.handlers[OpCode::VecGet as usize] = HandlerType::Args3(Self::execute_vec_get);
559        vm.handlers[OpCode::VecPop as usize] = HandlerType::Args2(Self::execute_vec_pop);
560        vm.handlers[OpCode::VecRemoveIndex as usize] =
561            HandlerType::Args2(Self::execute_vec_remove_index);
562
563        // Map
564        vm.handlers[OpCode::MapInitWithCapacityAndKeyAndTupleSizeAddr as usize] =
565            HandlerType::Args7(Self::execute_map_open_addressing_init);
566        vm.handlers[OpCode::MapIterInit as usize] = HandlerType::Args2(Self::execute_map_iter_init);
567        vm.handlers[OpCode::MapIterNext as usize] = HandlerType::Args4(Self::execute_map_iter_next);
568        vm.handlers[OpCode::MapIterNextPair as usize] =
569            HandlerType::Args5(Self::execute_map_iter_next_pair);
570        vm.handlers[OpCode::MapGetEntryLocation as usize] =
571            HandlerType::Args3(Self::execute_map_open_addressing_get_entry_location);
572        vm.handlers[OpCode::MapGetOrReserveEntryLocation as usize] =
573            HandlerType::Args3(Self::execute_map_open_addressing_get_or_reserve_entry);
574        vm.handlers[OpCode::MapHas as usize] =
575            HandlerType::Args3(Self::execute_map_open_addressing_has);
576        vm.handlers[OpCode::MapRemove as usize] =
577            HandlerType::Args2(Self::execute_map_open_addressing_remove);
578        vm.handlers[OpCode::MapOverwrite as usize] =
579            HandlerType::Args2(Self::execute_map_overwrite);
580
581        // Sparse
582        vm.handlers[OpCode::SparseInit as usize] = HandlerType::Args7(Self::execute_sparse_init);
583        vm.handlers[OpCode::SparseAddGiveEntryAddress as usize] =
584            HandlerType::Args7(Self::execute_sparse_add_get_entry_addr);
585        vm.handlers[OpCode::SparseRemove as usize] =
586            HandlerType::Args2(Self::execute_sparse_remove);
587        vm.handlers[OpCode::SparseGetEntryAddr as usize] =
588            HandlerType::Args5(Self::execute_sparse_get_entry_addr);
589        vm.handlers[OpCode::SparseIsAlive as usize] =
590            HandlerType::Args3(Self::execute_sparse_is_alive);
591
592        vm.handlers[OpCode::SparseIterInit as usize] =
593            HandlerType::Args2(Self::execute_sparse_iter_init);
594        vm.handlers[OpCode::SparseIterNext as usize] =
595            HandlerType::Args4(Self::execute_sparse_iter_next);
596        vm.handlers[OpCode::SparseIterNextPair as usize] =
597            HandlerType::Args5(Self::execute_sparse_iter_next_pair);
598
599        vm.handlers[OpCode::GridInit as usize] = HandlerType::Args6(Self::execute_grid_init);
600        vm.handlers[OpCode::GridGetEntryAddr as usize] =
601            HandlerType::Args6(Self::execute_grid_get_entry_addr);
602
603        vm
604    }
    /// Read-only access to the VM's linear memory.
    #[must_use]
    pub const fn memory(&self) -> &Memory {
        &self.memory
    }
609
610    pub fn memory_mut(&mut self) -> &mut Memory {
611        &mut self.memory
612    }
613
    /// Executes exactly one instruction and advances the program counter.
    ///
    /// `HostCall` is special-cased because its handler needs
    /// `host_function_callback`, which the plain handler table cannot carry.
    ///
    /// Returns `true` while execution should continue, `false` once the VM
    /// has reported completion.
    pub fn step(&mut self, host_function_callback: &mut dyn HostFunctionCallback) -> bool {
        let instruction = &self.instructions[self.pc];
        let opcode = instruction.opcode;

        // Memory-layout invariants only hold during normal execution; other
        // execution modes may legitimately violate them.
        if self.memory.execution_mode == NormalExecution {
            assert!(self.memory.stack_offset >= self.memory.constant_memory_size);
            assert!(self.memory.stack_offset <= self.memory.heap_start);
        }

        self.pc += 1; // IP must be added BEFORE handling the instruction

        if opcode == OpCode::HostCall as u8 {
            self.execute_host_call(
                instruction.operands[0],
                instruction.operands[1],
                instruction.operands[2],
                host_function_callback,
            );
        } else {
            // Dispatch through the handler table, unpacking exactly as many
            // operand bytes as the handler's arity requires.
            match self.handlers[opcode as usize] {
                HandlerType::Args0(handler) => handler(self),
                HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                HandlerType::Args2(handler) => {
                    handler(self, instruction.operands[0], instruction.operands[1]);
                }
                HandlerType::Args3(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                ),
                HandlerType::Args4(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                ),
                HandlerType::Args5(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                ),
                HandlerType::Args6(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                ),
                HandlerType::Args7(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                ),
                HandlerType::Args8(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                    instruction.operands[7],
                ),
            }
        }

        !self.execution_complete
    }
695
696    //  Optimization ideas:
697    // ```
698    //    use swamp_vm_types::BinaryInstruction;
699    //
700    //    type Handler = fn(&mut VM, &BinaryInstruction, *mut u32);
701    //
702    //    pub fn run(&mut self) {
703    //         let handlers = unsafe { self.decoded_handlers.get_unchecked(..) };
704    //         let instrs   = unsafe { self.instructions.get_unchecked(..) };
705    //         let regs_ptr = self.regs.as_mut_ptr();
706    //
707    //         while !self.execution_complete {
708    //             let pc = self.pc;
709    //             self.pc += 1;
710    //
711    //             let h: Handler = unsafe { *handlers.get_unchecked(pc) };
712    //
713    //             let inst: &BinaryInstruction = unsafe { &*instrs.as_ptr().add(pc) };
714    //
715    //             h(self, inst, regs_ptr);
716    //         }
717    //     }
718    //```
719    //
    /// Main interpreter loop: fetches, decodes, and dispatches instructions
    /// until `execution_complete` is set (by halt, step, trap, or panic).
    ///
    /// `HostCall` is special-cased ahead of the handler table so the
    /// `host_function_callback` trait object can be threaded through; every
    /// other opcode dispatches through `self.handlers` by operand arity.
    #[allow(clippy::too_many_lines)]
    pub fn execute_internal(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execution_complete = false;

        // Raw pointer avoids a bounds check per fetched instruction.
        let inst_ptr = self.instructions.as_ptr();

        while !self.execution_complete {
            // SAFETY: `self.pc` must stay within `self.instructions`; this is
            // an invariant of the emitted bytecode (all branch targets in range).
            let instruction: &BinaryInstruction = unsafe { &*inst_ptr.add(self.pc) };
            let opcode = instruction.opcode;

            #[cfg(feature = "debug_vm")]
            if self.debug_opcodes_enabled {
                // Dump a fixed set of interesting registers before each opcode.
                let regs = [0, 1, 2, 3, 4, 128, 129, 130];

                for reg in regs {
                    print!(
                        "{}",
                        tinter::bright_black(&format!("{reg:02X}: {:08X}, ", self.registers[reg]))
                    );
                }
                println!();

                let operands = instruction.operands;
                print!("> {:04X}: ", self.pc);
                self.debug_opcode(opcode, &operands);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_stats_enabled {
                self.debug.opcodes_executed += 1;
            }

            self.pc += 1; // IP must be added BEFORE handling the instruction

            if opcode == OpCode::HostCall as u8 {
                self.execute_host_call(
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    host_function_callback,
                );
            } else {
                // Dispatch through the handler table, unpacking only as many
                // operand bytes as the registered handler's arity requires.
                match self.handlers[opcode as usize] {
                    HandlerType::Args0(handler) => handler(self),
                    HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                    HandlerType::Args2(handler) => {
                        handler(self, instruction.operands[0], instruction.operands[1]);
                    }
                    HandlerType::Args3(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                    ),
                    HandlerType::Args4(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                    ),
                    HandlerType::Args5(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                    ),
                    HandlerType::Args6(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                    ),
                    HandlerType::Args7(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                    ),
                    HandlerType::Args8(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                        instruction.operands[7],
                    ),
                }
            }
        }
    }
823
    /// Points register r0 at the address where the callee should write its return value.
    pub const fn set_return_register_address(&mut self, r0_addr: u32) {
        set_reg!(self, 0, r0_addr);
    }
827
828    pub fn set_register_pointer_addr_for_parameter(&mut self, register: u8, addr: u32) {
829        assert!((1..=6).contains(&register), "not a parameter register");
830        set_reg!(self, register, addr);
831    }
832
    /// Positions both the stack pointer and the frame pointer at `addr`.
    pub fn set_stack_start(&mut self, addr: usize) {
        if self.debug_operations_enabled {
            eprintln!("vm: set stack start and frame to: 0x{addr:08X}");
        }
        self.memory.set_stack_and_frame(addr);
    }
839
    /// Continues execution from the current `pc` without resetting any state.
    pub fn resume(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execute_internal(host_function_callback);
    }
843
    /// Starts a fresh run at `ip`: clears the call stack, resets the memory
    /// offset, then enters the interpreter loop.
    pub fn execute_from_ip(
        &mut self,
        ip: &InstructionPosition,
        host_function_callback: &mut dyn HostFunctionCallback,
    ) {
        self.pc = ip.0 as usize;
        if self.debug_operations_enabled {
            eprintln!(
                "starting up the vm, normal_stack_start: {:08X} SP:{:08X} FP:{:08X}",
                self.memory.stack_start, self.memory.stack_offset, self.memory.frame_offset
            );
        }

        // A fresh run must not inherit frames or stack data from a previous one.
        self.call_stack.clear();
        self.memory.reset_offset();

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            eprintln!(
                "start executing --------- frame {:X} heap: {:X}",
                self.memory.frame_offset, self.memory.heap_alloc_offset
            );
        }

        self.execute_internal(host_function_callback);
    }
870
    /// Sets the program counter to the given instruction position.
    pub const fn set_pc(&mut self, pc: &InstructionPosition) {
        self.pc = pc.0 as usize;
    }
874
    /// Current program counter (index into `instructions`).
    pub const fn pc(&self) -> usize {
        self.pc
    }
878
    /// Current frame-pointer offset into VM memory.
    pub fn fp(&self) -> usize {
        self.memory.frame_offset
    }
882
    /// Current stack-pointer offset into VM memory.
    pub fn sp(&self) -> usize {
        self.memory.stack_offset
    }
886
    /// The active call frames.
    pub fn call_stack(&self) -> &[CallFrame] {
        &self.call_stack
    }
890
    /// Fallback handler for opcodes with no registered implementation:
    /// reports the offending opcode (`pc` was already advanced, hence `- 1`)
    /// and aborts the VM with a panic.
    fn execute_unimplemented(&mut self) {
        let unknown_opcode = OpCode::from(self.instructions[self.pc - 1].opcode);
        eprintln!("error: opcode not implemented: {unknown_opcode} {unknown_opcode:?}");
        eprintln!("VM runtime halted.");
        self.debug_output();
        panic!("unknown OPCODE! {unknown_opcode} {unknown_opcode:?}");
    }
898
    /// Raw byte view of memory starting at the current frame pointer.
    ///
    /// NOTE(review): the slice length is the full `memory_size` measured from
    /// the frame pointer, which is itself an offset into the same allocation —
    /// verify this cannot extend past the end of the underlying buffer.
    pub fn frame_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.frame_ptr(), self.memory.memory_size) }
    }
902
    /// Raw byte view of the whole VM memory, starting at heap address 0.
    pub fn heap_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), self.memory.memory_size) }
    }
906
907    pub fn constant_memory(&self) -> &[u8] {
908        unsafe {
909            std::slice::from_raw_parts(
910                self.memory.get_heap_ptr(0),
911                self.memory.constant_memory_size,
912            )
913        }
914    }
915
    /// Byte view of VM memory from address 0 up to (but excluding) `offset`.
    pub fn all_memory_up_to(&self, offset: usize) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), offset) }
    }
919
    /// Size in bytes of the constant memory area.
    pub fn constant_size(&self) -> usize {
        self.memory.constant_memory_size
    }
    /// The currently loaded program.
    #[must_use]
    #[allow(clippy::missing_const_for_fn)]
    pub fn instructions(&self) -> &[BinaryInstruction] {
        &self.instructions
    }
    /// Full reset: memory, allocator, program counter, and call stack.
    pub fn reset(&mut self) {
        self.memory.reset();
        self.memory.reset_allocator();

        self.pc = 0;
        self.execution_complete = false;
        self.call_stack.clear();
    }
936
    /// Rewinds only the heap allocator; `pc` and other state are deliberately untouched.
    pub fn reset_heap_allocator(&mut self) {
        self.memory.reset_allocator();
    }
941
    /// Resets stack pointer, frame pointer, and call stack so execution can restart.
    pub fn reset_minimal_stack_and_fp(&mut self) {
        self.memory.reset_stack_and_fp();
        self.reset_call_stack();
        self.execution_complete = false;
    }
947
    /// Clears the call frames; memory is deliberately left untouched.
    pub fn reset_call_stack(&mut self) {
        self.call_stack.clear();
    }
952
    /// Clears VM and memory debug counters back to their zero state.
    pub fn reset_debug(&mut self) {
        self.debug = Debug::default();
        self.memory.debug = MemoryDebug {
            max_heap_alloc_offset: 0,
        }
    }
959
    /// Current frame-pointer offset into VM memory.
    #[must_use]
    pub fn frame_offset(&self) -> usize {
        self.memory.frame_offset
    }
964
    /// Replaces the program and rewinds execution to instruction 0.
    pub fn load_bytecode(&mut self, instructions: Vec<BinaryInstruction>) {
        self.instructions = instructions;
        self.pc = 0;
        self.execution_complete = false;
    }
970
    /// Loads a 32-bit immediate (four operand bytes, least significant first)
    /// into `dst_reg`.
    #[inline]
    fn execute_mov_32(&mut self, dst_reg: u8, a: u8, b: u8, c: u8, d: u8) {
        set_reg!(self, dst_reg, Self::u8s_to_32(a, b, c, d));
    }
975
    /// Loads a 16-bit immediate (two operand bytes, least significant first)
    /// into `dst_reg`.
    #[inline]
    fn execute_mov_16(&mut self, dst_reg: u8, a: u8, b: u8) {
        set_reg!(self, dst_reg, u16_from_u8s!(a, b));
    }
980
    /// Loads an 8-bit immediate into `dst_reg`.
    #[inline]
    fn execute_mov_8(&mut self, dst_reg: u8, octet: u8) {
        set_reg!(self, dst_reg, octet);
    }
985
986    // Fixed Point special methods
987    #[inline]
988    fn execute_mul_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
989        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
990        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);
991        set_reg!(self, dst_reg, (lhs * rhs).inner());
992    }
993
994    #[inline]
995    fn execute_div_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
996        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
997        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);
998
999        set_reg!(self, dst_reg, (lhs / rhs).inner());
1000    }
1001
1002    #[inline]
1003    fn execute_f32_round(&mut self, dst_reg: u8, val_reg: u8) {
1004        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1005
1006        let int_val: i16 = val.round().into();
1007        set_reg!(self, dst_reg, int_val);
1008    }
1009
1010    #[inline]
1011    fn execute_f32_floor(&mut self, dst_reg: u8, val_reg: u8) {
1012        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1013
1014        let floored: i16 = val.floor().into();
1015        set_reg!(self, dst_reg, floored);
1016    }
1017
1018    #[inline]
1019    fn execute_f32_sqrt(&mut self, dst_reg: u8, val_reg: u8) {
1020        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1021
1022        set_reg!(self, dst_reg, val.sqrt().inner());
1023    }
1024
1025    #[inline]
1026    fn execute_f32_sin(&mut self, dst_reg: u8, val_reg: u8) {
1027        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1028
1029        set_reg!(self, dst_reg, val.sin().inner());
1030    }
1031
1032    #[inline]
1033    fn execute_f32_asin(&mut self, dst_reg: u8, val_reg: u8) {
1034        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1035
1036        set_reg!(self, dst_reg, val.asin().inner());
1037    }
1038
1039    #[inline]
1040    fn execute_f32_cos(&mut self, dst_reg: u8, val_reg: u8) {
1041        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1042
1043        set_reg!(self, dst_reg, val.cos().inner());
1044    }
1045
1046    #[inline]
1047    fn execute_f32_acos(&mut self, dst_reg: u8, val_reg: u8) {
1048        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1049
1050        set_reg!(self, dst_reg, val.acos().inner());
1051    }
1052
1053    /*
1054    #[inline]
1055    fn execute_f32_atan2(&mut self, dst_reg: u8, val_reg: u8, y_reg: u8) {
1056        // TODO: Implement atan2 in fixed32
1057        todo!()
1058    }
1059
1060     */
1061
1062    #[inline]
1063    fn execute_f32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1064        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
1065
1066        self.create_string(dst_reg, &val.to_string())
1067    }
1068
    /// Sign of a fixed-point value: stores -1, 0, or 1 (as fixed point).
    #[inline]
    fn execute_f32_sign(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
        // TODO: signum() is/was incorrect in Fixed32 crate
        set_reg!(
            self,
            dst_reg,
            Fp::from(if val < 0 {
                -1
            } else if val > 0 {
                1
            } else {
                0
            })
            .inner()
        );
    }
1086
1087    #[inline]
1088    fn execute_neg_i32(&mut self, dst_reg: u8, val_reg: u8) {
1089        let val = get_reg!(self, val_reg) as i32;
1090        set_reg!(self, dst_reg, -val);
1091    }
1092
1093    #[inline]
1094    const fn execute_add_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1095        let lhs = get_reg!(self, lhs_reg);
1096        let rhs = get_reg!(self, rhs_reg);
1097
1098        set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
1099    }
1100
1101    #[inline]
1102    fn execute_add_u32_imm(
1103        &mut self,
1104        dst_reg: u8,
1105        lhs_reg: u8,
1106        rhs_1: u8,
1107        rhs_2: u8,
1108        rhs_3: u8,
1109        rhs_4: u8,
1110    ) {
1111        let lhs = get_reg!(self, lhs_reg);
1112        let rhs = u32_from_u8s!(rhs_1, rhs_2, rhs_3, rhs_4);
1113
1114        set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
1115    }
1116
1117    #[inline]
1118    const fn execute_mul_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1119        let lhs = get_reg!(self, lhs_reg);
1120        let rhs = get_reg!(self, rhs_reg);
1121
1122        set_reg!(self, dst_reg, lhs.wrapping_mul(rhs));
1123    }
1124
1125    #[inline]
1126    const fn execute_sub_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1127        let lhs = get_reg!(self, lhs_reg);
1128        let rhs = get_reg!(self, rhs_reg);
1129
1130        set_reg!(self, dst_reg, lhs.wrapping_sub(rhs));
1131    }
1132
1133    /// This is the mathematical modulo, *not* the remainder.
1134    /// Like how it is done in Lua and Python
1135    /// <https://en.wikipedia.org/wiki/Modulo#In_programming_languages>
1136    #[inline]
1137    fn execute_mod_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1138        let lhs = get_reg!(self, lhs_reg) as i32;
1139        let rhs = get_reg!(self, rhs_reg) as i32;
1140
1141        let result = ((lhs % rhs) + rhs) % rhs;
1142        set_reg!(self, dst_reg, result);
1143    }
1144
1145    #[inline]
1146    fn execute_div_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1147        let lhs = get_reg!(self, lhs_reg) as i32;
1148        let rhs = get_reg!(self, rhs_reg) as i32;
1149
1150        let result_option = lhs.checked_div(rhs);
1151
1152        match result_option {
1153            Some(result) => {
1154                set_reg!(self, dst_reg, result);
1155            }
1156            None => {
1157                panic!(
1158                    "VM Runtime Error: Signed 32-bit integer overflow during DIV_I32 (R{dst_reg} = R{lhs_reg} - R{rhs_reg})"
1159                );
1160            }
1161        }
1162    }
1163
1164    #[inline]
1165    fn execute_lt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1166        let lhs = get_reg!(self, lhs_reg) as i32;
1167        let rhs = get_reg!(self, rhs_reg) as i32;
1168        set_reg!(self, dest_bool_reg, lhs < rhs);
1169    }
1170
1171    #[inline]
1172    fn execute_le_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1173        let lhs = get_reg!(self, lhs_reg) as i32;
1174        let rhs = get_reg!(self, rhs_reg) as i32;
1175        set_reg!(self, dest_bool_reg, lhs <= rhs);
1176    }
1177
1178    #[inline]
1179    fn execute_gt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1180        let lhs = get_reg!(self, lhs_reg) as i32;
1181        let rhs = get_reg!(self, rhs_reg) as i32;
1182        set_reg!(self, dest_bool_reg, lhs > rhs);
1183    }
1184
1185    #[inline]
1186    fn execute_ge_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1187        let lhs = get_reg!(self, lhs_reg) as i32;
1188        let rhs = get_reg!(self, rhs_reg) as i32;
1189
1190        set_reg!(self, dest_bool_reg, lhs >= rhs);
1191    }
1192
1193    #[inline]
1194    fn execute_ge_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1195        let lhs = get_reg!(self, lhs_reg);
1196        let rhs = get_reg!(self, rhs_reg);
1197
1198        set_reg!(self, dest_bool_reg, lhs >= rhs);
1199    }
1200
1201    #[inline]
1202    fn execute_lt_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1203        let lhs = get_reg!(self, lhs_reg);
1204        let rhs = get_reg!(self, rhs_reg);
1205
1206        set_reg!(self, dest_bool_reg, lhs < rhs);
1207    }
1208
1209    #[inline]
1210    fn execute_le_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1211        let lhs = get_reg!(self, lhs_reg);
1212        let rhs = get_reg!(self, rhs_reg);
1213
1214        set_reg!(self, dest_bool_reg, lhs <= rhs);
1215    }
1216
1217    #[inline]
1218    fn execute_gt_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1219        let lhs = get_reg!(self, lhs_reg);
1220        let rhs = get_reg!(self, rhs_reg);
1221
1222        set_reg!(self, dest_bool_reg, lhs > rhs);
1223    }
1224
    /// Deterministic pseudo-random value: hashes the source register with
    /// squirrel noise (position = src, seed = 0) and stores it as an i32.
    #[inline]
    fn execute_pseudo_random_i32(&mut self, dst_reg: u8, src_reg: u8) {
        let src = get_reg!(self, src_reg);
        set_reg!(self, dst_reg, squirrel_prng::squirrel_noise5(src, 0) as i32);
    }
1230
    /// Formats a signed integer register as decimal text and stores the
    /// resulting heap string's address in `dst_reg`.
    #[inline]
    fn execute_i32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        let val = get_reg!(self, val_reg) as i32;

        self.create_string(dst_reg, &val.to_string());

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            // Read the string back from VM memory to verify the round-trip.
            let read_back_string = self.read_string(get_reg!(self, dst_reg), self.memory());
            eprintln!("i32_to_string: {val}, {dst_reg} '{read_back_string}'");
        }
    }
1243
1244    #[inline]
1245    fn execute_bool_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1246        let val = get_reg!(self, val_reg) != 0;
1247
1248        self.create_string(dst_reg, &val.to_string());
1249    }
1250
    /// Lookup table of uppercase hexadecimal digit characters.
    const HEX_DIGITS: &'static [u8; 16] = b"0123456789ABCDEF";
1252
    /// Formats `val` as `0xNN` (uppercase hex) into the 4-byte buffer.
    #[inline]
    const fn byte_to_prefixed_hex(val: u8, dst: &mut [u8; 4]) {
        dst[0] = b'0';
        dst[1] = b'x';
        // High nibble first, then low nibble.
        dst[2] = Self::HEX_DIGITS[(val >> 4) as usize];
        dst[3] = Self::HEX_DIGITS[(val & 0x0F) as usize];
    }
1260
    /// Formats the low byte of a register as `0xNN` and stores the resulting
    /// heap string's address in `dst_reg`.
    #[inline]
    fn execute_byte_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        // Registers are u32; the value is expected to already fit in a byte.
        let val = get_reg!(self, val_reg);
        debug_assert!(val <= 0xff, "byte out of range");

        let mut buf = [0u8; 4];
        Self::byte_to_prefixed_hex(val as u8, &mut buf);

        // Safety: we know buf is valid ASCII
        let s = unsafe { std::str::from_utf8_unchecked(&buf) };

        self.create_string(dst_reg, s);
    }
1274
    /// Converts a Unicode scalar value held in a register into a
    /// one-character heap string.
    ///
    /// NOTE(review): `from_u32` returns `None` for surrogates and values
    /// above U+10FFFF; the `unwrap` turns a malformed codepoint into a host
    /// panic rather than a VM trap — confirm this is intended.
    #[inline]
    fn execute_codepoint_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        let char_raw = get_reg!(self, val_reg);
        let char = std::char::from_u32(char_raw).unwrap();
        self.create_string(dst_reg, &char.to_string());
    }
1281
    /// Converts a signed integer register into fixed point.
    ///
    /// NOTE(review): the value is narrowed with `as i16` before the `Fp`
    /// conversion, silently truncating anything outside the i16 range —
    /// verify the integer range here is really meant to be 16-bit.
    #[inline]
    fn execute_i32_to_f32(&mut self, float_dest_reg: u8, int_source_reg: u8) {
        let int_source = get_reg!(self, int_source_reg) as i32;
        set_reg!(self, float_dest_reg, Fp::from(int_source as i16).inner());
    }
1287
1288    #[inline]
1289    fn execute_abs_i32(&mut self, dst_reg: u8, val_reg: u8) {
1290        let val = get_reg!(self, val_reg) as i32;
1291        set_reg!(self, dst_reg, if val < 0 { -val } else { val });
1292    }
1293
1294    #[inline]
1295    fn execute_min_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1296        let lhs = get_reg!(self, lhs_reg) as i32;
1297        let rhs = get_reg!(self, rhs_reg) as i32;
1298
1299        set_reg!(self, dst_reg, if lhs < rhs { lhs } else { rhs });
1300    }
1301
1302    #[inline]
1303    fn execute_max_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1304        let lhs = get_reg!(self, lhs_reg) as i32;
1305        let rhs = get_reg!(self, rhs_reg) as i32;
1306
1307        set_reg!(self, dst_reg, if lhs > rhs { lhs } else { rhs });
1308    }
1309
1310    #[inline]
1311    fn execute_clamp_i32(&mut self, dst_reg: u8, val_reg: u8, min_reg: u8, max_reg: u8) {
1312        let val = get_reg!(self, val_reg) as i32;
1313        let min_val = get_reg!(self, min_reg) as i32;
1314        let max_val = get_reg!(self, max_reg) as i32;
1315
1316        set_reg!(
1317            self,
1318            dst_reg,
1319            if val < min_val {
1320                min_val
1321            } else if val > max_val {
1322                max_val
1323            } else {
1324                val
1325            }
1326        );
1327    }
1328
1329    // Sort of the same as `sub`
1330    #[inline]
1331    fn execute_cmp_reg(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1332        set_reg!(
1333            self,
1334            dest_bool_reg,
1335            self.registers[lhs_reg as usize] == self.registers[rhs_reg as usize]
1336        );
1337    }
1338
    // Sort of the same as `sub`
    /// Equality against an 8-bit immediate: `dest = (val == octet)`.
    #[inline]
    fn execute_eq_8_imm(&mut self, dest_bool_reg: u8, val_reg: u8, octet: u8) {
        let compare = get_reg!(self, val_reg);
        set_reg!(self, dest_bool_reg, compare == octet as u32);
        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!(
                "{compare} {octet} result: {}",
                get_reg!(self, dest_bool_reg)
            )
        }
    }
1352
    /// Traps with `U8CheckFailed` when the register value does not fit in an
    /// unsigned byte.
    #[inline]
    fn execute_check_u8(&mut self, check_u8_reg: u8) {
        let compare = get_reg!(self, check_u8_reg);
        if compare > 0xff {
            self.internal_trap(TrapCode::U8CheckFailed);
        }
    }
1360
    /// Traps with `LessThanTrap` when `a < b` (unsigned register compare).
    #[inline]
    fn execute_trap_on_less_than(&mut self, a_reg: u8, b_reg: u8) {
        let a = get_reg!(self, a_reg);
        let b = get_reg!(self, b_reg);
        if a < b {
            self.internal_trap(TrapCode::LessThanTrap { a, b })
        }
    }
1369
    /// Boolean "is zero": `dst = (src == 0)`.
    #[inline]
    fn execute_move_equal_to_zero(&mut self, dst_reg: u8, src_reg: u8) {
        set_reg!(self, dst_reg, get_reg!(self, src_reg) == 0);
    }
1374
    /// Conditional relative branch, taken when the test register is zero.
    /// The signed 16-bit offset is applied to the already-incremented `pc`.
    #[inline]
    const fn execute_branch_if_false(
        &mut self,
        test_reg: u8,
        branch_offset_0: u8,
        branch_offset_1: u8,
    ) {
        if get_reg!(self, test_reg) == 0 {
            self.pc =
                (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
        }
    }
1387
    /// Conditional relative branch, taken when the test register is non-zero.
    /// The signed 16-bit offset is applied to the already-incremented `pc`.
    #[inline]
    const fn execute_branch_if_true(
        &mut self,
        test_reg: u8,
        branch_offset_0: u8,
        branch_offset_1: u8,
    ) {
        if get_reg!(self, test_reg) != 0 {
            self.pc =
                (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
        }
    }
1400
    /// Unconditional relative branch: applies the signed 16-bit offset to
    /// the already-incremented `pc`.
    #[inline]
    fn execute_b(&mut self, branch_offset_0: u8, branch_offset_1: u8) {
        self.pc =
            (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
    }
1406
    /// Halts the interpreter loop; `self.state` is left unchanged.
    #[inline]
    fn execute_hlt(&mut self) {
        self.execution_complete = true;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1415
    /// User-requested halt: stops the loop and records `VmState::Halt`.
    #[inline]
    fn execute_user_halt(&mut self) {
        self.execution_complete = true;
        self.state = VmState::Halt;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1425
    /// Single-step suspension: stops the loop and records `VmState::Step`
    /// so the host can `resume` later.
    #[inline]
    fn execute_step(&mut self) {
        self.execution_complete = true;
        self.state = VmState::Step;
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1435
    /// Raises a trap from the bytecode's operand byte.
    ///
    /// NOTE(review): `try_from(..).unwrap()` panics the host process on an
    /// operand that is not a valid `TrapCode` — confirm the compiler can
    /// never emit one.
    #[inline]
    fn execute_trap(&mut self, trap_code: u8) {
        self.internal_trap(TrapCode::try_from(trap_code).unwrap());
    }
1440
    /// Stops execution and records the trap code in `self.state`.
    pub fn internal_trap(&mut self, trap_code: TrapCode) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("vm trap: '{trap_code}'");
        }
        self.state = VmState::Trap(trap_code);

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1455
    /// Script-level panic: reads the reason string from the heap address in
    /// `panic_reason_reg`, records it in `VmState::Panic`, and stops.
    #[inline]
    fn execute_panic(&mut self, panic_reason_reg: u8) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }

        let heap_addr = get_reg!(self, panic_reason_reg);
        let str = self.read_string(heap_addr, &self.memory);

        // NOTE(review): this print is gated on debug_stats_enabled while the
        // other operation logs use debug_operations_enabled — confirm the
        // intended flag.
        #[cfg(feature = "debug_vm")]
        if self.debug_stats_enabled {
            eprintln!("panic: {str}");
        }

        self.state = VmState::Panic(str.to_string());
    }
1475
    /// Prints aggregate execution statistics to stderr.
    fn debug_output(&self) {
        eprintln!(
            "total opcodes executed: {}, call_stack_depth: {}, max_call_depth:{}",
            self.debug.opcodes_executed, self.debug.call_depth, self.debug.max_call_depth
        );
    }
1482
    /// Register-to-register copy: `dst = src`.
    #[inline]
    fn execute_mov_reg(&mut self, dst_reg: u8, src_reg: u8) {
        self.registers[dst_reg as usize] = self.registers[src_reg as usize];
    }
1487
    /// Spills `count` consecutive registers, starting at `start_reg`, into
    /// frame memory at the given 32-bit frame offset (offset bytes arrive
    /// least significant first).
    #[inline]
    fn execute_st_regs_to_frame(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        start_reg: u8,
        count: u8,
    ) {
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );
        let const_reg_ptr = &self.registers[start_reg as usize] as *const u32;
        let target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        // SAFETY: relies on the compiler emitting start_reg + count within
        // the register file and the target span staying inside frame memory.
        unsafe {
            ptr::copy_nonoverlapping(const_reg_ptr, target_ptr, count as usize);
        }
    }
1510
    /// Spills the registers r0..r7 selected by `reg_mask` (bit i selects ri)
    /// into consecutive u32 frame slots; unselected registers consume no slot.
    #[inline]
    fn execute_st_regs_to_frame_using_mask(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        reg_mask: u8,
    ) {
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );

        let mut target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        let mut const_reg_ptr = &self.registers[0usize] as *const u32;
        let mut mask = reg_mask;
        for _ in 0..8 {
            if (mask & 0x1) != 0 {
                // SAFETY: target stays within frame memory (at most 8 slots);
                // the register pointer walks r0..r7 inside the register file.
                unsafe {
                    ptr::write(target_ptr, *const_reg_ptr);
                    target_ptr = target_ptr.add(1);
                }
            }
            mask >>= 1;
            // Advance the register pointer whether or not the bit was set.
            unsafe {
                const_reg_ptr = const_reg_ptr.add(1);
            }
        }
    }
1543
    /// Stores the 32-bit value of `src_reg` at `[base_ptr_reg] + offset`
    /// (offset bytes arrive least significant first).
    #[inline]
    fn execute_stw_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u32;
        let value_to_copy = get_reg!(self, src_reg);

        // SAFETY: the pointer is resolved against VM-owned memory; note there
        // is no alignment trap here, unlike the word load.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
1563
    /// Stores the low 16 bits of `src_reg` at `[base_ptr_reg] + offset`.
    #[inline]
    fn execute_sth_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u16;
        // The register is u32; the upper half is discarded.
        let value_to_copy = get_reg!(self, src_reg) as u16;

        // SAFETY: the pointer is resolved against VM-owned memory.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
    /// Stores the low 8 bits of `src_reg` at `[base_ptr_reg] + offset`.
    #[inline]
    fn execute_stb_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset);
        // The register is u32; only the lowest byte is written.
        let value_to_copy = get_reg!(self, src_reg) as u8;

        // SAFETY: the pointer is resolved against VM-owned memory.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
1602
    /// Loads one byte from `[base_ptr_reg] + offset` into `dst_reg`.
    #[inline]
    pub fn execute_ldb_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from = self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset);
        // SAFETY: the pointer is resolved against VM-owned memory;
        // single-byte reads need no alignment check.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1619
1620    #[inline]
1621    pub fn execute_ldw_from_base_ptr_and_offset(
1622        &mut self,
1623        dst_reg: u8,
1624        base_ptr_reg: u8,
1625        offset_0: u8,
1626        offset_1: u8,
1627        offset_2: u8,
1628        offset_3: u8,
1629    ) {
1630        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1631        let ptr_to_read_from =
1632            self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u32;
1633
1634        // u32 must be 4-byte aligned
1635        let raw_ptr = self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as usize;
1636
1637        let base_addr = get_reg!(self, base_ptr_reg);
1638        const ALIGN: usize = std::mem::align_of::<u32>(); // == 4
1639        if (base_addr as usize) & (ALIGN - 1) != 0 {
1640            eprintln!("base_addr {base_addr} offset {offset}");
1641            self.internal_trap(TrapCode::Misaligned);
1642            return;
1643        }
1644
1645        if raw_ptr & (ALIGN - 1) != 0 {
1646            self.internal_trap(TrapCode::Misaligned);
1647            return;
1648        }
1649
1650        unsafe {
1651            set_reg!(self, dst_reg, *ptr_to_read_from);
1652        }
1653    }
1654
    /// Loads a 32-bit word from an absolute VM memory address (32-bit
    /// immediate) into `dst_reg`.
    ///
    /// NOTE(review): unlike the register+offset word load there is no
    /// alignment trap here — confirm absolute word addresses are always
    /// 4-byte aligned by construction.
    #[inline]
    fn execute_ldw_from_absolute_address(
        &mut self,
        dst_reg: u8,
        addr_0: u8,
        addr_1: u8,
        addr_2: u8,
        addr_3: u8,
    ) {
        let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);

        let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize) as *const u32;

        // SAFETY: the pointer is resolved against VM-owned memory.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1672
    /// Loads one byte from an absolute VM memory address (32-bit immediate)
    /// into `dst_reg`.
    #[inline]
    fn execute_ldb_from_absolute_address(
        &mut self,
        dst_reg: u8,
        addr_0: u8,
        addr_1: u8,
        addr_2: u8,
        addr_3: u8,
    ) {
        let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);

        let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize);

        // SAFETY: the pointer is resolved against VM-owned memory;
        // single-byte reads need no alignment check.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1690
1691    #[inline]
1692    pub fn execute_ldh_from_base_ptr_and_offset(
1693        &mut self,
1694        dst_reg: u8,
1695        base_ptr_reg: u8,
1696        offset_0: u8,
1697        offset_1: u8,
1698        offset_2: u8,
1699        offset_3: u8,
1700    ) {
1701        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1702        let ptr_to_read_from =
1703            self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u16;
1704        unsafe {
1705            set_reg!(self, dst_reg, *ptr_to_read_from);
1706        }
1707    }
1708
1709    #[inline]
1710    pub fn execute_ld_regs_from_frame(
1711        &mut self,
1712        start_reg: u8,
1713        offset_0: u8,
1714        offset_1: u8,
1715        offset_2: u8,
1716        offset_3: u8,
1717        count: u8,
1718    ) {
1719        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1720        let target_reg_ptr = &mut self.registers[start_reg as usize] as *mut u32;
1721        let source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
1722        unsafe {
1723            ptr::copy_nonoverlapping(source_frame_start, target_reg_ptr, count as usize);
1724        }
1725    }
1726
1727    #[inline]
1728    pub fn execute_ld_regs_from_frame_using_mask(
1729        &mut self,
1730        reg_mask: u8,
1731        offset_0: u8,
1732        offset_1: u8,
1733        offset_2: u8,
1734        offset_3: u8,
1735    ) {
1736        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1737        let mut target_reg_ptr = &mut self.registers[0usize] as *mut u32;
1738        let mut source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
1739        let mut mask = reg_mask;
1740        for _ in 0..8 {
1741            if mask & 0x01 != 0 {
1742                unsafe {
1743                    ptr::write(target_reg_ptr, *source_frame_start);
1744                    source_frame_start = source_frame_start.add(1);
1745                }
1746            }
1747            mask >>= 1;
1748            unsafe {
1749                target_reg_ptr = target_reg_ptr.add(1);
1750            }
1751        }
1752    }
1753
1754    #[inline]
1755    fn execute_lea(&mut self, dst_reg: u8, offset_0: u8, offset_1: u8, offset_2: u8, offset_3: u8) {
1756        let current_fp_addr = self.memory.frame_offset as u32;
1757        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1758        set_reg!(self, dst_reg, current_fp_addr + offset);
1759    }
1760
1761    #[inline]
1762    pub fn execute_frame_memory_clear(
1763        &mut self,
1764        dst_pointer_0: u8,
1765        dst_pointer_1: u8,
1766        dst_pointer_2: u8,
1767        dst_pointer_3: u8,
1768        memory_size_0: u8,
1769        memory_size_1: u8,
1770        memory_size_2: u8,
1771        memory_size_3: u8,
1772    ) {
1773        let frame_offset =
1774            u32_from_u8s!(dst_pointer_0, dst_pointer_1, dst_pointer_2, dst_pointer_3);
1775        let total_bytes = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);
1776
1777        assert!(
1778            frame_offset + total_bytes < self.memory.memory_size as u32,
1779            "trying to overwrite memory!"
1780        );
1781        let dst_ptr = self.memory.get_frame_ptr(frame_offset);
1782
1783        unsafe {
1784            ptr::write_bytes(dst_ptr, 0, total_bytes as usize);
1785        }
1786    }
1787
1788    #[inline]
1789    fn execute_mov_mem_with_immediate_size(
1790        &mut self,
1791        dst_pointer_reg: u8,
1792        src_pointer_reg: u8,
1793        memory_size_0: u8,
1794        memory_size_1: u8,
1795        memory_size_2: u8,
1796        memory_size_3: u8,
1797    ) {
1798        let dest_addr = get_reg!(self, dst_pointer_reg);
1799        let src_addr = get_reg!(self, src_pointer_reg);
1800        let memory_size = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);
1801        assert!(
1802            src_addr + memory_size < self.memory.memory_size as u32,
1803            "trying to overwrite memory"
1804        );
1805
1806        // Check for overlapping memory regions
1807        let dest_end = dest_addr + memory_size;
1808        let src_end = src_addr + memory_size;
1809
1810        if dest_addr < src_end && src_addr < dest_end {
1811            return self.internal_trap(TrapCode::OverlappingMemoryCopy);
1812        }
1813
1814        #[cfg(feature = "debug_vm")]
1815        if self.debug_operations_enabled {
1816            eprintln!(
1817                "{:04X}> BLKCPY Size={:08X} \n  \
1818                DST_ADDR=0x{:08X}\n  \
1819                SRC_ADDR=0x{:08X}",
1820                self.pc - 1,
1821                memory_size,
1822                dest_addr,
1823                src_addr,
1824            );
1825        }
1826
1827        let dst_ptr = self.memory.get_heap_ptr(dest_addr as usize);
1828        let src_ptr = self.memory.get_heap_const_ptr(src_addr as usize);
1829
1830        unsafe {
1831            ptr::copy_nonoverlapping(src_ptr, dst_ptr, memory_size as usize);
1832        }
1833    }
1834
1835    #[inline]
1836    fn execute_cmp_block(
1837        &mut self,
1838        dest_bool_reg: u8,
1839        src_addr_reg_a: u8,
1840        src_addr_reg_b: u8,
1841        size_lower: u8,
1842        size_upper: u8,
1843    ) {
1844        let size = u16_from_u8s!(size_lower, size_upper) as usize;
1845
1846        let arc_addr_a = get_reg!(self, src_addr_reg_a);
1847        let src_addr_b = get_reg!(self, src_addr_reg_b);
1848
1849        let src_ptr_a = self.memory.get_heap_const_ptr(arc_addr_a as usize);
1850        let src_ptr_b = self.memory.get_heap_const_ptr(src_addr_b as usize);
1851
1852        unsafe {
1853            let slice_a = std::slice::from_raw_parts(src_ptr_a, size);
1854            let slice_b = std::slice::from_raw_parts(src_ptr_b, size);
1855
1856            set_reg!(self, dest_bool_reg, slice_a == slice_b);
1857        }
1858    }
1859
1860    #[cfg(feature = "debug_vm")]
1861    pub fn debug_opcode(&self, opcode: u8, operands: &[u8; 8]) {
1862        eprintln!(
1863            "{:8} {}",
1864            OpCode::from(opcode),
1865            match self.handlers[opcode as usize] {
1866                HandlerType::Args0(_) => String::new(),
1867                HandlerType::Args1(_) => format!("{:04X}", operands[0]),
1868                HandlerType::Args2(_) => format!("{:04X}, {:04X}", operands[0], operands[1]),
1869                HandlerType::Args3(_) => format!(
1870                    "{:04X}, {:04X}, {:04X}",
1871                    operands[0], operands[1], operands[2]
1872                ),
1873                HandlerType::Args4(_) => format!(
1874                    "{:04X}, {:04X}, {:04X}, {:04X}",
1875                    operands[0], operands[1], operands[2], operands[3]
1876                ),
1877                HandlerType::Args5(_) => format!(
1878                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1879                    operands[0], operands[1], operands[2], operands[3], operands[4],
1880                ),
1881                HandlerType::Args6(_) => format!(
1882                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1883                    operands[0], operands[1], operands[2], operands[3], operands[4], operands[5],
1884                ),
1885                HandlerType::Args7(_) => format!(
1886                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1887                    operands[0],
1888                    operands[1],
1889                    operands[2],
1890                    operands[3],
1891                    operands[4],
1892                    operands[5],
1893                    operands[6],
1894                ),
1895                HandlerType::Args8(_) => format!(
1896                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
1897                    operands[0],
1898                    operands[1],
1899                    operands[2],
1900                    operands[3],
1901                    operands[4],
1902                    operands[5],
1903                    operands[6],
1904                    operands[7],
1905                ),
1906            }
1907        );
1908    }
1909
1910    fn execute_call(
1911        &mut self,
1912        absolute_pc_a: u8,
1913        absolute_pc_b: u8,
1914        absolute_pc_c: u8,
1915        absolute_pc_d: u8,
1916    ) {
1917        let absolute_pc = u32_from_u8s!(absolute_pc_a, absolute_pc_b, absolute_pc_c, absolute_pc_d);
1918        let return_info = CallFrame {
1919            return_address: self.pc + 1,
1920            previous_frame_offset: self.memory.frame_offset,
1921            previous_stack_offset: self.memory.stack_offset,
1922        };
1923
1924        //self.memory.set_fp(); // we do not modify fp
1925        self.call_stack.push(return_info);
1926        self.pc = absolute_pc as usize;
1927
1928        #[cfg(feature = "debug_vm")]
1929        if self.debug_stats_enabled {
1930            self.debug.call_depth += 1;
1931            if self.debug.call_depth > self.debug.max_call_depth {
1932                self.debug.max_call_depth = self.debug.call_depth;
1933            }
1934        }
1935    }
1936
1937    #[inline]
1938    fn execute_host_call(
1939        &mut self,
1940        function_id_lower: u8,
1941        function_id_upper: u8,
1942        register_count: u8,
1943        callback: &mut dyn HostFunctionCallback,
1944    ) {
1945        let heap = self.memory();
1946
1947        let function_id = u8s_to_u16!(function_id_lower, function_id_upper);
1948
1949        unsafe {
1950            let host_args = HostArgs::new(
1951                function_id,
1952                heap.memory,
1953                heap.memory_size,
1954                heap.stack_offset,
1955                self.registers.as_mut_ptr(),
1956                register_count as usize + 1,
1957            );
1958
1959            callback.dispatch_host_call(host_args);
1960        }
1961    }
1962
1963    #[allow(clippy::missing_const_for_fn)]
1964    #[inline(always)]
1965    fn execute_enter(
1966        &mut self,
1967        frame_size_0: u8,
1968        frame_size_1: u8,
1969        frame_size_2: u8,
1970        frame_size_3: u8,
1971    ) {
1972        let frame_size = u32_from_u8s!(frame_size_0, frame_size_1, frame_size_2, frame_size_3);
1973        self.memory.set_fp_from_sp(); // set the frame pointer to what sp is now
1974        self.memory.inc_sp(frame_size as usize);
1975        #[cfg(feature = "debug_vm")]
1976        if self.debug_stats_enabled && self.memory.stack_offset > self.debug.max_stack_offset {
1977            self.debug.max_stack_offset = self.memory.stack_offset - self.memory.stack_start;
1978        }
1979    }
1980
1981    #[inline]
1982    fn execute_ret(&mut self) {
1983        let call_frame = self.call_stack.pop().unwrap();
1984
1985        self.memory.pop(
1986            call_frame.previous_frame_offset,
1987            call_frame.previous_stack_offset,
1988        );
1989
1990        // going back to the old instruction
1991        self.pc = call_frame.return_address;
1992        self.pc -= 1; // Adjust for automatic increment
1993
1994        // NOTE: Any return value is always at frame_offset + 0
1995
1996        #[cfg(feature = "debug_vm")]
1997        if self.debug_stats_enabled {
1998            self.debug.call_depth -= 1;
1999        }
2000    }
2001
2002    #[inline]
2003    const fn u8s_to_32(a: u8, b: u8, c: u8, d: u8) -> u32 {
2004        u32::from_le_bytes([a, b, c, d])
2005    }
2006
2007    #[inline]
2008    pub fn get_const_ptr_from_reg(&self, reg: u8) -> *const u8 {
2009        let ptr_addr = get_reg!(self, reg);
2010        self.memory.get_heap_const_ptr(ptr_addr as usize)
2011    }
2012
2013    #[inline]
2014    pub fn get_const_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *const u8 {
2015        let ptr_addr = get_reg!(self, reg) + offset;
2016        self.memory.get_heap_const_ptr(ptr_addr as usize)
2017    }
2018
2019    #[inline]
2020    pub fn get_ptr_from_reg(&self, reg: u8) -> *mut u8 {
2021        let ptr_addr = get_reg!(self, reg);
2022        self.memory.get_heap_ptr(ptr_addr as usize)
2023    }
2024
2025    #[inline]
2026    pub fn get_ptr_and_addr_from_reg(&self, reg: u8) -> (*mut u8, u32) {
2027        let ptr_addr = get_reg!(self, reg);
2028        (self.memory.get_heap_ptr(ptr_addr as usize), ptr_addr)
2029    }
2030
2031    #[inline]
2032    pub fn get_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *mut u8 {
2033        let ptr_addr = get_reg!(self, reg) + offset;
2034        self.memory.get_heap_ptr(ptr_addr as usize)
2035    }
2036}