1extern crate core;
6
7use crate::host::{HostArgs, HostFunctionCallback};
8use crate::memory::ExecutionMode::NormalExecution;
9use crate::memory::{Memory, MemoryDebug};
10use crate::VmState::Normal;
11use fixed32::Fp;
12use std::error::Error;
13use std::fmt::{Display, Formatter};
14use std::mem::discriminant;
15use std::ptr;
16use std::str::FromStr;
17use swamp_vm_types::opcode::OpCode;
18use swamp_vm_types::{BinaryInstruction, InstructionPosition};
19
20mod grid;
21pub mod host;
22pub mod map_open;
23pub mod memory;
24pub mod prelude;
25mod range;
26mod sparse;
27mod string;
28mod vec;
29
/// Combines two bytes into a `u16`, little-endian: `$lsb` is the low byte,
/// `$msb` the high byte.
///
/// NOTE(review): this is byte-for-byte identical to `u16_from_u8s!` below —
/// the two look like candidates for consolidation.
#[macro_export]
macro_rules! u8s_to_u16 {
    ($lsb:expr, $msb:expr) => {
        (($msb as u16) << 8) | ($lsb as u16)
    };
}
40
/// Combines two bytes into an `i16`, little-endian. The assembly happens in
/// `u16` space and the final `as i16` reinterprets the bit pattern, so the
/// sign bit comes from bit 7 of `$msb`.
#[macro_export]
macro_rules! i16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        ((($msb as u16) << 8) | ($lsb as u16)) as i16
    };
}
51
/// Combines four bytes into a `u32`, little-endian: `$lsb` is byte 0 (lowest),
/// `$msb3` is byte 3 (highest).
#[macro_export]
macro_rules! u32_from_u8s {
    ($lsb:expr, $msb:expr, $msb2:expr, $msb3:expr) => {
        (($msb3 as u32) << 24) | (($msb2 as u32) << 16) | (($msb as u32) << 8) | ($lsb as u32)
    };
}
58
/// Combines two bytes into a `u16`, little-endian.
///
/// NOTE(review): duplicate of `u8s_to_u16!` above.
#[macro_export]
macro_rules! u16_from_u8s {
    ($lsb:expr, $msb:expr) => {
        (($msb as u16) << 8) | ($lsb as u16)
    };
}
65
/// Reads register `$reg_idx` (a `u8` index into the 256-entry register file)
/// from `$vm`. Expands to a plain indexed read, so it can be used in `const fn`s.
#[macro_export]
macro_rules! get_reg {
    ($vm:expr, $reg_idx:expr) => {
        $vm.registers[$reg_idx as usize]
    };
}
72
/// Writes `$value` into register `$reg_idx` of `$vm`.
///
/// `$value as u32` means wider values are silently truncated to 32 bits and
/// `bool`s are stored as 0/1 — callers rely on both behaviors.
#[macro_export]
macro_rules! set_reg {
    ($vm:expr, $reg_idx:expr, $value:expr) => {
        $vm.registers[$reg_idx as usize] = $value as u32;
    };
}
84
// Opcode handler signatures, one per operand arity (0 through 8 `u8`
// operands). Every handler is a plain `fn` pointer taking `&mut Vm` so the
// dispatch table below is a flat, `Copy`-able array.
type Handler0 = fn(&mut Vm);
type Handler1 = fn(&mut Vm, u8);
type Handler2 = fn(&mut Vm, u8, u8);
type Handler3 = fn(&mut Vm, u8, u8, u8);
type Handler4 = fn(&mut Vm, u8, u8, u8, u8);
type Handler5 = fn(&mut Vm, u8, u8, u8, u8, u8);
type Handler6 = fn(&mut Vm, u8, u8, u8, u8, u8, u8);
type Handler7 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8);
type Handler8 = fn(&mut Vm, u8, u8, u8, u8, u8, u8, u8, u8);
94
/// A dispatch-table entry: a handler function pointer tagged with how many
/// operand bytes the instruction carries, so the interpreter loop knows how
/// many `instruction.operands[..]` values to pass.
#[derive(Copy, Clone)]
enum HandlerType {
    Args0(Handler0),
    Args1(Handler1),
    Args2(Handler2),
    Args3(Handler3),
    Args4(Handler4),
    Args5(Handler5),
    Args6(Handler6),
    Args7(Handler7),
    Args8(Handler8),
}
107
/// Execution statistics collected while `debug_stats_enabled` is set
/// (under the `debug_vm` feature).
#[derive(Debug, Default)]
pub struct Debug {
    // Total instructions dispatched.
    pub opcodes_executed: usize,
    // Current call-stack depth.
    pub call_depth: usize,
    // High-water mark of `call_depth`.
    pub max_call_depth: usize,
    // High-water mark of the stack offset.
    pub max_stack_offset: usize,
}
115
/// One activation record on the VM's call stack.
///
/// Holds everything needed to unwind back to the caller: where to resume
/// execution and the caller's frame/stack offsets (presumably restored by
/// the `Ret` handler — confirm against `execute_ret`).
pub struct CallFrame {
    /// Instruction index to resume at after returning.
    pub return_address: usize,
    /// Caller's frame offset.
    pub previous_frame_offset: usize,
    /// Caller's stack offset.
    pub previous_stack_offset: usize,
}
// Alias for the register word size. NOTE(review): not referenced anywhere in
// this chunk (`registers` uses `u32` directly) — TODO confirm it is used
// elsewhere or remove it.
type RegContents = u32;
123
/// Reasons the VM can enter `VmState::Trap`.
///
/// Payload-carrying variants record diagnostic values at trap time; when a
/// code is reconstructed from a `u8` or a string, those payloads are zeroed —
/// compare with [`TrapCode::is_sort_of_equal`] in that case.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum TrapCode {
    StoppedByTestHarness,
    // Vec index out of bounds: the offending index and the vec's length.
    VecBoundsFail {
        encountered: usize,
        element_count: usize,
    },
    MapOutOfSpace,
    MapEntryNotFound,
    MapEntryNotFoundAndCouldNotBeCreated,
    MapEntryNotFoundForRemoval,
    // Raised by `TrapOnLessThan` with the two compared raw register values.
    LessThanTrap {
        a: u32,
        b: u32,
    },
    SparseOutOfSpace,
    SparseRemoveFailed,
    SparseGetFailed,
    MapCouldNotBeCopied,
    OverlappingMemoryCopy,
    MemoryCorruption,
    // Push past capacity: attempted length and the fixed capacity.
    VecOutOfCapacity {
        encountered: u16,
        capacity: u16,
    },
    VecEmpty,
    VecNeverInitialized,
    GridBoundsXFail {
        x: u32,
        width: u16,
    },
    GridBoundsYFail {
        y: u32,
        height: u16,
    },
    GridBoundsFail,
    InvalidUtf8Sequence,
    UnalignedAccess,
    ReverseRangeNotAllowedHere,
}
164
impl TrapCode {
    /// Compares only the variant, ignoring any payload fields.
    ///
    /// Useful when one side was parsed from a `u8`/string (payloads zeroed)
    /// and the other was raised at runtime with real diagnostic values.
    pub fn is_sort_of_equal(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}
170
/// Decodes the single operand byte of the `Trap` opcode into a `TrapCode`.
/// Payload-carrying variants come back with zeroed fields.
///
/// NOTE(review): only codes 0..=11 are mapped; the later variants
/// (`MemoryCorruption`, `VecOutOfCapacity`, the grid/UTF-8/alignment traps,
/// `ReverseRangeNotAllowedHere`) have no numeric encoding here. Whether that
/// is intentional, or which numbers the emitter assigns them, is not visible
/// in this file — confirm against the code generator before extending.
impl TryFrom<u8> for TrapCode {
    type Error = ();

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        let code = match value {
            0 => Self::StoppedByTestHarness,
            1 => Self::VecBoundsFail {
                encountered: 0,
                element_count: 0,
            },
            2 => Self::MapOutOfSpace,
            3 => Self::MapEntryNotFound,
            4 => Self::MapEntryNotFoundAndCouldNotBeCreated,
            5 => Self::MapEntryNotFoundForRemoval,
            6 => Self::LessThanTrap { a: 0, b: 0 },
            7 => Self::SparseOutOfSpace,
            8 => Self::SparseRemoveFailed,
            9 => Self::SparseGetFailed,
            10 => Self::MapCouldNotBeCopied,
            11 => Self::OverlappingMemoryCopy,
            _ => return Err(()),
        };
        Ok(code)
    }
}
196
/// Error returned by `TrapCode::from_str` when the input names no known trap.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseTrapCodeError;

impl Display for ParseTrapCodeError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Fixed message; no payload to interpolate.
        f.write_str("Unable to parse string into a valid TrapCode")
    }
}

impl Error for ParseTrapCodeError {}
206impl FromStr for TrapCode {
207 type Err = ParseTrapCodeError;
208
209 fn from_str(s: &str) -> Result<Self, Self::Err> {
210 let code = match s {
211 "stopped_by_test_harness" => Self::StoppedByTestHarness,
212 "vec_bounds_fail" => Self::VecBoundsFail {
213 encountered: 0,
214 element_count: 0,
215 }, "vec_out_of_capacity" => Self::VecOutOfCapacity {
217 encountered: 0,
218 capacity: 0,
219 }, "reverse_range_not_allowed_here" => Self::ReverseRangeNotAllowedHere,
221 "map_out_of_space" => Self::MapOutOfSpace,
222 "map_entry_not_found" => Self::MapEntryNotFound,
223 "map_entry_or_create_failed" => Self::MapEntryNotFoundAndCouldNotBeCreated,
224 "map_entry_remove_failed" => Self::MapEntryNotFoundForRemoval,
225 "less_than_trap" => Self::LessThanTrap { a: 0, b: 0 },
226 "sparse_out_of_space" => Self::SparseOutOfSpace,
227 "sparse_remove_failed" => Self::SparseRemoveFailed,
228 "sparse_get_failed" => Self::SparseGetFailed,
229 "map_could_not_be_copied" => Self::MapCouldNotBeCopied,
230 "overlapping_memory_copy" => Self::OverlappingMemoryCopy,
231 _ => return Err(ParseTrapCodeError),
232 };
233
234 Ok(code)
235 }
236}
237
impl Display for TrapCode {
    /// Human-readable form, e.g. `trap MapOutOfSpace`; delegates to `Debug`
    /// so payload fields are included.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "trap {self:?}")
    }
}
243
/// Overall VM execution state, set by the halt/trap/panic/step handlers.
#[derive(Eq, Debug, PartialEq)]
pub enum VmState {
    Normal,
    // Unrecoverable guest-program panic with its message.
    Panic(String),
    // Stopped by a runtime trap (bounds check, map failure, ...).
    Trap(TrapCode),
    Halt,
    Step,
}
252
/// The virtual machine: linear memory, a 256-entry register file, the decoded
/// instruction stream, the opcode dispatch table, and debug state.
pub struct Vm {
    memory: Memory,

    // Index of the *next* instruction to execute (incremented before dispatch).
    pc: usize, instructions: Vec<BinaryInstruction>, execution_complete: bool, call_stack: Vec<CallFrame>, handlers: [HandlerType; 256],

    // General-purpose registers; r0 is used for the return-value address
    // (see `set_return_register_address`) and r1..=r6 for parameters.
    pub registers: [u32; 256], pub debug: Debug,
    pub debug_stats_enabled: bool,
    pub debug_opcodes_enabled: bool,
    pub debug_operations_enabled: bool,
    pub state: VmState,
}
276
impl Vm {
    /// True once a halting opcode (hlt/trap/panic/step/final ret) has ended
    /// the current run; cleared again by `execute_internal`/`reset`.
    #[must_use]
    pub const fn is_execution_complete(&self) -> bool {
        self.execution_complete
    }
}
283
// 8-byte alignment helpers: `x & ALIGNMENT_MASK` rounds down to a multiple of
// 8, `(x + ALIGNMENT_REST) & ALIGNMENT_MASK` rounds up. (No uses visible in
// this chunk — presumably used by allocation code elsewhere in the file.)
const ALIGNMENT: usize = 8;
const ALIGNMENT_REST: usize = ALIGNMENT - 1;
const ALIGNMENT_MASK: usize = !ALIGNMENT_REST;
287
/// Construction parameters for [`Vm::new`].
pub struct VmSetup {
    pub stack_memory_size: usize,
    pub heap_memory_size: usize,
    // Pre-initialized constant area, copied into the low end of memory.
    pub constant_memory: Vec<u8>,
    pub debug_stats_enabled: bool,
    pub debug_opcodes_enabled: bool,
    pub debug_operations_enabled: bool,
}
296
297impl Vm {
    /// Creates a VM for `instructions`, sizes its memory from `setup`, and
    /// fills in the 256-entry opcode dispatch table.
    ///
    /// Every table slot starts as `execute_unimplemented` (which panics with
    /// a diagnostic), so any opcode missing a registration below fails loudly
    /// rather than silently. The `ArgsN` wrapper chosen for each opcode must
    /// match the operand count its handler expects.
    #[allow(clippy::too_many_lines)]
    pub fn new(instructions: Vec<BinaryInstruction>, setup: VmSetup) -> Self {
        let memory = Memory::new(
            &setup.constant_memory,
            setup.stack_memory_size,
            setup.heap_memory_size,
        );

        // Sanity check: constants must leave at least half the stack area free.
        assert!(
            setup.constant_memory.len() < setup.stack_memory_size / 2,
            "too much constant memory"
        );

        let mut vm = Self {
            memory, pc: 0,
            instructions,
            execution_complete: false,
            call_stack: vec![],
            // Default every opcode to the loud "unimplemented" handler.
            handlers: [const { HandlerType::Args0(Self::execute_unimplemented) }; 256],
            registers: [const { 0 }; 256],
            debug: Debug {
                opcodes_executed: 0,
                call_depth: 0,
                max_call_depth: 0,
                max_stack_offset: 0,
            },
            debug_stats_enabled: setup.debug_stats_enabled,
            debug_opcodes_enabled: setup.debug_opcodes_enabled,
            debug_operations_enabled: setup.debug_operations_enabled,
            state: Normal,
        };

        // --- register <-> frame stores ---
        vm.handlers[OpCode::StRegToFrame as usize] =
            HandlerType::Args6(Self::execute_st_regs_to_frame);
        vm.handlers[OpCode::StRegToFrameUsingMask as usize] =
            HandlerType::Args5(Self::execute_st_regs_to_frame_using_mask);

        // --- stores through a base pointer + offset (32/16/8 bit) ---
        vm.handlers[OpCode::St32UsingPtrWithOffset as usize] =
            HandlerType::Args6(Self::execute_stw_using_base_ptr_and_offset);
        vm.handlers[OpCode::St16UsingPtrWithOffset as usize] =
            HandlerType::Args6(Self::execute_sth_using_base_ptr_and_offset);
        vm.handlers[OpCode::St8UsingPtrWithOffset as usize] =
            HandlerType::Args6(Self::execute_stb_using_base_ptr_and_offset);

        // --- frame -> register loads ---
        vm.handlers[OpCode::LdRegFromFrameRange as usize] =
            HandlerType::Args6(Self::execute_ld_regs_from_frame);
        vm.handlers[OpCode::LdRegFromFrameUsingMask as usize] =
            HandlerType::Args5(Self::execute_ld_regs_from_frame_using_mask);

        // --- loads through a base pointer + offset (32/16/8 bit) ---
        vm.handlers[OpCode::Ld32FromPointerWithOffset as usize] =
            HandlerType::Args6(Self::execute_ldw_from_base_ptr_and_offset);
        vm.handlers[OpCode::Ld16FromPointerWithOffset as usize] =
            HandlerType::Args6(Self::execute_ldh_from_base_ptr_and_offset);
        vm.handlers[OpCode::Ld8FromPointerWithOffset as usize] =
            HandlerType::Args6(Self::execute_ldb_from_base_ptr_and_offset);

        // --- immediate moves (8/16/32 bit) ---
        vm.handlers[OpCode::Mov8FromImmediateValue as usize] =
            HandlerType::Args2(Self::execute_mov_8);
        vm.handlers[OpCode::Mov16FromImmediateValue as usize] =
            HandlerType::Args3(Self::execute_mov_16);
        vm.handlers[OpCode::Mov32FromImmediateValue as usize] =
            HandlerType::Args5(Self::execute_mov_32);

        vm.handlers[OpCode::MovReg as usize] = HandlerType::Args2(Self::execute_mov_reg);
        vm.handlers[OpCode::LdPtrFromEffectiveFrameAddress as usize] =
            HandlerType::Args5(Self::execute_lea);

        vm.handlers[OpCode::Ld32FromAbsoluteAddress as usize] =
            HandlerType::Args5(Self::execute_ldw_from_absolute_address);

        vm.handlers[OpCode::Ld8FromAbsoluteAddress as usize] =
            HandlerType::Args5(Self::execute_ldb_from_absolute_address);

        vm.handlers[OpCode::BlockCopy as usize] =
            HandlerType::Args6(Self::execute_mov_mem_with_immediate_size);

        vm.handlers[OpCode::FrameMemClr as usize] =
            HandlerType::Args8(Self::execute_frame_memory_clear);

        // --- comparisons ---
        vm.handlers[OpCode::LtI32 as usize] = HandlerType::Args3(Self::execute_lt_i32);
        vm.handlers[OpCode::LeI32 as usize] = HandlerType::Args3(Self::execute_le_i32);
        vm.handlers[OpCode::GtI32 as usize] = HandlerType::Args3(Self::execute_gt_i32);
        vm.handlers[OpCode::GeI32 as usize] = HandlerType::Args3(Self::execute_ge_i32);

        vm.handlers[OpCode::GeU32 as usize] = HandlerType::Args3(Self::execute_ge_u32);
        vm.handlers[OpCode::LtU32 as usize] = HandlerType::Args3(Self::execute_lt_u32);

        vm.handlers[OpCode::CmpReg as usize] = HandlerType::Args3(Self::execute_cmp_reg);
        vm.handlers[OpCode::CmpBlock as usize] = HandlerType::Args5(Self::execute_cmp_block);

        vm.handlers[OpCode::Eq8Imm as usize] = HandlerType::Args3(Self::execute_eq_8_imm);
        vm.handlers[OpCode::TrapOnLessThan as usize] =
            HandlerType::Args2(Self::execute_trap_on_less_than);

        vm.handlers[OpCode::MovEqualToZero as usize] =
            HandlerType::Args2(Self::execute_move_equal_to_zero);

        // --- branches ---
        vm.handlers[OpCode::BFalse as usize] = HandlerType::Args3(Self::execute_branch_if_false);
        vm.handlers[OpCode::BTrue as usize] = HandlerType::Args3(Self::execute_branch_if_true);

        vm.handlers[OpCode::B as usize] = HandlerType::Args2(Self::execute_b);

        // --- integer arithmetic ---
        vm.handlers[OpCode::AddU32 as usize] = HandlerType::Args3(Self::execute_add_u32);
        vm.handlers[OpCode::AddU32Imm as usize] = HandlerType::Args6(Self::execute_add_u32_imm);
        vm.handlers[OpCode::MulU32 as usize] = HandlerType::Args3(Self::execute_mul_u32);
        vm.handlers[OpCode::SubU32 as usize] = HandlerType::Args3(Self::execute_sub_u32);

        vm.handlers[OpCode::NegI32 as usize] = HandlerType::Args2(Self::execute_neg_i32);
        vm.handlers[OpCode::ModI32 as usize] = HandlerType::Args3(Self::execute_mod_i32);
        vm.handlers[OpCode::DivI32 as usize] = HandlerType::Args3(Self::execute_div_i32);

        // --- fixed-point "float" arithmetic (Fp) ---
        vm.handlers[OpCode::DivF32 as usize] = HandlerType::Args3(Self::execute_div_f32);
        vm.handlers[OpCode::MulF32 as usize] = HandlerType::Args3(Self::execute_mul_f32);

        // --- calls and halting ---
        vm.handlers[OpCode::Call as usize] = HandlerType::Args4(Self::execute_call);
        vm.handlers[OpCode::Enter as usize] = HandlerType::Args4(Self::execute_enter);
        vm.handlers[OpCode::Ret as usize] = HandlerType::Args0(Self::execute_ret);

        vm.handlers[OpCode::Hlt as usize] = HandlerType::Args0(Self::execute_hlt);
        vm.handlers[OpCode::UserHalt as usize] = HandlerType::Args0(Self::execute_user_halt);
        vm.handlers[OpCode::Step as usize] = HandlerType::Args0(Self::execute_step);
        vm.handlers[OpCode::Trap as usize] = HandlerType::Args1(Self::execute_trap);
        vm.handlers[OpCode::Panic as usize] = HandlerType::Args1(Self::execute_panic);

        // --- string intrinsics ---
        vm.handlers[OpCode::CodepointToString as usize] =
            HandlerType::Args2(Self::execute_codepoint_to_string);

        vm.handlers[OpCode::BoolToString as usize] =
            HandlerType::Args2(Self::execute_bool_to_string);

        vm.handlers[OpCode::StringAppend as usize] =
            HandlerType::Args3(Self::execute_string_append);
        vm.handlers[OpCode::StringRepeat as usize] =
            HandlerType::Args3(Self::execute_string_repeat);

        vm.handlers[OpCode::StringCmp as usize] = HandlerType::Args3(Self::execute_string_cmp);
        vm.handlers[OpCode::StringToString as usize] =
            HandlerType::Args2(Self::execute_string_to_string);

        vm.handlers[OpCode::StringIterInit as usize] =
            HandlerType::Args2(Self::execute_string_iter_init);
        vm.handlers[OpCode::StringIterNext as usize] =
            HandlerType::Args4(Self::execute_string_iter_next);
        vm.handlers[OpCode::StringIterNextPair as usize] =
            HandlerType::Args5(Self::execute_string_iter_next_pair);

        vm.handlers[OpCode::ByteToString as usize] =
            HandlerType::Args2(Self::execute_byte_to_string);

        // --- int intrinsics ---
        vm.handlers[OpCode::IntToRnd as usize] =
            HandlerType::Args2(Self::execute_pseudo_random_i32);
        vm.handlers[OpCode::IntMin as usize] = HandlerType::Args3(Self::execute_min_i32);
        vm.handlers[OpCode::IntMax as usize] = HandlerType::Args3(Self::execute_max_i32);
        vm.handlers[OpCode::IntClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);

        vm.handlers[OpCode::IntAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);

        vm.handlers[OpCode::IntToString as usize] = HandlerType::Args2(Self::execute_i32_to_string);
        vm.handlers[OpCode::IntToFloat as usize] = HandlerType::Args2(Self::execute_i32_to_f32);

        // --- float intrinsics ---
        // NOTE(review): FloatMin/Max/Clamp/Abs and FloatPseudoRandom reuse the
        // i32 handlers — presumably valid because the fixed-point raw ordering
        // matches signed-integer ordering; confirm for Abs/Clamp semantics.
        vm.handlers[OpCode::FloatPseudoRandom as usize] =
            HandlerType::Args2(Self::execute_pseudo_random_i32);
        vm.handlers[OpCode::FloatMin as usize] = HandlerType::Args3(Self::execute_min_i32);
        vm.handlers[OpCode::FloatMax as usize] = HandlerType::Args3(Self::execute_max_i32);
        vm.handlers[OpCode::FloatClamp as usize] = HandlerType::Args4(Self::execute_clamp_i32);

        vm.handlers[OpCode::FloatRound as usize] = HandlerType::Args2(Self::execute_f32_round);
        vm.handlers[OpCode::FloatFloor as usize] = HandlerType::Args2(Self::execute_f32_floor);
        vm.handlers[OpCode::FloatSqrt as usize] = HandlerType::Args2(Self::execute_f32_sqrt);
        vm.handlers[OpCode::FloatSign as usize] = HandlerType::Args2(Self::execute_f32_sign);
        vm.handlers[OpCode::FloatAbs as usize] = HandlerType::Args2(Self::execute_abs_i32);
        vm.handlers[OpCode::FloatSin as usize] = HandlerType::Args2(Self::execute_f32_sin);
        vm.handlers[OpCode::FloatCos as usize] = HandlerType::Args2(Self::execute_f32_cos);
        vm.handlers[OpCode::FloatAsin as usize] = HandlerType::Args2(Self::execute_f32_asin);
        vm.handlers[OpCode::FloatAcos as usize] = HandlerType::Args2(Self::execute_f32_acos);
        vm.handlers[OpCode::FloatToString as usize] =
            HandlerType::Args2(Self::execute_f32_to_string);
        // NOTE(review): FloatPseudoRandom is registered a second time here
        // with the same handler — harmless but redundant.
        vm.handlers[OpCode::FloatPseudoRandom as usize] =
            HandlerType::Args2(Self::execute_pseudo_random_i32);

        // --- ranges ---
        vm.handlers[OpCode::RangeInit as usize] = HandlerType::Args4(Self::execute_range_init);
        vm.handlers[OpCode::RangeIterInit as usize] =
            HandlerType::Args2(Self::execute_range_iter_init);
        vm.handlers[OpCode::RangeIterNext as usize] =
            HandlerType::Args4(Self::execute_range_iter_next);

        // --- arrays / vecs ---
        vm.handlers[OpCode::ArrayInitWithLenAndCapacity as usize] =
            HandlerType::Args7(Self::execute_array_init);

        vm.handlers[OpCode::VecInit as usize] = HandlerType::Args7(Self::execute_vec_init);
        vm.handlers[OpCode::VecCopy as usize] = HandlerType::Args2(Self::execute_vec_copy);
        vm.handlers[OpCode::VecCopyRange as usize] = HandlerType::Args3(Self::execute_vec_copy_range);
        vm.handlers[OpCode::VecCmp as usize] = HandlerType::Args3(Self::execute_vec_cmp);
        vm.handlers[OpCode::VecIterInit as usize] = HandlerType::Args2(Self::execute_vec_iter_init);
        vm.handlers[OpCode::VecIterNext as usize] = HandlerType::Args4(Self::execute_vec_iter_next);
        vm.handlers[OpCode::VecIterNextPair as usize] =
            HandlerType::Args5(Self::execute_vec_iter_next_pair);
        vm.handlers[OpCode::VecPushAddr as usize] = HandlerType::Args2(Self::execute_vec_push_addr);
        vm.handlers[OpCode::VecGet as usize] = HandlerType::Args3(Self::execute_vec_get);
        vm.handlers[OpCode::VecPop as usize] = HandlerType::Args2(Self::execute_vec_pop);
        vm.handlers[OpCode::VecRemoveIndex as usize] =
            HandlerType::Args2(Self::execute_vec_remove_index);

        // --- maps (open addressing) ---
        vm.handlers[OpCode::MapInitWithCapacityAndKeyAndTupleSizeAddr as usize] =
            HandlerType::Args7(Self::execute_map_open_addressing_init);
        vm.handlers[OpCode::MapIterInit as usize] = HandlerType::Args2(Self::execute_map_iter_init);
        vm.handlers[OpCode::MapIterNext as usize] = HandlerType::Args4(Self::execute_map_iter_next);
        vm.handlers[OpCode::MapIterNextPair as usize] =
            HandlerType::Args5(Self::execute_map_iter_next_pair);
        vm.handlers[OpCode::MapGetEntryLocation as usize] =
            HandlerType::Args3(Self::execute_map_open_addressing_get_entry_location);
        vm.handlers[OpCode::MapGetOrReserveEntryLocation as usize] =
            HandlerType::Args3(Self::execute_map_open_addressing_get_or_reserve_entry);
        vm.handlers[OpCode::MapHas as usize] =
            HandlerType::Args3(Self::execute_map_open_addressing_has);
        vm.handlers[OpCode::MapRemove as usize] =
            HandlerType::Args2(Self::execute_map_open_addressing_remove);
        vm.handlers[OpCode::MapOverwrite as usize] =
            HandlerType::Args2(Self::execute_map_overwrite);

        // --- sparse collections ---
        vm.handlers[OpCode::SparseInit as usize] = HandlerType::Args7(Self::execute_sparse_init);
        vm.handlers[OpCode::SparseAddGiveEntryAddress as usize] =
            HandlerType::Args7(Self::execute_sparse_add_get_entry_addr);
        vm.handlers[OpCode::SparseRemove as usize] =
            HandlerType::Args2(Self::execute_sparse_remove);
        vm.handlers[OpCode::SparseGetEntryAddr as usize] =
            HandlerType::Args5(Self::execute_sparse_get_entry_addr);
        vm.handlers[OpCode::SparseIsAlive as usize] =
            HandlerType::Args3(Self::execute_sparse_is_alive);

        vm.handlers[OpCode::SparseIterInit as usize] =
            HandlerType::Args2(Self::execute_sparse_iter_init);
        vm.handlers[OpCode::SparseIterNext as usize] =
            HandlerType::Args4(Self::execute_sparse_iter_next);
        vm.handlers[OpCode::SparseIterNextPair as usize] =
            HandlerType::Args5(Self::execute_sparse_iter_next_pair);

        // --- grids ---
        vm.handlers[OpCode::GridInit as usize] = HandlerType::Args6(Self::execute_grid_init);
        vm.handlers[OpCode::GridGetEntryAddr as usize] =
            HandlerType::Args6(Self::execute_grid_get_entry_addr);

        vm
    }
    /// Shared view of the VM's memory.
    #[must_use]
    pub const fn memory(&self) -> &Memory {
        &self.memory
    }

    /// Mutable view of the VM's memory.
    pub fn memory_mut(&mut self) -> &mut Memory {
        &mut self.memory
    }
591
    /// Executes exactly one instruction and returns `true` while the VM can
    /// continue (i.e. `!execution_complete`).
    ///
    /// NOTE(review): the dispatch `match` here duplicates the one in
    /// `execute_internal`; keep the two in sync if either changes.
    pub fn step(&mut self, host_function_callback: &mut dyn HostFunctionCallback) -> bool {
        let instruction = &self.instructions[self.pc];
        let opcode = instruction.opcode;

        // Sanity-check stack bounds, but only in normal mode (debug/replay
        // modes may legitimately violate them).
        if self.memory.execution_mode == NormalExecution {
            assert!(self.memory.stack_offset >= self.memory.constant_memory_size);
            assert!(self.memory.stack_offset <= self.memory.heap_start);
        }

        // Advance pc *before* dispatch so branch/call handlers can overwrite it.
        self.pc += 1; if opcode == OpCode::HostCall as u8 {
            // HostCall is special-cased: it needs the callback, which plain
            // table handlers cannot receive.
            self.execute_host_call(
                instruction.operands[0],
                instruction.operands[1],
                instruction.operands[2],
                host_function_callback,
            );
        } else {
            // Unpack exactly as many operand bytes as the handler's arity.
            match self.handlers[opcode as usize] {
                HandlerType::Args0(handler) => handler(self),
                HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                HandlerType::Args2(handler) => {
                    handler(self, instruction.operands[0], instruction.operands[1]);
                }
                HandlerType::Args3(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                ),
                HandlerType::Args4(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                ),
                HandlerType::Args5(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                ),
                HandlerType::Args6(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                ),
                HandlerType::Args7(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                ),
                HandlerType::Args8(handler) => handler(
                    self,
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    instruction.operands[3],
                    instruction.operands[4],
                    instruction.operands[5],
                    instruction.operands[6],
                    instruction.operands[7],
                ),
            }
        }

        !self.execution_complete
    }
673
    /// Runs the interpreter loop until a handler sets `execution_complete`
    /// (hlt/trap/panic/step/final ret).
    ///
    /// NOTE(review): the dispatch `match` duplicates `step`'s — keep both in
    /// sync if either changes.
    #[allow(clippy::too_many_lines)]
    pub fn execute_internal(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execution_complete = false;

        while !self.execution_complete {
            let instruction = &self.instructions[self.pc];
            let opcode = instruction.opcode;

            // Optional per-instruction tracing (register dump + disassembly).
            #[cfg(feature = "debug_vm")]
            if self.debug_opcodes_enabled {
                let regs = [0, 1, 2, 3, 4, 128, 129, 130];

                for reg in regs {
                    print!(
                        "{}",
                        tinter::bright_black(&format!("{reg:02X}: {:08X}, ", self.registers[reg]))
                    );
                }
                println!();

                let operands = instruction.operands;
                print!("> {:04X}: ", self.pc);
                self.debug_opcode(opcode, &operands);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_stats_enabled {
                self.debug.opcodes_executed += 1;
            }

            // Advance pc *before* dispatch so branch/call handlers can overwrite it.
            self.pc += 1; if opcode == OpCode::HostCall as u8 {
                // HostCall is special-cased: it needs the callback, which
                // plain table handlers cannot receive.
                self.execute_host_call(
                    instruction.operands[0],
                    instruction.operands[1],
                    instruction.operands[2],
                    host_function_callback,
                );
            } else {
                // Unpack exactly as many operand bytes as the handler's arity.
                match self.handlers[opcode as usize] {
                    HandlerType::Args0(handler) => handler(self),
                    HandlerType::Args1(handler) => handler(self, instruction.operands[0]),
                    HandlerType::Args2(handler) => {
                        handler(self, instruction.operands[0], instruction.operands[1]);
                    }
                    HandlerType::Args3(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                    ),
                    HandlerType::Args4(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                    ),
                    HandlerType::Args5(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                    ),
                    HandlerType::Args6(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                    ),
                    HandlerType::Args7(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                    ),
                    HandlerType::Args8(handler) => handler(
                        self,
                        instruction.operands[0],
                        instruction.operands[1],
                        instruction.operands[2],
                        instruction.operands[3],
                        instruction.operands[4],
                        instruction.operands[5],
                        instruction.operands[6],
                        instruction.operands[7],
                    ),
                }
            }
        }
    }
775
    /// Points r0 (the return-value register, by convention) at `r0_addr`.
    pub const fn set_return_register_address(&mut self, r0_addr: u32) {
        set_reg!(self, 0, r0_addr);
    }

    /// Points a parameter register (r1..=r6) at `addr`; panics on any other
    /// register index.
    pub fn set_register_pointer_addr_for_parameter(&mut self, register: u8, addr: u32) {
        assert!((1..=6).contains(&register), "not a parameter register");
        set_reg!(self, register, addr);
    }

    /// Moves both the stack offset and frame offset to `addr`.
    pub fn set_stack_start(&mut self, addr: usize) {
        if self.debug_operations_enabled {
            eprintln!("vm: set stack start and frame to: 0x{addr:08X}");
        }
        self.memory.set_stack_and_frame(addr);
    }

    /// Continues execution from the current pc (e.g. after a `Step`),
    /// without resetting any state.
    pub fn resume(&mut self, host_function_callback: &mut dyn HostFunctionCallback) {
        self.execute_internal(host_function_callback);
    }
795
    /// Starts a fresh run at instruction position `ip`: clears the call
    /// stack, resets the memory offset, then runs to completion.
    pub fn execute_from_ip(
        &mut self,
        ip: &InstructionPosition,
        host_function_callback: &mut dyn HostFunctionCallback,
    ) {
        self.pc = ip.0 as usize;
        if self.debug_operations_enabled {
            eprintln!(
                "starting up the vm, normal_stack_start: {:08X} SP:{:08X} FP:{:08X}",
                self.memory.stack_start, self.memory.stack_offset, self.memory.frame_offset
            );
        }

        self.call_stack.clear();
        self.memory.reset_offset();

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            eprintln!(
                "start executing --------- frame {:X} heap: {:X}",
                self.memory.frame_offset, self.memory.heap_alloc_offset
            );
        }

        self.execute_internal(host_function_callback);
    }
822
    /// Sets the program counter without touching any other state.
    pub const fn set_pc(&mut self, pc: &InstructionPosition) {
        self.pc = pc.0 as usize;
    }

    /// Current program counter (index of the next instruction).
    pub const fn pc(&self) -> usize {
        self.pc
    }

    /// Current frame pointer (offset into memory).
    pub fn fp(&self) -> usize {
        self.memory.frame_offset
    }

    /// Current stack pointer (offset into memory).
    pub fn sp(&self) -> usize {
        self.memory.stack_offset
    }

    /// Active call frames, innermost last.
    pub fn call_stack(&self) -> &[CallFrame] {
        &self.call_stack
    }
842
    /// Default dispatch-table entry: reached for any opcode `new` did not
    /// register. Dumps diagnostics and panics — a deliberate loud failure.
    /// (`pc - 1` because the loop increments pc before dispatch.)
    fn execute_unimplemented(&mut self) {
        let unknown_opcode = OpCode::from(self.instructions[self.pc - 1].opcode);
        eprintln!("error: opcode not implemented: {unknown_opcode} {unknown_opcode:?}");
        eprintln!("VM runtime halted.");
        self.debug_output();
        panic!("unknown OPCODE! {unknown_opcode} {unknown_opcode:?}");
    }
850
    /// Raw view of memory starting at the current frame pointer.
    ///
    /// NOTE(review): the length is the *total* `memory_size`, so a slice
    /// starting at a non-zero frame pointer appears to extend past the end of
    /// the allocation — confirm against `Memory::frame_ptr`; this looks like
    /// it should be `memory_size - frame_offset`.
    pub fn frame_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.frame_ptr(), self.memory.memory_size) }
    }

    /// Raw view of all memory from the base (heap ptr at offset 0).
    pub fn heap_memory(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), self.memory.memory_size) }
    }

    /// Raw view of just the constant area at the base of memory.
    pub fn constant_memory(&self) -> &[u8] {
        unsafe {
            std::slice::from_raw_parts(
                self.memory.get_heap_ptr(0),
                self.memory.constant_memory_size,
            )
        }
    }

    /// Raw view of memory from the base up to `offset` bytes.
    pub fn all_memory_up_to(&self, offset: usize) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.memory.get_heap_ptr(0), offset) }
    }

    /// Size of the constant area in bytes.
    pub fn constant_size(&self) -> usize {
        self.memory.constant_memory_size
    }
    /// The loaded instruction stream.
    #[must_use]
    #[allow(clippy::missing_const_for_fn)]
    pub fn instructions(&self) -> &[BinaryInstruction] {
        &self.instructions
    }
    /// Full reset: memory, allocator, pc, completion flag, and call stack.
    /// Register contents and `state` are left untouched.
    pub fn reset(&mut self) {
        self.memory.reset();
        self.memory.reset_allocator();

        self.pc = 0;
        self.execution_complete = false;
        self.call_stack.clear();
    }

    /// Partial reset that keeps constants: allocator and stack/frame go back
    /// to the constant-area limit, call stack is cleared. pc is NOT reset.
    pub fn reset_stack_and_heap_to_constant_limit(&mut self) {
        self.memory.reset_allocator();
        self.memory.reset_stack_and_fp();
        self.reset_call_stack();
        self.execution_complete = false;
    }

    /// Drops all active call frames.
    pub fn reset_call_stack(&mut self) {
        self.call_stack.clear();
    }

    /// Zeroes the collected debug statistics (VM and memory).
    pub fn reset_debug(&mut self) {
        self.debug = Debug::default();
        self.memory.debug = MemoryDebug {
            max_heap_alloc_offset: 0,
        }
    }

    /// Current frame offset into memory.
    #[must_use]
    pub fn frame_offset(&self) -> usize {
        self.memory.frame_offset
    }

    /// Replaces the instruction stream and rewinds execution to the start.
    /// Memory and registers are NOT reset — call `reset` for that.
    pub fn load_bytecode(&mut self, instructions: Vec<BinaryInstruction>) {
        self.instructions = instructions;
        self.pc = 0;
        self.execution_complete = false;
    }
919
    /// Mov32FromImmediateValue: dst = 32-bit immediate assembled from four
    /// little-endian operand bytes.
    #[inline]
    fn execute_mov_32(&mut self, dst_reg: u8, a: u8, b: u8, c: u8, d: u8) {
        set_reg!(self, dst_reg, Self::u8s_to_32(a, b, c, d));
    }

    /// Mov16FromImmediateValue: dst = 16-bit immediate (zero-extended).
    #[inline]
    fn execute_mov_16(&mut self, dst_reg: u8, a: u8, b: u8) {
        set_reg!(self, dst_reg, u16_from_u8s!(a, b));
    }

    /// Mov8FromImmediateValue: dst = 8-bit immediate (zero-extended).
    #[inline]
    fn execute_mov_8(&mut self, dst_reg: u8, octet: u8) {
        set_reg!(self, dst_reg, octet);
    }
934
    // "Float" handlers operate on fixed-point values (`fixed32::Fp`): the
    // register's 32-bit pattern is reinterpreted as the raw fixed-point
    // representation via `Fp::from_raw`, and results are stored back with
    // `.inner()`.

    /// MulF32: dst = lhs * rhs (fixed-point).
    #[inline]
    fn execute_mul_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);
        set_reg!(self, dst_reg, (lhs * rhs).inner());
    }

    /// DivF32: dst = lhs / rhs (fixed-point).
    #[inline]
    fn execute_div_f32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = Fp::from_raw(get_reg!(self, lhs_reg) as i32);
        let rhs = Fp::from_raw(get_reg!(self, rhs_reg) as i32);

        set_reg!(self, dst_reg, (lhs / rhs).inner());
    }

    /// FloatRound: dst = round(val) as an integer (not fixed-point).
    #[inline]
    fn execute_f32_round(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        let int_val: i16 = val.round().into();
        set_reg!(self, dst_reg, int_val);
    }

    /// FloatFloor: dst = floor(val) as an integer (not fixed-point).
    #[inline]
    fn execute_f32_floor(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        let floored: i16 = val.floor().into();
        set_reg!(self, dst_reg, floored);
    }

    /// FloatSqrt: dst = sqrt(val), fixed-point in and out.
    #[inline]
    fn execute_f32_sqrt(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        set_reg!(self, dst_reg, val.sqrt().inner());
    }

    /// FloatSin: dst = sin(val), fixed-point in and out.
    #[inline]
    fn execute_f32_sin(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        set_reg!(self, dst_reg, val.sin().inner());
    }

    /// FloatAsin: dst = asin(val), fixed-point in and out.
    #[inline]
    fn execute_f32_asin(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        set_reg!(self, dst_reg, val.asin().inner());
    }

    /// FloatCos: dst = cos(val), fixed-point in and out.
    #[inline]
    fn execute_f32_cos(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        set_reg!(self, dst_reg, val.cos().inner());
    }

    /// FloatAcos: dst = acos(val), fixed-point in and out.
    #[inline]
    fn execute_f32_acos(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        set_reg!(self, dst_reg, val.acos().inner());
    }
1001
    /// FloatToString: allocates a VM string with the fixed-point value's
    /// decimal representation and stores its handle/address in dst.
    #[inline]
    fn execute_f32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);

        self.create_string(dst_reg, &val.to_string())
    }

    /// FloatSign: dst = -1, 0 or +1 (as fixed-point) depending on val's sign.
    #[inline]
    fn execute_f32_sign(&mut self, dst_reg: u8, val_reg: u8) {
        let val = Fp::from_raw(get_reg!(self, val_reg) as i32);
        set_reg!(
            self,
            dst_reg,
            Fp::from(if val < 0 {
                -1
            } else if val > 0 {
                1
            } else {
                0
            })
            .inner()
        );
    }
1035
1036 #[inline]
1037 fn execute_neg_i32(&mut self, dst_reg: u8, val_reg: u8) {
1038 let val = get_reg!(self, val_reg) as i32;
1039 set_reg!(self, dst_reg, -val);
1040 }
1041
    /// AddU32: dst = lhs + rhs, wrapping on overflow.
    #[inline]
    const fn execute_add_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = get_reg!(self, lhs_reg);
        let rhs = get_reg!(self, rhs_reg);

        set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
    }

    /// AddU32Imm: dst = lhs + imm32, wrapping; the immediate is assembled
    /// from four little-endian operand bytes.
    #[inline]
    fn execute_add_u32_imm(
        &mut self,
        dst_reg: u8,
        lhs_reg: u8,
        rhs_1: u8,
        rhs_2: u8,
        rhs_3: u8,
        rhs_4: u8,
    ) {
        let lhs = get_reg!(self, lhs_reg);
        let rhs = u32_from_u8s!(rhs_1, rhs_2, rhs_3, rhs_4);

        set_reg!(self, dst_reg, lhs.wrapping_add(rhs));
    }

    /// MulU32: dst = lhs * rhs, wrapping on overflow.
    #[inline]
    const fn execute_mul_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = get_reg!(self, lhs_reg);
        let rhs = get_reg!(self, rhs_reg);

        set_reg!(self, dst_reg, lhs.wrapping_mul(rhs));
    }

    /// SubU32: dst = lhs - rhs, wrapping on underflow.
    #[inline]
    const fn execute_sub_u32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = get_reg!(self, lhs_reg);
        let rhs = get_reg!(self, rhs_reg);

        set_reg!(self, dst_reg, lhs.wrapping_sub(rhs));
    }
1081
    /// Floored modulo: `((lhs % rhs) + rhs) % rhs` yields a result with the
    /// sign of the divisor (e.g. `-5 mod 3 == 1`), unlike Rust's truncating `%`.
    ///
    /// NOTE(review): divides by `rhs` without a zero check, so `rhs == 0`
    /// panics (and `i32::MIN % -1` panics in debug builds) instead of
    /// trapping — confirm whether a TrapCode is wanted here.
    #[inline]
    fn execute_mod_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
        let lhs = get_reg!(self, lhs_reg) as i32;
        let rhs = get_reg!(self, rhs_reg) as i32;

        let result = ((lhs % rhs) + rhs) % rhs;
        set_reg!(self, dst_reg, result);
    }
1093
1094 #[inline]
1095 fn execute_div_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1096 let lhs = get_reg!(self, lhs_reg) as i32;
1097 let rhs = get_reg!(self, rhs_reg) as i32;
1098
1099 let result_option = lhs.checked_div(rhs);
1100
1101 match result_option {
1102 Some(result) => {
1103 set_reg!(self, dst_reg, result);
1104 }
1105 None => {
1106 panic!(
1107 "VM Runtime Error: Signed 32-bit integer overflow during DIV_I32 (R{dst_reg} = R{lhs_reg} - R{rhs_reg})"
1108 );
1109 }
1110 }
1111 }
1112
1113 #[inline]
1114 fn execute_lt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1115 let lhs = get_reg!(self, lhs_reg) as i32;
1116 let rhs = get_reg!(self, rhs_reg) as i32;
1117 set_reg!(self, dest_bool_reg, lhs < rhs);
1118 }
1119
1120 #[inline]
1121 fn execute_le_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1122 let lhs = get_reg!(self, lhs_reg) as i32;
1123 let rhs = get_reg!(self, rhs_reg) as i32;
1124 set_reg!(self, dest_bool_reg, lhs <= rhs);
1125 }
1126
1127 #[inline]
1128 fn execute_gt_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1129 let lhs = get_reg!(self, lhs_reg) as i32;
1130 let rhs = get_reg!(self, rhs_reg) as i32;
1131 set_reg!(self, dest_bool_reg, lhs > rhs);
1132 }
1133
1134 #[inline]
1135 fn execute_ge_i32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1136 let lhs = get_reg!(self, lhs_reg) as i32;
1137 let rhs = get_reg!(self, rhs_reg) as i32;
1138
1139 set_reg!(self, dest_bool_reg, lhs >= rhs);
1140 }
1141
1142 #[inline]
1143 fn execute_ge_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1144 let lhs = get_reg!(self, lhs_reg);
1145 let rhs = get_reg!(self, rhs_reg);
1146
1147 set_reg!(self, dest_bool_reg, lhs >= rhs);
1148 }
1149
1150 #[inline]
1151 fn execute_lt_u32(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1152 let lhs = get_reg!(self, lhs_reg);
1153 let rhs = get_reg!(self, rhs_reg);
1154
1155 set_reg!(self, dest_bool_reg, lhs < rhs);
1156 }
1157
1158 #[inline]
1159 fn execute_pseudo_random_i32(&mut self, dst_reg: u8, src_reg: u8) {
1160 let src = get_reg!(self, src_reg);
1161 set_reg!(self, dst_reg, squirrel_prng::squirrel_noise5(src, 0) as i32);
1162 }
1163
1164 #[inline]
1165 fn execute_i32_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1166 let val = get_reg!(self, val_reg) as i32;
1167
1168 self.create_string(dst_reg, &val.to_string());
1169 }
1170
1171 #[inline]
1172 fn execute_bool_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1173 let val = get_reg!(self, val_reg) != 0;
1174
1175 self.create_string(dst_reg, &val.to_string());
1176 }
1177
    /// Lookup table of uppercase hexadecimal digit characters.
    const HEX_DIGITS: &'static [u8; 16] = b"0123456789ABCDEF";

    /// Writes `val` as the four ASCII bytes `0xHH` (uppercase) into `dst`.
    #[inline]
    const fn byte_to_prefixed_hex(val: u8, dst: &mut [u8; 4]) {
        dst[0] = b'0';
        dst[1] = b'x';
        dst[2] = Self::HEX_DIGITS[(val >> 4) as usize];   // high nibble
        dst[3] = Self::HEX_DIGITS[(val & 0x0F) as usize]; // low nibble
    }
1187
1188 #[inline]
1189 fn execute_byte_to_string(&mut self, dst_reg: u8, val_reg: u8) {
1190 let val = get_reg!(self, val_reg);
1191 debug_assert!(val <= 0xff, "byte out of range");
1192
1193 let mut buf = [0u8; 4];
1194 Self::byte_to_prefixed_hex(val as u8, &mut buf);
1195
1196 let s = unsafe { std::str::from_utf8_unchecked(&buf) };
1198
1199 self.create_string(dst_reg, s);
1200 }
1201
    /// Converts the Unicode scalar value in `val_reg` into a one-character VM string.
    ///
    /// NOTE(review): `from_u32(..).unwrap()` panics when the register holds a
    /// surrogate or out-of-range value; guest bytecode can plausibly trigger
    /// this — consider trapping instead of panicking.
    #[inline]
    fn execute_codepoint_to_string(&mut self, dst_reg: u8, val_reg: u8) {
        let char_raw = get_reg!(self, val_reg);
        let char = std::char::from_u32(char_raw).unwrap();
        self.create_string(dst_reg, &char.to_string());
    }
1208
    /// Converts a signed integer register into a fixed-point value.
    ///
    /// NOTE(review): the value is narrowed through `i16` before `Fp::from`,
    /// so anything outside i16 range is silently truncated — presumably the
    /// 16.16 fixed-point integer domain, but confirm this is intentional and
    /// not a lossy-cast bug.
    #[inline]
    fn execute_i32_to_f32(&mut self, float_dest_reg: u8, int_source_reg: u8) {
        let int_source = get_reg!(self, int_source_reg) as i32;
        set_reg!(self, float_dest_reg, Fp::from(int_source as i16).inner());
    }
1214
1215 #[inline]
1216 fn execute_abs_i32(&mut self, dst_reg: u8, val_reg: u8) {
1217 let val = get_reg!(self, val_reg) as i32;
1218 set_reg!(self, dst_reg, if val < 0 { -val } else { val });
1219 }
1220
1221 #[inline]
1222 fn execute_min_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1223 let lhs = get_reg!(self, lhs_reg) as i32;
1224 let rhs = get_reg!(self, rhs_reg) as i32;
1225
1226 set_reg!(self, dst_reg, if lhs < rhs { lhs } else { rhs });
1227 }
1228
1229 #[inline]
1230 fn execute_max_i32(&mut self, dst_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1231 let lhs = get_reg!(self, lhs_reg) as i32;
1232 let rhs = get_reg!(self, rhs_reg) as i32;
1233
1234 set_reg!(self, dst_reg, if lhs > rhs { lhs } else { rhs });
1235 }
1236
1237 #[inline]
1238 fn execute_clamp_i32(&mut self, dst_reg: u8, val_reg: u8, min_reg: u8, max_reg: u8) {
1239 let val = get_reg!(self, val_reg) as i32;
1240 let min_val = get_reg!(self, min_reg) as i32;
1241 let max_val = get_reg!(self, max_reg) as i32;
1242
1243 set_reg!(
1244 self,
1245 dst_reg,
1246 if val < min_val {
1247 min_val
1248 } else if val > max_val {
1249 max_val
1250 } else {
1251 val
1252 }
1253 );
1254 }
1255
1256 #[inline]
1258 fn execute_cmp_reg(&mut self, dest_bool_reg: u8, lhs_reg: u8, rhs_reg: u8) {
1259 set_reg!(
1260 self,
1261 dest_bool_reg,
1262 self.registers[lhs_reg as usize] == self.registers[rhs_reg as usize]
1263 );
1264 }
1265
1266 #[inline]
1268 fn execute_eq_8_imm(&mut self, dest_bool_reg: u8, val_reg: u8, octet: u8) {
1269 let compare = get_reg!(self, val_reg);
1270 set_reg!(self, dest_bool_reg, compare == octet as u32);
1271 }
1272
1273 #[inline]
1274 fn execute_trap_on_less_than(&mut self, a_reg: u8, b_reg: u8) {
1275 let a = get_reg!(self, a_reg);
1276 let b = get_reg!(self, b_reg);
1277 if a < b {
1278 self.internal_trap(TrapCode::LessThanTrap { a, b })
1279 }
1280 }
1281
1282 #[inline]
1283 fn execute_move_equal_to_zero(&mut self, dst_reg: u8, src_reg: u8) {
1284 set_reg!(self, dst_reg, get_reg!(self, src_reg) == 0);
1285 }
1286
1287 #[inline]
1288 const fn execute_branch_if_false(
1289 &mut self,
1290 test_reg: u8,
1291 branch_offset_0: u8,
1292 branch_offset_1: u8,
1293 ) {
1294 if get_reg!(self, test_reg) == 0 {
1295 self.pc =
1296 (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
1297 }
1298 }
1299
1300 #[inline]
1301 const fn execute_branch_if_true(
1302 &mut self,
1303 test_reg: u8,
1304 branch_offset_0: u8,
1305 branch_offset_1: u8,
1306 ) {
1307 if get_reg!(self, test_reg) != 0 {
1308 self.pc =
1309 (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
1310 }
1311 }
1312
1313 #[inline]
1314 fn execute_b(&mut self, branch_offset_0: u8, branch_offset_1: u8) {
1315 self.pc =
1316 (self.pc as i32 + i16_from_u8s!(branch_offset_0, branch_offset_1) as i32) as usize;
1317 }
1318
1319 #[inline]
1320 fn execute_hlt(&mut self) {
1321 self.execution_complete = true;
1322 #[cfg(feature = "debug_vm")]
1323 if self.debug_opcodes_enabled {
1324 self.debug_output();
1325 }
1326 }
1327
1328 #[inline]
1329 fn execute_user_halt(&mut self) {
1330 self.execution_complete = true;
1331 self.state = VmState::Halt;
1332 #[cfg(feature = "debug_vm")]
1333 if self.debug_opcodes_enabled {
1334 self.debug_output();
1335 }
1336 }
1337
1338 #[inline]
1339 fn execute_step(&mut self) {
1340 self.execution_complete = true;
1341 self.state = VmState::Step;
1342 #[cfg(feature = "debug_vm")]
1343 if self.debug_opcodes_enabled {
1344 self.debug_output();
1345 }
1346 }
1347
    /// Raises the trap whose numeric code is embedded in the instruction.
    ///
    /// NOTE(review): `try_from(..).unwrap()` panics on an unknown trap code in
    /// the bytecode — acceptable only if the loader validates instructions first.
    #[inline]
    fn execute_trap(&mut self, trap_code: u8) {
        self.internal_trap(TrapCode::try_from(trap_code).unwrap());
    }
1352
    /// Stops execution and records `trap_code` in the VM state.
    ///
    /// Public so runtime helpers elsewhere in the crate can raise traps.
    pub fn internal_trap(&mut self, trap_code: TrapCode) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("vm trap: '{trap_code}'");
        }
        self.state = VmState::Trap(trap_code);

        // Emit the execution-statistics summary when opcode debugging is on.
        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }
    }
1367
    /// Guest-initiated panic: halts execution and stores the panic message
    /// (a VM string whose heap address is in `panic_reason_reg`) in the state.
    #[inline]
    fn execute_panic(&mut self, panic_reason_reg: u8) {
        self.execution_complete = true;

        #[cfg(feature = "debug_vm")]
        if self.debug_opcodes_enabled {
            self.debug_output();
        }

        // Resolve the panic message from VM heap memory.
        let heap_addr = get_reg!(self, panic_reason_reg);
        let str = self.read_string(heap_addr, &self.memory);

        #[cfg(feature = "debug_vm")]
        if self.debug_stats_enabled {
            eprintln!("panic: {str}");
        }

        self.state = VmState::Panic(str.to_string());
    }
1387
1388 fn debug_output(&self) {
1389 eprintln!(
1390 "total opcodes executed: {}, call_stack_depth: {}, max_call_depth:{}",
1391 self.debug.opcodes_executed, self.debug.call_depth, self.debug.max_call_depth
1392 );
1393 }
1394
1395 #[inline]
1396 fn execute_mov_reg(&mut self, dst_reg: u8, src_reg: u8) {
1397 self.registers[dst_reg as usize] = self.registers[src_reg as usize];
1398 }
1399
    /// Spills `count` consecutive registers, starting at `start_reg`, into
    /// frame memory at the 32-bit immediate frame offset.
    #[inline]
    fn execute_st_regs_to_frame(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        start_reg: u8,
        count: u8,
    ) {
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );
        let const_reg_ptr = &self.registers[start_reg as usize] as *const u32;
        let target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        // SAFETY: relies on the emitter keeping `start_reg + count` within the
        // register file and the frame range within VM memory — neither is
        // checked here. NOTE(review): consider a debug_assert on both bounds.
        unsafe {
            ptr::copy_nonoverlapping(const_reg_ptr, target_ptr, count as usize);
        }
    }
1422
    /// Spills the registers selected by `reg_mask` (bit i selects register i,
    /// low eight registers only) into consecutive u32 slots at the frame offset.
    #[inline]
    fn execute_st_regs_to_frame_using_mask(
        &mut self,
        frame_offset_0: u8,
        frame_offset_1: u8,
        frame_offset_2: u8,
        frame_offset_3: u8,
        reg_mask: u8,
    ) {
        let frame_offset = u32_from_u8s!(
            frame_offset_0,
            frame_offset_1,
            frame_offset_2,
            frame_offset_3
        );

        let mut target_ptr = self.memory.get_frame_ptr_as_u32(frame_offset);
        let mut const_reg_ptr = &self.registers[0usize] as *const u32;
        let mut mask = reg_mask;
        for _ in 0..8 {
            if (mask & 0x1) != 0 {
                // SAFETY: target stays within the frame provided the emitter
                // sized the frame for popcount(mask) words — not checked here.
                unsafe {
                    ptr::write(target_ptr, *const_reg_ptr);
                    target_ptr = target_ptr.add(1);
                }
            }
            mask >>= 1;
            // The register cursor advances every iteration, selected or not.
            // SAFETY: after the last iteration this is a one-past-the-end
            // pointer into the register file, which is valid as long as it is
            // never dereferenced (and it is not).
            unsafe {
                const_reg_ptr = const_reg_ptr.add(1);
            }
        }
    }
1455
    /// Stores the full 32-bit value of `src_reg` at `heap[reg[base] + imm32]`.
    ///
    /// NOTE(review): `ptr::write::<u32>` requires a 4-byte-aligned target —
    /// confirm the emitter never produces unaligned base+offset combinations.
    #[inline]
    fn execute_stw_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u32;
        let value_to_copy = get_reg!(self, src_reg);

        // SAFETY: address resolved through VM memory; in-bounds and alignment
        // are the emitter's responsibility.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
1475
    /// Stores the low 16 bits of `src_reg` at `heap[reg[base] + imm32]`.
    ///
    /// NOTE(review): `ptr::write::<u16>` requires a 2-byte-aligned target —
    /// confirm the emitter never produces unaligned base+offset combinations.
    #[inline]
    fn execute_sth_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset) as *mut u16;
        let value_to_copy = get_reg!(self, src_reg) as u16;

        // SAFETY: address resolved through VM memory; in-bounds and alignment
        // are the emitter's responsibility.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
    /// Stores the low 8 bits of `src_reg` at `heap[reg[base] + imm32]`.
    #[inline]
    fn execute_stb_using_base_ptr_and_offset(
        &mut self,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        src_reg: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_write_to = self.get_ptr_from_reg_with_offset(base_ptr_reg, offset);
        let value_to_copy = get_reg!(self, src_reg) as u8;

        // SAFETY: byte store, so no alignment concern; in-bounds is the
        // emitter's responsibility.
        unsafe {
            ptr::write(ptr_to_write_to, value_to_copy);
        }
    }
1514
    /// Loads one byte from `heap[reg[base] + imm32]`, zero-extended into `dst_reg`.
    #[inline]
    pub fn execute_ldb_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from = self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset);
        // SAFETY: address resolved through VM memory; in-bounds is the
        // emitter's responsibility.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1531
    /// Loads a 32-bit word from `heap[reg[base] + imm32]` into `dst_reg`.
    ///
    /// NOTE(review): reading through `*const u32` requires 4-byte alignment —
    /// confirm the emitter never produces unaligned base+offset combinations.
    #[inline]
    pub fn execute_ldw_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from =
            self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u32;
        // SAFETY: address resolved through VM memory; in-bounds and alignment
        // are the emitter's responsibility.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1549
    /// Loads a 32-bit word from an absolute 32-bit immediate heap address.
    ///
    /// NOTE(review): reading through `*const u32` requires 4-byte alignment —
    /// confirm the emitter only encodes aligned absolute addresses.
    #[inline]
    fn execute_ldw_from_absolute_address(
        &mut self,
        dst_reg: u8,
        addr_0: u8,
        addr_1: u8,
        addr_2: u8,
        addr_3: u8,
    ) {
        let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);

        let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize) as *const u32;

        // SAFETY: address resolved through VM memory; bounds/alignment are
        // the emitter's responsibility.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1567
    /// Loads one byte from an absolute 32-bit immediate heap address,
    /// zero-extended into `dst_reg`.
    #[inline]
    fn execute_ldb_from_absolute_address(
        &mut self,
        dst_reg: u8,
        addr_0: u8,
        addr_1: u8,
        addr_2: u8,
        addr_3: u8,
    ) {
        let absolute_addr = u32_from_u8s!(addr_0, addr_1, addr_2, addr_3);

        let ptr_to_read_from = self.memory.get_heap_const_ptr(absolute_addr as usize);

        // SAFETY: byte read, no alignment concern; bounds are the emitter's
        // responsibility.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1585
    /// Loads a 16-bit halfword from `heap[reg[base] + imm32]`, zero-extended
    /// into `dst_reg`.
    ///
    /// NOTE(review): reading through `*const u16` requires 2-byte alignment —
    /// confirm the emitter never produces unaligned base+offset combinations.
    #[inline]
    pub fn execute_ldh_from_base_ptr_and_offset(
        &mut self,
        dst_reg: u8,
        base_ptr_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let ptr_to_read_from =
            self.get_const_ptr_from_reg_with_offset(base_ptr_reg, offset) as *const u16;
        // SAFETY: address resolved through VM memory; in-bounds and alignment
        // are the emitter's responsibility.
        unsafe {
            set_reg!(self, dst_reg, *ptr_to_read_from);
        }
    }
1603
    /// Fills `count` consecutive registers, starting at `start_reg`, from
    /// frame memory at the 32-bit immediate frame offset.
    #[inline]
    pub fn execute_ld_regs_from_frame(
        &mut self,
        start_reg: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
        count: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let target_reg_ptr = &mut self.registers[start_reg as usize] as *mut u32;
        let source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
        // SAFETY: relies on the emitter keeping `start_reg + count` within the
        // register file and the frame range within VM memory — not checked here.
        unsafe {
            ptr::copy_nonoverlapping(source_frame_start, target_reg_ptr, count as usize);
        }
    }
1621
    /// Restores the registers selected by `reg_mask` (bit i selects register
    /// i, low eight only) from consecutive u32 slots at the frame offset.
    /// Mirror of `execute_st_regs_to_frame_using_mask`.
    #[inline]
    pub fn execute_ld_regs_from_frame_using_mask(
        &mut self,
        reg_mask: u8,
        offset_0: u8,
        offset_1: u8,
        offset_2: u8,
        offset_3: u8,
    ) {
        let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
        let mut target_reg_ptr = &mut self.registers[0usize] as *mut u32;
        let mut source_frame_start = self.memory.get_frame_const_ptr_as_u32(offset);
        let mut mask = reg_mask;
        for _ in 0..8 {
            if mask & 0x01 != 0 {
                // SAFETY: source stays within the frame provided the emitter
                // stored popcount(mask) words there — not checked here.
                unsafe {
                    ptr::write(target_reg_ptr, *source_frame_start);
                    source_frame_start = source_frame_start.add(1);
                }
            }
            mask >>= 1;
            // The register cursor advances every iteration, selected or not.
            // SAFETY: ends as a one-past-the-end pointer into the register
            // file, which is valid while never dereferenced (and it is not).
            unsafe {
                target_reg_ptr = target_reg_ptr.add(1);
            }
        }
    }
1648
1649 #[inline]
1650 fn execute_lea(&mut self, dst_reg: u8, offset_0: u8, offset_1: u8, offset_2: u8, offset_3: u8) {
1651 let current_fp_addr = self.memory.frame_offset as u32;
1652 let offset = u32_from_u8s!(offset_0, offset_1, offset_2, offset_3);
1653 set_reg!(self, dst_reg, current_fp_addr + offset);
1654 }
1655
    /// Zero-fills `total_bytes` of frame memory starting at the immediate
    /// frame offset.
    #[inline]
    pub fn execute_frame_memory_clear(
        &mut self,
        dst_pointer_0: u8,
        dst_pointer_1: u8,
        dst_pointer_2: u8,
        dst_pointer_3: u8,
        memory_size_0: u8,
        memory_size_1: u8,
        memory_size_2: u8,
        memory_size_3: u8,
    ) {
        let frame_offset =
            u32_from_u8s!(dst_pointer_0, dst_pointer_1, dst_pointer_2, dst_pointer_3);
        let total_bytes = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);

        // NOTE(review): this guard compares a frame-relative offset against
        // the total memory size with `<` — the exclusive end suggests `<=`,
        // and `frame_offset + total_bytes` can overflow u32; confirm both.
        assert!(
            frame_offset + total_bytes < self.memory.memory_size as u32,
            "trying to overwrite memory!"
        );
        let dst_ptr = self.memory.get_frame_ptr(frame_offset);

        // SAFETY: range guarded by the assert above.
        unsafe {
            ptr::write_bytes(dst_ptr, 0, total_bytes as usize);
        }
    }
1682
1683 #[inline]
1684 fn execute_mov_mem_with_immediate_size(
1685 &mut self,
1686 dst_pointer_reg: u8,
1687 src_pointer_reg: u8,
1688 memory_size_0: u8,
1689 memory_size_1: u8,
1690 memory_size_2: u8,
1691 memory_size_3: u8,
1692 ) {
1693 let dest_addr = get_reg!(self, dst_pointer_reg);
1694 let src_addr = get_reg!(self, src_pointer_reg);
1695 let memory_size = u32_from_u8s!(memory_size_0, memory_size_1, memory_size_2, memory_size_3);
1696 assert!(
1697 src_addr + memory_size < self.memory.memory_size as u32,
1698 "trying to overwrite memory"
1699 );
1700
1701 let dest_end = dest_addr + memory_size;
1703 let src_end = src_addr + memory_size;
1704
1705 if dest_addr < src_end && src_addr < dest_end {
1706 return self.internal_trap(TrapCode::OverlappingMemoryCopy);
1707 }
1708
1709 #[cfg(feature = "debug_vm")]
1710 if self.debug_operations_enabled {
1711 eprintln!(
1712 "{:04X}> BLKCPY Size={:08X} \n \
1713 DST_ADDR=0x{:08X}\n \
1714 SRC_ADDR=0x{:08X}",
1715 self.pc - 1,
1716 memory_size,
1717 dest_addr,
1718 src_addr,
1719 );
1720 }
1721
1722 let dst_ptr = self.memory.get_heap_ptr(dest_addr as usize);
1723 let src_ptr = self.memory.get_heap_const_ptr(src_addr as usize);
1724
1725 unsafe {
1726 ptr::copy_nonoverlapping(src_ptr, dst_ptr, memory_size as usize);
1727 }
1728 }
1729
1730 #[inline]
1731 fn execute_cmp_block(
1732 &mut self,
1733 dest_bool_reg: u8,
1734 src_addr_reg_a: u8,
1735 src_addr_reg_b: u8,
1736 size_lower: u8,
1737 size_upper: u8,
1738 ) {
1739 let size = u16_from_u8s!(size_lower, size_upper) as usize;
1740
1741 let arc_addr_a = get_reg!(self, src_addr_reg_a);
1742 let src_addr_b = get_reg!(self, src_addr_reg_b);
1743
1744 let src_ptr_a = self.memory.get_heap_const_ptr(arc_addr_a as usize);
1745 let src_ptr_b = self.memory.get_heap_const_ptr(src_addr_b as usize);
1746
1747 unsafe {
1748 let slice_a = std::slice::from_raw_parts(src_ptr_a, size);
1749 let slice_b = std::slice::from_raw_parts(src_ptr_b, size);
1750
1751 set_reg!(self, dest_bool_reg, slice_a == slice_b);
1752 }
1753 }
1754
    /// Prints the opcode mnemonic and its operands (hex) to stderr, using the
    /// registered handler's arity to decide how many operand bytes to show.
    #[cfg(feature = "debug_vm")]
    pub fn debug_opcode(&self, opcode: u8, operands: &[u8; 8]) {
        eprintln!(
            "{:8} {}",
            OpCode::from(opcode),
            match self.handlers[opcode as usize] {
                HandlerType::Args0(_) => String::new(),
                HandlerType::Args1(_) => format!("{:04X}", operands[0]),
                HandlerType::Args2(_) => format!("{:04X}, {:04X}", operands[0], operands[1]),
                HandlerType::Args3(_) => format!(
                    "{:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2]
                ),
                HandlerType::Args4(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2], operands[3]
                ),
                HandlerType::Args5(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2], operands[3], operands[4],
                ),
                HandlerType::Args6(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0], operands[1], operands[2], operands[3], operands[4], operands[5],
                ),
                HandlerType::Args7(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0],
                    operands[1],
                    operands[2],
                    operands[3],
                    operands[4],
                    operands[5],
                    operands[6],
                ),
                HandlerType::Args8(_) => format!(
                    "{:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}, {:04X}",
                    operands[0],
                    operands[1],
                    operands[2],
                    operands[3],
                    operands[4],
                    operands[5],
                    operands[6],
                    operands[7],
                ),
            }
        );
    }
1804
    /// CALL: saves a return frame (return address plus the caller's frame and
    /// stack offsets) and jumps to the absolute 32-bit instruction index.
    fn execute_call(
        &mut self,
        absolute_pc_a: u8,
        absolute_pc_b: u8,
        absolute_pc_c: u8,
        absolute_pc_d: u8,
    ) {
        let absolute_pc = u32_from_u8s!(absolute_pc_a, absolute_pc_b, absolute_pc_c, absolute_pc_d);
        let return_info = CallFrame {
            // The instruction following this CALL.
            return_address: self.pc + 1,
            previous_frame_offset: self.memory.frame_offset,
            previous_stack_offset: self.memory.stack_offset,
        };

        self.call_stack.push(return_info);
        self.pc = absolute_pc as usize;

        // Call-depth statistics, only when VM debugging is compiled in.
        #[cfg(feature = "debug_vm")]
        if self.debug_stats_enabled {
            self.debug.call_depth += 1;
            if self.debug.call_depth > self.debug.max_call_depth {
                self.debug.max_call_depth = self.debug.call_depth;
            }
        }
    }
1831
    /// Dispatches a host (native) function call: packs the 16-bit function
    /// id, raw VM memory, and the first `register_count + 1` registers into
    /// `HostArgs` and hands them to the host callback.
    #[inline]
    fn execute_host_call(
        &mut self,
        function_id_lower: u8,
        function_id_upper: u8,
        register_count: u8,
        callback: &mut dyn HostFunctionCallback,
    ) {
        let heap = self.memory();

        let function_id = u8s_to_u16!(function_id_lower, function_id_upper);

        // SAFETY: HostArgs borrows the raw memory and register pointers only
        // for the duration of this dispatch call.
        unsafe {
            let host_args = HostArgs::new(
                function_id,
                heap.memory,
                heap.memory_size,
                heap.stack_offset,
                self.registers.as_mut_ptr(),
                register_count as usize + 1,
            );

            callback.dispatch_host_call(host_args);
        }
    }
1857
1858 #[allow(clippy::missing_const_for_fn)]
1859 #[inline(always)]
1860 fn execute_enter(
1861 &mut self,
1862 frame_size_0: u8,
1863 frame_size_1: u8,
1864 frame_size_2: u8,
1865 frame_size_3: u8,
1866 ) {
1867 let frame_size = u32_from_u8s!(frame_size_0, frame_size_1, frame_size_2, frame_size_3);
1868 self.memory.set_fp_from_sp(); self.memory.inc_sp(frame_size as usize);
1870 #[cfg(feature = "debug_vm")]
1871 if self.debug_stats_enabled
1872 && self.memory.stack_offset > self.debug.max_stack_offset {
1873 self.debug.max_stack_offset = self.memory.stack_offset - self.memory.stack_start;
1874 }
1875 }
1876
1877 #[inline]
1878 fn execute_ret(&mut self) {
1879 let call_frame = self.call_stack.pop().unwrap();
1880
1881 self.memory.pop(
1882 call_frame.previous_frame_offset,
1883 call_frame.previous_stack_offset,
1884 );
1885
1886 self.pc = call_frame.return_address;
1888 self.pc -= 1; #[cfg(feature = "debug_vm")]
1893 if self.debug_stats_enabled {
1894 self.debug.call_depth -= 1;
1895 }
1896 }
1897
1898 #[inline]
1899 const fn u8s_to_32(a: u8, b: u8, c: u8, d: u8) -> u32 {
1900 u32::from_le_bytes([a, b, c, d])
1901 }
1902
1903 #[inline]
1904 pub fn get_const_ptr_from_reg(&self, reg: u8) -> *const u8 {
1905 let ptr_addr = get_reg!(self, reg);
1906 self.memory.get_heap_const_ptr(ptr_addr as usize)
1907 }
1908
1909 #[inline]
1910 pub fn get_const_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *const u8 {
1911 let ptr_addr = get_reg!(self, reg) + offset;
1912 self.memory.get_heap_const_ptr(ptr_addr as usize)
1913 }
1914
1915 #[inline]
1916 pub fn get_ptr_from_reg(&self, reg: u8) -> *mut u8 {
1917 let ptr_addr = get_reg!(self, reg);
1918 self.memory.get_heap_ptr(ptr_addr as usize)
1919 }
1920
1921 #[inline]
1922 pub fn get_ptr_and_addr_from_reg(&self, reg: u8) -> (*mut u8, u32) {
1923 let ptr_addr = get_reg!(self, reg);
1924 (self.memory.get_heap_ptr(ptr_addr as usize), ptr_addr)
1925 }
1926
1927 #[inline]
1928 pub fn get_ptr_from_reg_with_offset(&self, reg: u8, offset: u32) -> *mut u8 {
1929 let ptr_addr = get_reg!(self, reg) + offset;
1930 self.memory.get_heap_ptr(ptr_addr as usize)
1931 }
1932}