use crate::memory::Memory;
use crate::set_reg;
use crate::{get_reg, i16_from_u8s, u16_from_u8s, u32_from_u8s, TrapCode, Vm};
use std::ptr;
use swamp_vm_types::{
    VecHeader, VecIterator, VEC_HEADER_MAGIC_CODE, VEC_HEADER_PAYLOAD_OFFSET, VEC_HEADER_SIZE,
};

impl Vm {
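    /// Returns the pointer stored in `vec_iterator_reg`, reinterpreted as a mutable
    /// `VecIterator`.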
    pub fn get_vec_iterator_header_ptr_from_reg(&self, vec_iterator_reg: u8) -> *mut VecIterator {
        self.get_ptr_from_reg(vec_iterator_reg) as *mut VecIterator
    }

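    /// Opcode handler: initializes a `VecHeader` for a fixed-size array. The capacity
    /// arrives split across two instruction bytes and the element size across four;
    /// both are reassembled here. `element_count` is set equal to `capacity`, since an
    /// array is created at its full length.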
    #[inline]
    pub fn execute_array_init(
        &mut self,
        target_vec_ptr_reg: u8,
        capacity_lower: u8,
        capacity_upper: u8,
        element_size_0: u8,
        element_size_1: u8,
        element_size_2: u8,
        element_size_3: u8,
    ) {
        let vec_addr = get_reg!(self, target_vec_ptr_reg);
        let mut_vec_ptr = self
            .memory
            .get_heap_ptr(vec_addr as usize)
            .cast::<VecHeader>();
        let capacity = u16_from_u8s!(capacity_lower, capacity_upper);
        let element_size = u32_from_u8s!(
            element_size_0,
            element_size_1,
            element_size_2,
            element_size_3
        );
        debug_assert_ne!(capacity, 0, "illegal capacity");
        unsafe {
            (*mut_vec_ptr).element_count = capacity;
            (*mut_vec_ptr).capacity = capacity;
            (*mut_vec_ptr).element_size = element_size;
            (*mut_vec_ptr).padding = VEC_HEADER_MAGIC_CODE;
        }

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("array init element_size:{element_size} into vec_addr: {vec_addr:X}");
        }
    }

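    /// Opcode handler: compares two vecs for equality and writes the result into
    /// `bool_target_reg`. Vecs of different lengths are never equal; otherwise the
    /// payloads are compared bytewise, which assumes element padding bytes are
    /// deterministic.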
    #[inline]
    pub fn execute_vec_cmp(
        &mut self,
        bool_target_reg: u8,
        left_vec_ptr_reg: u8,
        right_vec_ptr_reg: u8,
    ) {
        let left_vec_addr = get_reg!(self, left_vec_ptr_reg);
        let right_vec_addr = get_reg!(self, right_vec_ptr_reg);

        let left_vec_ptr = self
            .memory
            .get_heap_const_ptr(left_vec_addr as usize)
            .cast::<VecHeader>();

        let right_vec_ptr = self
            .memory
            .get_heap_const_ptr(right_vec_addr as usize)
            .cast::<VecHeader>();

        unsafe {
            if (*left_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*left_vec_ptr).capacity == 0 {
                eprintln!("LEFT VEC IS NOT INITIALIZED");
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }
            if (*right_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*right_vec_ptr).capacity == 0 {
                eprintln!("RIGHT VEC IS NOT INITIALIZED");
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }

            if (*left_vec_ptr).element_count != (*right_vec_ptr).element_count {
                // Different lengths can never be equal; return early so the byte
                // comparison below (sized from the left vec) does not overwrite this.
                set_reg!(self, bool_target_reg, 0);
                return;
            }

            let left_ptr = self
                .memory
                .get_heap_const_ptr(left_vec_addr as usize)
                .add(VEC_HEADER_PAYLOAD_OFFSET.0 as usize);

            let right_ptr = self
                .memory
                .get_heap_const_ptr(right_vec_addr as usize)
                .add(VEC_HEADER_PAYLOAD_OFFSET.0 as usize);

            let byte_size =
                ((*left_vec_ptr).element_count as usize) * (*left_vec_ptr).element_size as usize;
            let slice_a = std::slice::from_raw_parts(left_ptr, byte_size);
            let slice_b = std::slice::from_raw_parts(right_ptr, byte_size);

            set_reg!(self, bool_target_reg, slice_a == slice_b);
        }
    }

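    /// Opcode handler: copies the source vec's contents into the target vec. The copy
    /// starts two bytes into each header, which (judging by the debug assertions below)
    /// skips the target's capacity field while bringing over the rest of the header and
    /// the payload. Traps if the target's capacity is too small.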
    #[inline]
    pub fn execute_vec_copy(&mut self, target_vec_ptr_reg: u8, source_vec_ptr_reg: u8) {
        let target_vec_addr = get_reg!(self, target_vec_ptr_reg);
        let source_vec_addr = get_reg!(self, source_vec_ptr_reg);

        let mut_vec_ptr = self
            .memory
            .get_heap_ptr(target_vec_addr as usize)
            .cast::<VecHeader>();

        let src_vec_ptr = self
            .memory
            .get_heap_const_ptr(source_vec_addr as usize)
            .cast::<VecHeader>();

        unsafe {
            if (*mut_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*mut_vec_ptr).capacity == 0 {
                eprintln!("TARGET IS NOT INITIALIZED");
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }
            if (*src_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*src_vec_ptr).capacity == 0 {
                eprintln!("SOURCE IS NOT INITIALIZED");
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }

            if (*mut_vec_ptr).capacity < (*src_vec_ptr).element_count {
                return self.internal_trap(TrapCode::VecOutOfCapacity {
                    encountered: (*src_vec_ptr).element_count,
                    capacity: (*mut_vec_ptr).capacity,
                });
            }

            let target_capacity = (*mut_vec_ptr).capacity;

            // The copy starts two bytes past the header start, so the target's leading
            // capacity field is preserved (the debug assertions below rely on this).
            let target_tail = (target_vec_addr + 2) as usize;
            let target_raw = self.memory.get_heap_ptr(target_tail);
            let source_tail = (source_vec_addr + 2) as usize;
            let source_raw = self.memory.get_heap_const_ptr(source_tail);

            let total_bytes_to_copy = (VEC_HEADER_SIZE.0 - 2)
                + ((*src_vec_ptr).element_count as u32) * (*src_vec_ptr).element_size;

            ptr::copy_nonoverlapping(source_raw, target_raw, total_bytes_to_copy as usize);

            debug_assert_eq!(
                (*mut_vec_ptr).element_count,
                (*src_vec_ptr).element_count,
                "element count differs"
            );
            debug_assert_eq!(
                (*mut_vec_ptr).capacity,
                target_capacity,
                "capacity has been modified"
            );
        }
    }

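    /// Opcode handler: copies the source elements selected by the range register into
    /// the target vec, whose `element_count` is set to exactly that many elements.
    /// Reverse ranges trap, as do ranges that overrun the source or the target capacity.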
    #[inline]
    pub fn execute_vec_copy_range(
        &mut self,
        target_vec_ptr_reg: u8,
        source_vec_ptr_reg: u8,
        range_reg: u8,
    ) {
        let target_vec_addr = get_reg!(self, target_vec_ptr_reg);
        let source_vec_addr = get_reg!(self, source_vec_ptr_reg);
        let range_header = self.range_header_from_reg(range_reg);
        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("vec_copy_range: {range_header:?}");
        }

        let mut_vec_ptr = self
            .memory
            .get_heap_ptr(target_vec_addr as usize)
            .cast::<VecHeader>();

        let src_vec_ptr = self
            .memory
            .get_heap_const_ptr(source_vec_addr as usize)
            .cast::<VecHeader>();

        unsafe {
            if (*mut_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*mut_vec_ptr).capacity == 0 {
                eprintln!("TARGET IS NOT INITIALIZED");
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }
            if (*src_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*src_vec_ptr).capacity == 0 {
                eprintln!("SOURCE IS NOT INITIALIZED");
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }

            if range_header.max < range_header.min {
                return self.internal_trap(TrapCode::ReverseRangeNotAllowedHere);
            }

            let num_elements_to_copy = if range_header.inclusive {
                (range_header.max - range_header.min + 1) as u32
            } else {
                (range_header.max - range_header.min) as u32
            };
            let source_element_index = range_header.min as u32;
            let required_source_element_count = source_element_index + num_elements_to_copy;

            if (*mut_vec_ptr).capacity < num_elements_to_copy as u16 {
                return self.internal_trap(TrapCode::VecOutOfCapacity {
                    // Report the requested element count, not the source's length.
                    encountered: num_elements_to_copy as u16,
                    capacity: (*mut_vec_ptr).capacity,
                });
            }

            if (*src_vec_ptr).element_count < required_source_element_count as u16 {
                return self.internal_trap(TrapCode::VecBoundsFail {
                    encountered: required_source_element_count as usize,
                    element_count: (*src_vec_ptr).element_count as usize,
                });
            }

            let target_capacity = (*mut_vec_ptr).capacity;

            let target_payload = (target_vec_addr + VEC_HEADER_PAYLOAD_OFFSET.0) as usize;
            let target_raw = self.memory.get_heap_ptr(target_payload);

            let source_slice_start = (source_vec_addr
                + VEC_HEADER_PAYLOAD_OFFSET.0
                + source_element_index * (*src_vec_ptr).element_size)
                as usize;
            let source_raw = self.memory.get_heap_const_ptr(source_slice_start);

            let total_bytes_to_copy = num_elements_to_copy * (*src_vec_ptr).element_size;

            ptr::copy_nonoverlapping(source_raw, target_raw, total_bytes_to_copy as usize);
            (*mut_vec_ptr).element_count = num_elements_to_copy as u16;

            debug_assert_eq!(
                (*mut_vec_ptr).capacity,
                target_capacity,
                "capacity has been modified"
            );
        }
    }

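    /// Opcode handler: initializes a `VecHeader` for an empty vec with the given
    /// capacity and element size, and stamps the magic code used for corruption checks.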
    #[inline]
    pub fn execute_vec_init(
        &mut self,
        target_vec_ptr_reg: u8,
        capacity_lower: u8,
        capacity_upper: u8,
        element_size_0: u8,
        element_size_1: u8,
        element_size_2: u8,
        element_size_3: u8,
    ) {
        let vec_addr = get_reg!(self, target_vec_ptr_reg);
        let mut_vec_ptr = self
            .memory
            .get_heap_ptr(vec_addr as usize)
            .cast::<VecHeader>();
        let capacity = u16_from_u8s!(capacity_lower, capacity_upper);
        let element_size = u32_from_u8s!(
            element_size_0,
            element_size_1,
            element_size_2,
            element_size_3
        );
        debug_assert_ne!(capacity, 0, "illegal capacity");
        unsafe {
            (*mut_vec_ptr).capacity = capacity;
            // Unlike arrays, vecs start empty; pushes bump element_count later.
            (*mut_vec_ptr).element_count = 0;
            (*mut_vec_ptr).element_size = element_size;
            (*mut_vec_ptr).padding = VEC_HEADER_MAGIC_CODE;
        }
    }

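    /// Opcode handler: writes a fresh `VecIterator` (vec header address plus a zeroed
    /// index) into the target register's memory, after validating the vec header.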
    #[inline]
    pub fn execute_vec_iter_init(
        &mut self,
        target_vec_iterator_header_reg: u8,
        vec_header_reg: u8,
    ) {
        let vec_header_addr = get_reg!(self, vec_header_reg);

        let vec_header_ptr = self
            .memory
            .get_heap_const_ptr(vec_header_addr as usize)
            .cast::<VecHeader>();
        let vec_header = unsafe { &*vec_header_ptr };

        if vec_header.padding != VEC_HEADER_MAGIC_CODE {
            return self.internal_trap(TrapCode::MemoryCorruption);
        }
        if vec_header.capacity == 0 {
            return self.internal_trap(TrapCode::VecNeverInitialized);
        }

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            let iter_addr = get_reg!(self, target_vec_iterator_header_reg);
            eprintln!(
                "vec_iter_init: iter_addr: {iter_addr:04X} vec_header_addr:{vec_header_addr:04X} element_size: {}",
                vec_header.element_size
            );
        }

        debug_assert!(vec_header.element_size > 0, "Element size cannot be zero");

        let vec_iterator = VecIterator {
            vec_header_heap_ptr: vec_header_addr,
            index: 0,
        };

        let vec_iterator_mut_ptr =
            self.get_ptr_from_reg(target_vec_iterator_header_reg) as *mut VecIterator;

        unsafe {
            ptr::write(vec_iterator_mut_ptr, vec_iterator);
        }
    }

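    /// Opcode handler: advances the iterator. While elements remain, the address of the
    /// current element is written to `target_variable`; when exhausted, the signed
    /// 16-bit branch offset is applied to the program counter instead.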
    #[inline]
    pub fn execute_vec_iter_next(
        &mut self,
        vec_iterator_header_reg: u8,
        target_variable: u8,
        branch_offset_lower: u8,
        branch_offset_upper: u8,
    ) {
        let vec_iterator = self.get_vec_iterator_header_ptr_from_reg(vec_iterator_header_reg);

        unsafe {
            let vec_header_addr = (*vec_iterator).vec_header_heap_ptr;
            let vec_header_ptr = self
                .memory
                .get_heap_const_ptr(vec_header_addr as usize)
                .cast::<VecHeader>();
            let vec_header = &*vec_header_ptr;
            if vec_header.padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                let iter_addr = get_reg!(self, vec_iterator_header_reg);
                let index = (*vec_iterator).index;
                eprintln!(
                    "vec_iter_next: iter_addr: {iter_addr:04X} addr:{vec_header_addr:04X} index:{index} len: {}, capacity: {}, element_size: {}",
                    vec_header.element_count, vec_header.capacity, vec_header.element_size
                );
            }

            if (*vec_iterator).index >= vec_header.element_count {
                let branch_offset = i16_from_u8s!(branch_offset_lower, branch_offset_upper);

                #[cfg(feature = "debug_vm")]
                if self.debug_operations_enabled {
                    eprintln!("vec_iter_next complete. jumping with offset {branch_offset}");
                }

                self.pc = (self.pc as i32 + branch_offset as i32) as usize;

                return;
            }

            let element_addr = (*vec_iterator).vec_header_heap_ptr
                + VEC_HEADER_PAYLOAD_OFFSET.0
                + (*vec_iterator).index as u32 * vec_header.element_size;

            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                eprintln!(
                    "vec_iter_next: element_addr {element_addr:04X} to reg {target_variable}"
                );
            }

            set_reg!(self, target_variable, element_addr);

            (*vec_iterator).index += 1;
        }
    }

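    /// Opcode handler: like `execute_vec_iter_next`, but also writes the current index
    /// into `target_key_reg`, yielding an (index, element address) pair.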
    #[inline]
    pub fn execute_vec_iter_next_pair(
        &mut self,
        vec_iterator_header_reg: u8,
        target_key_reg: u8,
        target_value_reg: u8,
        branch_offset_lower: u8,
        branch_offset_upper: u8,
    ) {
        let vec_iterator = self.get_vec_iterator_header_ptr_from_reg(vec_iterator_header_reg);

        unsafe {
            let vec_header_addr = (*vec_iterator).vec_header_heap_ptr;
            let vec_header_ptr = self
                .memory
                .get_heap_const_ptr(vec_header_addr as usize)
                .cast::<VecHeader>();
            let vec_header = &*vec_header_ptr;
            if vec_header.padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                let iter_addr = get_reg!(self, vec_iterator_header_reg);
                let index = (*vec_iterator).index;
                eprintln!(
                    "vec_iter_next_pair: iter_addr: {iter_addr:04X} addr:{vec_header_addr:04X} index:{index} len: {}, capacity: {}",
                    vec_header.element_count, vec_header.capacity
                );
            }

            if (*vec_iterator).index >= vec_header.element_count {
                let branch_offset = i16_from_u8s!(branch_offset_lower, branch_offset_upper);

                #[cfg(feature = "debug_vm")]
                if self.debug_operations_enabled {
                    eprintln!("vec_iter_next_pair complete. jumping with offset {branch_offset}");
                }

                self.pc = (self.pc as i32 + branch_offset as i32) as usize;

                return;
            }

            let element_addr = (*vec_iterator).vec_header_heap_ptr
                + VEC_HEADER_PAYLOAD_OFFSET.0
                + (*vec_iterator).index as u32 * vec_header.element_size;

            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                eprintln!(
                    "vec_iter_next_pair: element_addr {element_addr:04X} to reg {target_value_reg}"
                );
            }

            set_reg!(self, target_key_reg, (*vec_iterator).index);
            set_reg!(self, target_value_reg, element_addr);

            (*vec_iterator).index += 1;
        }
    }

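    /// Reads a `VecHeader` by value from the given heap offset.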
    pub fn vec_header_from_heap(heap: &Memory, heap_offset: u32) -> VecHeader {
        unsafe { *(heap.get_heap_const_ptr(heap_offset as usize) as *const VecHeader) }
    }

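    /// Reads a `VecHeader` by value through the pointer held in the given register.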
    pub fn read_vec_header_from_ptr_reg(&self, vec_header_ptr_reg: u8) -> VecHeader {
        let vec_header_const_ptr =
            self.get_const_ptr_from_reg(vec_header_ptr_reg) as *const VecHeader;
        unsafe { *vec_header_const_ptr }
    }

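    /// Returns the register's pointer reinterpreted as a mutable `VecHeader`.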
    pub fn get_vec_header_ptr_from_reg(&self, vec_header_ptr_reg: u8) -> *mut VecHeader {
        self.get_ptr_from_reg(vec_header_ptr_reg) as *mut VecHeader
    }

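    /// Opcode handler: bounds-checks the index in `int_reg` against the element count
    /// and writes the address of the selected element into `element_target_reg`.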
    #[inline]
    pub fn execute_vec_get(&mut self, element_target_reg: u8, vec_header_ptr_reg: u8, int_reg: u8) {
        let vec_addr = get_reg!(self, vec_header_ptr_reg);

        let vec_header = Self::vec_header_from_heap(&self.memory, vec_addr);
        let index = get_reg!(self, int_reg);
        if vec_header.padding != VEC_HEADER_MAGIC_CODE {
            return self.internal_trap(TrapCode::MemoryCorruption);
        }

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!(
                "vec_get: vec_header_addr: {vec_addr:04X} index: {index} count: {}, capacity: {}",
                vec_header.element_count, vec_header.capacity
            );
        }

        if index >= vec_header.element_count as u32 {
            return self.internal_trap(TrapCode::VecBoundsFail {
                encountered: index as usize,
                element_count: vec_header.element_count as usize,
            });
        }

        let address_of_element =
            vec_addr + VEC_HEADER_PAYLOAD_OFFSET.0 + index * vec_header.element_size;

        set_reg!(self, element_target_reg, address_of_element);
    }

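    /// Opcode handler: intended to store the element pointed to by `item_ptr_reg` at
    /// the index in `int_index_reg`. Currently a debug-print stub; a full version would
    /// presumably mirror `execute_vec_get`'s bounds check and address arithmetic, then
    /// copy `element_size` bytes into the slot.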
    #[inline]
    pub fn execute_vec_set(&mut self, vec_header_ptr_reg: u8, int_index_reg: u8, item_ptr_reg: u8) {
        #[cfg(feature = "debug_vm")]
        {
            eprintln!("vec_set");
        }
    }

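    /// Opcode handler: reserves the next free slot by bumping `element_count` and
    /// writes the new slot's address into `destination_entry_addr_reg`, so the caller
    /// can fill it in. Traps when the vec is uninitialized or already full.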
    #[inline]
    pub fn execute_vec_push_addr(
        &mut self,
        destination_entry_addr_reg: u8,
        src_vec_header_ptr_reg: u8,
    ) {
        let vec_addr = get_reg!(self, src_vec_header_ptr_reg);

        let mut_vec_ptr = self.memory.get_heap_ptr(vec_addr as usize) as *mut VecHeader;
        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            unsafe {
                eprintln!(
                    "vec_push_addr: vec_addr: {vec_addr:08X}, header: {:?}",
                    (*mut_vec_ptr)
                );
            }
        }

        let len = unsafe { (*mut_vec_ptr).element_count };

        unsafe {
            if (*mut_vec_ptr).padding != VEC_HEADER_MAGIC_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }

            if (*mut_vec_ptr).capacity == 0 {
                return self.internal_trap(TrapCode::VecNeverInitialized);
            }
            if len >= (*mut_vec_ptr).capacity {
                return self.internal_trap(TrapCode::VecOutOfCapacity {
                    encountered: len,
                    capacity: (*mut_vec_ptr).capacity,
                });
            }
            (*mut_vec_ptr).element_count += 1;
        }

        let address_of_new_element = unsafe {
            vec_addr + VEC_HEADER_PAYLOAD_OFFSET.0 + len as u32 * (*mut_vec_ptr).element_size
        };

        set_reg!(self, destination_entry_addr_reg, address_of_new_element);
    }

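    /// Opcode handler: pops the last element by decrementing `element_count` and writes
    /// the popped element's address into `dst_reg`; the bytes stay readable until the
    /// slot is reused by a later push. Traps on an empty vec.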
    #[inline]
    pub fn execute_vec_pop(&mut self, dst_reg: u8, vec_header_ptr_reg: u8) {
        let vec_addr = get_reg!(self, vec_header_ptr_reg);
        let mut_vec_ptr = self
            .memory
            .get_heap_ptr(vec_addr as usize)
            .cast::<VecHeader>();

        unsafe {
            let header = &mut *mut_vec_ptr;

            if header.element_count == 0 {
                return self.internal_trap(TrapCode::VecEmpty);
            }

            let last_index = u32::from(header.element_count) - 1;

            let address_of_element_to_pop =
                vec_addr + VEC_HEADER_PAYLOAD_OFFSET.0 + last_index * header.element_size;

            header.element_count -= 1;

            set_reg!(self, dst_reg, address_of_element_to_pop);
        }
    }

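    /// Opcode handler: removes the element at the index in `remove_index_reg`, shifting
    /// the tail of the payload one slot to the left. Note this handler skips the
    /// magic-code check that its sibling handlers perform.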
    #[inline]
    pub fn execute_vec_remove_index(&mut self, vec_header_ptr_reg: u8, remove_index_reg: u8) {
        let vec_addr = get_reg!(self, vec_header_ptr_reg);
        let mut_vec_ptr = self
            .memory
            .get_heap_ptr(vec_addr as usize)
            .cast::<VecHeader>();

        let index = get_reg!(self, remove_index_reg);

        unsafe {
            if index >= u32::from((*mut_vec_ptr).element_count) {
                return self.internal_trap(TrapCode::VecBoundsFail {
                    encountered: index as usize,
                    element_count: (*mut_vec_ptr).element_count as usize,
                });
            }
        }

        let size_of_each_element = unsafe { (*mut_vec_ptr).element_size };
        let address_of_element_to_be_removed =
            vec_addr + VEC_HEADER_PAYLOAD_OFFSET.0 + index * size_of_each_element;

        unsafe {
            let header = &mut *mut_vec_ptr;
            let count = u32::from(header.element_count);

            if index < count - 1 {
                // Shift everything after the removed slot one element to the left.
                let src_addr = address_of_element_to_be_removed + size_of_each_element;
                let dst_addr = address_of_element_to_be_removed;
                let elems_after = (count - index - 1) as usize;
                let bytes_to_move = elems_after * size_of_each_element as usize;

                let src_ptr = self.memory.get_heap_ptr(src_addr as usize).cast_const();
                let dst_ptr = self.memory.get_heap_ptr(dst_addr as usize);

                // The regions may overlap, so the overlap-safe copy is used.
                ptr::copy(src_ptr, dst_ptr, bytes_to_move);
            }

            header.element_count -= 1;
        }
    }
}