use crate::memory::Memory;
use crate::{get_reg, i16_from_u8s, TrapCode, Vm};
use crate::{set_reg, u16_from_u8s};
use hashmap_mem::MapHeader;
use std::ptr;
use swamp_vm_isa::MapIterator;

impl Vm {
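    /// Interprets the pointer held in `header_reg` as a mutable `MapHeader`.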
    pub fn get_map_header(&self, header_reg: u8) -> *mut MapHeader {
        self.get_ptr_from_reg(header_reg) as *mut MapHeader
    }

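    /// Resolves a heap address to a mutable `MapHeader` pointer.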
    pub fn get_map_header_mut(&self, addr: u32) -> *mut MapHeader {
        self.memory.get_heap_ptr(addr as usize) as *mut MapHeader
    }

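    /// Resolves a heap address to a read-only `MapHeader` pointer.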
    pub fn get_map_header_const(&self, addr: u32) -> *const MapHeader {
        self.memory.get_heap_ptr(addr as usize) as *const MapHeader
    }

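    /// Copies the `MapHeader` referenced by `header_reg` out of VM memory,
    /// returning it together with its heap address.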
    pub fn read_map_header(&self, header_reg: u8) -> (MapHeader, u32) {
        let (map_ptr, map_addr) = self.get_ptr_and_addr_from_reg(header_reg);
        unsafe { (*(map_ptr as *const MapHeader), map_addr) }
    }

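    /// Copies the `MapHeader` stored at `map_header_heap_addr` in the given heap.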
    #[must_use]
    pub fn read_map_header_from_heap(map_header_heap_addr: u32, heap: &Memory) -> MapHeader {
        let ptr = heap
            .get_heap_const_ptr(map_header_heap_addr as usize)
            .cast::<MapHeader>();
        unsafe { *ptr }
    }

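    /// Headroom between a map's logical element limit and its physical
    /// capacity; presumably used by callers when sizing backing storage
    /// (nothing in this file consumes it directly).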
    pub const ELEMENT_COUNT_FACTOR: f32 = 1.5;

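    /// Looks up the key stored at `key_ptr_addr`, reserving a fresh entry when
    /// it is absent. Returns the entry's heap offset, or 0 when no entry could
    /// be found or reserved (e.g. the map is full).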
    fn get_or_reserve_entry(memory: &Memory, header: *mut MapHeader, key_ptr_addr: usize) -> u32 {
        unsafe {
            let key_ptr = memory.get_heap_ptr(key_ptr_addr);
            let entry_ptr = hashmap_mem::get_or_reserve_entry(header as *mut u8, key_ptr);
            if entry_ptr.is_null() {
                return 0;
            }

            memory.get_heap_offset(entry_ptr)
        }
    }

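    /// Resolves the key pointed to by `key_source` to the heap address of its
    /// entry and stores that address in `dst_entry_address`; traps with
    /// `MapEntryNotFound` when the key is absent.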
    pub fn execute_map_open_addressing_get_entry_location(
        &mut self,
        dst_entry_address: u8,
        self_map_header_reg: u8,
        key_source: u8,
    ) {
        let map_header_addr = get_reg!(self, self_map_header_reg);
        let map_header_ptr = self.get_map_header_mut(map_header_addr);
        unsafe {
            if (*map_header_ptr).padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
        }
        let key_source_address = get_reg!(self, key_source) as usize;
        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            unsafe {
                eprintln!(
                    "-- map_get_entry_location start {map_header_addr:X}, key_source: {key_source_address:X} {:?}",
                    *map_header_ptr
                );
            }
        }

        let address_to_entry = unsafe {
            Self::lookup_open_addressing(&self.memory, map_header_ptr, key_source_address)
        };

        if address_to_entry == 0 {
            return self.internal_trap(TrapCode::MapEntryNotFound);
        }

        set_reg!(self, dst_entry_address, address_to_entry);
    }

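    /// Initializes an open-addressing map at the header address held in
    /// `self_map_header_reg`. The logical element limit arrives as a 16-bit
    /// immediate split across two instruction bytes; key and value sizes come
    /// from registers, their alignments as immediates.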
    #[inline]
    pub fn execute_map_open_addressing_init(
        &mut self,
        self_map_header_reg: u8,
        logical_limit_immediate_lower: u8,
        logical_limit_immediate_upper: u8,
        key_size_reg: u8,
        key_alignment: u8,
        value_size_reg: u8,
        value_alignment: u8,
    ) {
        let map_header_addr = get_reg!(self, self_map_header_reg);
        let map_header = self.memory.get_heap_ptr(map_header_addr as usize);
        let logical_limit =
            u16_from_u8s!(logical_limit_immediate_lower, logical_limit_immediate_upper);
        let key_size = get_reg!(self, key_size_reg);
        let value_size = get_reg!(self, value_size_reg);
        let capacity = logical_limit.next_power_of_two();
        debug_assert_ne!(capacity, 0);
        assert!(capacity.is_power_of_two());

        let (bucket_layout, map_init) = hashmap_mem::layout(
            key_size,
            key_alignment,
            value_size,
            value_alignment,
            logical_limit,
        );

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!(
                "map_init {map_header_addr:08X}: logical_limit:{logical_limit} capacity:{capacity}, key_size:{key_size}, key_alignment:{key_alignment}, value_size:{value_size}, value_alignment:{value_alignment}, bucket_size:{}, value_offset:{}",
                bucket_layout.bucket_size, bucket_layout.value_offset,
            );
        }

        unsafe {
            hashmap_mem::init(map_header, &map_init);
        }
    }

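    /// Looks up the key pointed to by `key_source_ptr_reg`, reserving a new
    /// entry when it is absent, and stores the entry's heap address in
    /// `dst_entry_address`; traps when no entry could be created.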
    pub fn execute_map_open_addressing_get_or_reserve_entry(
        &mut self,
        dst_entry_address: u8,
        self_map_header_reg: u8,
        key_source_ptr_reg: u8,
    ) {
        let map_header_addr = get_reg!(self, self_map_header_reg);
        let map_header = self.get_map_header_mut(map_header_addr);
        unsafe {
            if (*map_header).padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
        }

        let key_source_address = get_reg!(self, key_source_ptr_reg) as usize;

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            unsafe {
                eprintln!(
                    "-- map_get_or_reserve_entry start {map_header_addr:X}, key_source: {key_source_address:X} {:?}",
                    *map_header
                );
            }
        }

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            eprintln!("map_get_or_reserve_entry: looking up the key, reserving a new entry if it is absent");
        }

        let entry_address = unsafe {
            Self::get_or_reserve_entry(&self.memory, map_header, key_source_address)
        };
        if entry_address == 0 {
            return self.internal_trap(TrapCode::MapEntryNotFoundAndCouldNotBeCreated);
        }
        set_reg!(self, dst_entry_address, entry_address);
    }

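    /// Stores into `dest_reg` whether the key pointed to by `key_source_reg`
    /// exists in the map.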
    pub fn execute_map_open_addressing_has(
        &mut self,
        dest_reg: u8,
        self_const_map_header_reg: u8,
        key_source_reg: u8,
    ) {
        let map_header_addr = get_reg!(self, self_const_map_header_reg);
        let map_header_ptr = self.get_map_header_mut(map_header_addr);
        unsafe {
            if (*map_header_ptr).padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
        }
        let key_source_address = get_reg!(self, key_source_reg) as usize;

        unsafe {
            let found = Self::has_open_addressing(&self.memory, map_header_ptr, key_source_address);

            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                eprintln!(
                    "map.has returned: {found} for key pointer:{:X} map:{:?}",
                    key_source_address, *map_header_ptr
                );
            }

            set_reg!(self, dest_reg, found);
        }
    }

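    /// Overwrites the target map's contents with the source map's. Both
    /// headers are validated first; traps with `MapCouldNotBeCopied` when
    /// `hashmap_mem::overwrite` rejects the copy (presumably on incompatible
    /// layouts or insufficient capacity).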
    pub fn execute_map_overwrite(&mut self, target_map_header_reg: u8, source_map_header_reg: u8) {
        let target_map_header_addr = get_reg!(self, target_map_header_reg);
        let target_map_header = self.get_map_header_mut(target_map_header_addr);

        let source_map_header_addr = get_reg!(self, source_map_header_reg);
        let source_map_header = self.get_map_header_const(source_map_header_addr);

        unsafe {
            if (*target_map_header).padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
            if (*source_map_header).padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
        }

        let could_overwrite = unsafe {
            hashmap_mem::overwrite(target_map_header as *mut u8, source_map_header as *const u8)
        };
        if !could_overwrite {
            self.internal_trap(TrapCode::MapCouldNotBeCopied);
        }
    }

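    /// Removes the entry whose key is pointed to by `key_source_reg`; traps
    /// with `MapEntryNotFoundForRemoval` when the key is absent.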
    pub fn execute_map_open_addressing_remove(
        &mut self,
        self_map_header_reg: u8,
        key_source_reg: u8,
    ) {
        let map_header_addr = get_reg!(self, self_map_header_reg);
        let map_header = self.get_map_header_mut(map_header_addr);
        unsafe {
            if (*map_header).padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }
        }

        let key_source_address = get_reg!(self, key_source_reg) as usize;

        unsafe {
            let found = Self::remove_open_addressing(&self.memory, map_header, key_source_address);
            if !found {
                self.internal_trap(TrapCode::MapEntryNotFoundForRemoval);
            }
        }
    }

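    /// # Safety
    /// `map_header` must point at a valid, initialized `MapHeader`, and
    /// `key_ptr_addr` must address a key of the size recorded in that header.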
    unsafe fn has_open_addressing(
        memory: &Memory,
        map_header: *mut MapHeader,
        key_ptr_addr: usize,
    ) -> bool {
        unsafe {
            let key_ptr = memory.get_heap_const_ptr(key_ptr_addr);
            hashmap_mem::has(map_header as *mut u8, key_ptr)
        }
    }

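    /// Resolves a key to the heap offset of its matching value, or 0 when absent.
    ///
    /// # Safety
    /// Same contract as [`Self::has_open_addressing`].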
    unsafe fn lookup_open_addressing(
        memory: &Memory,
        map_header: *mut MapHeader,
        key_ptr_addr: usize,
    ) -> u32 {
        unsafe {
            let key_ptr = memory.get_heap_const_ptr(key_ptr_addr);

            let value_ptr = hashmap_mem::lookup(map_header as *mut u8, key_ptr);

            if value_ptr.is_null() {
                0
            } else {
                memory.get_heap_offset(value_ptr)
            }
        }
    }

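    /// Removes the entry for the key, returning whether it was present.
    ///
    /// # Safety
    /// Same contract as [`Self::has_open_addressing`].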
    unsafe fn remove_open_addressing(
        memory: &Memory,
        map_header_ptr: *mut MapHeader,
        key_ptr_addr: usize,
    ) -> bool {
        unsafe {
            let key_ptr = memory.get_heap_const_ptr(key_ptr_addr);

            hashmap_mem::remove(map_header_ptr as *mut u8, key_ptr)
        }
    }

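    /// Writes a fresh `MapIterator` (the map's heap address plus a zeroed
    /// bucket index) into the memory pointed to by
    /// `target_map_iterator_header_reg`.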
    pub(crate) fn execute_map_iter_init(
        &mut self,
        target_map_iterator_header_reg: u8,
        map_header_reg: u8,
    ) {
        let map_header_addr = get_reg!(self, map_header_reg);

        let map_header = Self::read_map_header_from_heap(map_header_addr, &self.memory);
        if map_header.padding_and_secret_code != hashmap_mem::SECRET_CODE {
            return self.internal_trap(TrapCode::MemoryCorruption);
        }

        #[cfg(feature = "debug_vm")]
        if self.debug_operations_enabled {
            let iter_addr = get_reg!(self, target_map_iterator_header_reg);

            eprintln!(
                "pc: {:04X}, map_iter_init: iter_addr: {iter_addr:X} map_header_addr:{map_header_addr:X} key_size:{}, value_offset:{}, value_size:{} bucket_size:{} capacity:{}",
                self.pc,
                map_header.key_size,
                map_header.value_offset,
                map_header.value_size,
                map_header.bucket_size,
                map_header.capacity,
            );
        }

        let map_iterator = MapIterator {
            map_header_frame_offset: map_header_addr,
            index: 0,
        };

        let map_iterator_mut_ptr =
            self.get_ptr_from_reg(target_map_iterator_header_reg) as *mut MapIterator;

        unsafe {
            ptr::write(map_iterator_mut_ptr, map_iterator);
        }
    }

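    /// Interprets the pointer held in `map_iterator_reg` as a mutable `MapIterator`.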
    pub fn get_map_iterator_header_ptr_from_reg(&self, map_iterator_reg: u8) -> *mut MapIterator {
        self.get_ptr_from_reg(map_iterator_reg) as *mut MapIterator
    }

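    /// Advances the iterator to the next occupied bucket, storing the key's
    /// and value's heap addresses in the target registers. When the map is
    /// exhausted, branches by the signed 16-bit offset instead.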
    pub fn execute_map_iter_next_pair(
        &mut self,
        map_iterator_header_reg: u8,
        target_key_reg: u8,
        target_value_reg: u8,
        branch_offset_lower: u8,
        branch_offset_upper: u8,
    ) {
        let map_iterator = self.get_map_iterator_header_ptr_from_reg(map_iterator_header_reg);

        unsafe {
            let map_header_addr = (*map_iterator).map_header_frame_offset;
            let map_header_ptr =
                self.memory.get_heap_const_ptr(map_header_addr as usize) as *const MapHeader;

            let map_header = &*map_header_ptr;
            if map_header.padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                let iter_addr = get_reg!(self, map_iterator_header_reg);
                let index = (*map_iterator).index;
                eprintln!(
                    "pc: {:04X}, map_iter_next_pair: iter_addr: {iter_addr:X} addr:{map_header_addr:X} index:{index} len: {}, capacity: {}",
                    self.pc, map_header.element_count, map_header.capacity
                );
            }

            let index = (*map_iterator).index;

            let (key_ptr, value_ptr, found_index) =
                hashmap_mem::find_next_valid_entry(map_header_ptr as *mut u8, index as u16);
            if !key_ptr.is_null() {
                let key_offset = self.memory.get_heap_offset(key_ptr);
                let value_offset = self.memory.get_heap_offset(value_ptr);
                set_reg!(self, target_key_reg, key_offset);
                set_reg!(self, target_value_reg, value_offset);
                (*map_iterator).index = (found_index + 1) as u32;
            } else {
                let branch_offset = i16_from_u8s!(branch_offset_lower, branch_offset_upper);

                #[cfg(feature = "debug_vm")]
                if self.debug_operations_enabled {
                    eprintln!(
                        "pc: {:04X}, map_iter_next_pair complete. jumping with offset {branch_offset}",
                        self.pc
                    );
                }

                self.pc = (self.pc as i32 + branch_offset as i32) as usize;
            }
        }
    }

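    /// Like [`Self::execute_map_iter_next_pair`], but stores only the value's
    /// heap address; presumably emitted for loops that bind values alone.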
    pub fn execute_map_iter_next(
        &mut self,
        map_iterator_header_reg: u8,
        target_value_reg: u8,
        branch_offset_lower: u8,
        branch_offset_upper: u8,
    ) {
        let map_iterator = self.get_map_iterator_header_ptr_from_reg(map_iterator_header_reg);

        unsafe {
            let map_header_addr = (*map_iterator).map_header_frame_offset;
            let map_header_ptr =
                self.memory.get_heap_const_ptr(map_header_addr as usize) as *const MapHeader;
            let map_header = &*map_header_ptr;
            if map_header.padding_and_secret_code != hashmap_mem::SECRET_CODE {
                return self.internal_trap(TrapCode::MemoryCorruption);
            }

            #[cfg(feature = "debug_vm")]
            if self.debug_operations_enabled {
                let iter_addr = get_reg!(self, map_iterator_header_reg);
                let index = (*map_iterator).index;
                eprintln!(
                    "map_iter_next: iter_addr: {iter_addr:X} addr:{map_header_addr:X} index:{index} len: {}, capacity: {}",
                    map_header.element_count, map_header.capacity
                );
            }

            let index = (*map_iterator).index;

            let (_key_ptr, value_ptr, found_index) =
                hashmap_mem::find_next_valid_entry(map_header_ptr as *mut u8, index as u16);

            if !value_ptr.is_null() {
                let value_offset = self.memory.get_heap_offset(value_ptr);

                set_reg!(self, target_value_reg, value_offset);
                (*map_iterator).index = (found_index + 1) as u32;
            } else {
                let branch_offset = i16_from_u8s!(branch_offset_lower, branch_offset_upper);

                #[cfg(feature = "debug_vm")]
                if self.debug_operations_enabled {
                    eprintln!("map_iter_next complete. jumping with offset {branch_offset}");
                }

                self.pc = (self.pc as i32 + branch_offset as i32) as usize;
            }
        }
    }
}