use crate::DetailedLocationResolved;
use crate::code_bld::CodeBuilder;
use source_map_node::Node;
use swamp_vm_types::types::{BasicTypeKind, Destination, TypedRegister, VmType, u16_type};
use swamp_vm_types::{COLLECTION_CAPACITY_OFFSET, MemoryLocation};

impl CodeBuilder<'_> {
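    /// Transfers a value from `source` into `target_reg`: register sources are
    /// copied (self-copies are skipped), memory sources are loaded, and `Unit`
    /// is rejected with a panic.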
    pub(crate) fn emit_transfer_value_to_register(
        &mut self,
        target_reg: &TypedRegister,
        source: &Destination,
        node: &Node,
        comment: &str,
    ) {
        match source {
            Destination::Register(source_reg) => {
                if target_reg.index != source_reg.index {
                    self.emit_copy_register(target_reg, source_reg, node, comment);
                }
            }
            Destination::Memory(memory_location) => {
                self.emit_load_value_from_memory_source(
                    target_reg,
                    memory_location,
                    node,
                    comment,
                );
            }
            Destination::Unit => panic!("Cannot load from Unit destination"),
        }
    }

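    /// Loads the value at `source_memory_location` into `target_reg`. Scalars are
    /// loaded by value; for aggregates the effective address is computed so the
    /// register ends up holding a pointer to the aggregate.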
    pub(crate) fn emit_load_value_from_memory_source(
        &mut self,
        target_reg: &TypedRegister,
        source_memory_location: &MemoryLocation,
        node: &Node,
        comment: &str,
    ) {
        let source_type = source_memory_location.vm_type();
        if source_type.is_aggregate() {
            let source_loc = Destination::Memory(source_memory_location.clone());

            self.emit_compute_effective_address_to_target_register(
                target_reg,
                &source_loc,
                node,
                "copy aggregate pointer to target register",
            );
        } else {
            self.emit_load_scalar_from_memory_offset_instruction(
                target_reg,
                source_memory_location,
                node,
                &format!("emit primitive value. ptr to primitive reg {comment}"),
            );
        }
    }

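    /// Resolves `destination` into a register for an intrinsic call: a scalar already
    /// in a register is returned as-is, a scalar in memory is loaded into a temporary,
    /// an aggregate is flattened to its effective address, and `Unit` yields `None`.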
    pub(crate) fn emit_load_scalar_or_absolute_aggregate_pointer(
        &mut self,
        destination: &Destination,
        node: &Node,
        comment: &str,
    ) -> Option<TypedRegister> {
        if matches!(destination, Destination::Unit) {
            None
        } else {
            let vm_type = destination.vm_type().unwrap();

            if vm_type.is_scalar() {
                match destination {
                    Destination::Register(reg) => Some(reg.clone()),
                    Destination::Memory(memory_location) => {
                        let scalar_temp = self.temp_registers.allocate(
                            memory_location.ty.clone(),
                            &format!("load scalar from memory for intrinsic: {comment}"),
                        );

                        self.emit_load_scalar_from_memory_offset_instruction(
                            scalar_temp.register(),
                            memory_location,
                            node,
                            &format!("load scalar value from memory for intrinsic: {comment}"),
                        );

                        Some(scalar_temp.register)
                    }
                    Destination::Unit => unreachable!(),
                }
            } else {
                Some(self.emit_compute_effective_address_to_register(
                    destination,
                    node,
                    &format!("flatten aggregate for intrinsic: {comment}"),
                ))
            }
        }
    }

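    /// Fills `target_reg` from `source_memory_location`: computes the effective
    /// address for aggregate types, or loads the scalar value otherwise.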
    pub(crate) fn emit_load_or_calculate_address_from_memory(
        &mut self,
        target_reg: &TypedRegister,
        source_memory_location: &MemoryLocation,
        node: &Node,
        comment: &str,
    ) {
        let source_type = source_memory_location.vm_type();
        if source_type.is_aggregate() {
            self.emit_compute_effective_address_to_target_register(
                target_reg,
                &Destination::Memory(source_memory_location.clone()),
                node,
                comment,
            );
        } else {
            self.emit_load_scalar_from_memory_offset_instruction(
                target_reg,
                source_memory_location,
                node,
                &format!("load scalar value {comment}"),
            );
        }
    }

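    /// Materializes `location` into a register, returning either the existing
    /// register or a freshly allocated temporary holding the loaded value.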
    pub(crate) fn emit_materialize_value_to_register(
        &mut self,
        location: &Destination,
        node: &Node,
        comment: &str,
    ) -> DetailedLocationResolved {
        match location {
            Destination::Register(reg) => DetailedLocationResolved::Register(reg.clone()),
            Destination::Memory(memory_location) => {
                let temp_reg_target = self.temp_registers.allocate(
                    memory_location.ty.clone(),
                    "emit load primitive from location",
                );
                self.emit_load_value_from_memory_source(
                    temp_reg_target.register(),
                    memory_location,
                    node,
                    &format!("load primitive from detailed location {comment}"),
                );
                DetailedLocationResolved::TempRegister(temp_reg_target)
            }
            Destination::Unit => {
                panic!("Cannot materialize a value from a Unit destination")
            }
        }
    }

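    /// Emits a runtime check that traps if the destination collection's capacity is
    /// less than the known length `len`. Returns the register holding `len`.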
    pub fn emit_check_that_known_len_is_less_or_equal_to_capacity(
        &mut self,
        destination_memory_location: &MemoryLocation,
        len: usize,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let destination_capacity_reg = self.temp_registers.allocate(
            VmType::new_contained_in_register(u16_type()),
            "destination capacity",
        );
        self.builder.add_ld16_from_pointer_from_memory_location(
            destination_capacity_reg.register(),
            &destination_memory_location.unsafe_add_offset(COLLECTION_CAPACITY_OFFSET),
            node,
            &format!("{comment} - load capacity for destination"),
        );

        let source_length_reg = self.temp_registers.allocate(
            VmType::new_contained_in_register(u16_type()),
            "source length",
        );

        self.builder.add_mov_16_immediate_value(
            source_length_reg.register(),
            len as u16,
            node,
            "known length size",
        );

        self.builder.add_trap_if_lt(
            destination_capacity_reg.register(),
            source_length_reg.register(),
            node,
            &format!("{comment} - verify that we are within bounds"),
        );

        source_length_reg.register
    }

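    /// Copies a vec-like value from `source_memory_location` to
    /// `destination_memory_location`, leaving the destination's capacity intact.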
    pub(crate) fn emit_copy_vec_like_value_helper(
        &mut self,
        destination_memory_location: &MemoryLocation,
        source_memory_location: &MemoryLocation,
        node: &Node,
        comment: &str,
    ) {
        let destination_pointer = self.emit_compute_effective_address_from_location_to_register(
            destination_memory_location,
            node,
            "get the destination vec",
        );
        let source_pointer = self.emit_compute_effective_address_from_location_to_register(
            source_memory_location,
            node,
            "get vector source address",
        );

        self.builder.add_vec_copy(
            &destination_pointer,
            &source_pointer,
            node,
            "copy over, but leave the capacity on the destination",
        );
    }

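    /// Copies a map-like value by overwriting the destination map with the
    /// source map's entries.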
    pub(crate) fn emit_copy_map_like_value_helper(
        &mut self,
        destination_memory_location: &MemoryLocation,
        source_memory_location: &MemoryLocation,
        node: &Node,
        comment: &str,
    ) {
        let destination_ptr_location = self
            .emit_compute_effective_address_from_location_to_register(
                destination_memory_location,
                node,
                comment,
            );
        let source_ptr_location = self.emit_compute_effective_address_from_location_to_register(
            source_memory_location,
            node,
            comment,
        );
        self.builder.add_map_overwrite(
            &destination_ptr_location,
            &source_ptr_location,
            node,
            comment,
        );
    }

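    /// Copies an aggregate value between memory locations, dispatching to a
    /// vec-like copy, a map overwrite, or a plain block copy depending on the type.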
    pub(crate) fn emit_copy_aggregate_value_helper(
        &mut self,
        destination_memory_location: &MemoryLocation,
        source_memory_location: &MemoryLocation,
        node: &Node,
        comment: &str,
    ) {
        // Special case: a StringView written into StringStorage is copied like a
        // vec-like value, so the storage keeps its own capacity.
        if matches!(
            source_memory_location.ty.basic_type.kind,
            BasicTypeKind::StringView { byte: _, char: _ }
        ) && matches!(
            destination_memory_location.ty.basic_type.kind,
            BasicTypeKind::StringStorage {
                element_type: _,
                char: _,
                capacity: _
            }
        ) {
            self.emit_copy_vec_like_value_helper(
                destination_memory_location,
                source_memory_location,
                node,
                &format!("copy StringView to StringStorage {comment}"),
            );
            return;
        }

        let ty = &source_memory_location.ty;
        if ty.is_collection_like() {
            if ty.basic_type.is_vec_like() {
                if let (Some(_element_size), Some(_header_size)) = (
                    source_memory_location
                        .ty
                        .basic_type
                        .bucket_size_for_vec_like(),
                    source_memory_location
                        .ty
                        .basic_type
                        .header_size_for_vec_like(),
                ) {
                    self.emit_copy_vec_like_value_helper(
                        destination_memory_location,
                        source_memory_location,
                        node,
                        comment,
                    );
                } else {
                    self.emit_block_copy_with_size_from_location(
                        destination_memory_location,
                        source_memory_location,
                        node,
                        &format!("block copy {comment} (vec-like fallback) to memory pointed by register {destination_memory_location} <- {source_memory_location}"),
                    );
                }
            } else {
                self.emit_compute_effective_address_from_location_to_register(
                    destination_memory_location,
                    node,
                    comment,
                );
                self.emit_copy_map_like_value_helper(
                    destination_memory_location,
                    source_memory_location,
                    node,
                    comment,
                );
            }
        } else {
            self.emit_block_copy_with_size_from_location(
                destination_memory_location,
                source_memory_location,
                node,
                &format!("block copy {comment} to memory pointed by register {destination_memory_location} <- {source_memory_location}"),
            );
        }
    }

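    /// Copies a value between two `Destination`s, using a register transfer or a
    /// memory store depending on the output destination. Panics on `Unit`.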
    pub fn emit_copy_value_between_destinations(
        &mut self,
        output_destination: &Destination,
        value_source: &Destination,
        node: &Node,
        comment: &str,
    ) {
        match output_destination {
            Destination::Register(reg) => {
                self.emit_transfer_value_to_register(reg, value_source, node, comment);
            }
            Destination::Memory(_) => {
                self.emit_store_value_to_memory_destination(
                    output_destination,
                    value_source,
                    node,
                    comment,
                );
            }
            Destination::Unit => {
                panic!("Cannot copy to Unit destination")
            }
        }
    }

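    /// Copies the value at `source_memory_location` into `destination`, storing to
    /// memory or loading into a register depending on the destination kind.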
    pub(crate) fn emit_copy_value_from_memory_location(
        &mut self,
        destination: &Destination,
        source_memory_location: &MemoryLocation,
        node: &Node,
        comment: &str,
    ) {
        if let Some(_mem_loc) = destination.memory_location() {
            let source_loc = Destination::Memory(source_memory_location.clone());
            self.emit_store_value_to_memory_destination(destination, &source_loc, node, comment);
        } else if let Some(output_target_reg) = destination.register() {
            self.emit_load_value_from_memory_source(
                output_target_reg,
                source_memory_location,
                node,
                comment,
            );
        } else {
            panic!("destination was Unit, which is not supported");
        }
    }

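    /// Stores `value_source` into the memory location behind `output_destination`:
    /// scalars via a store instruction, aggregates via an aggregate copy, with
    /// memory-to-memory scalar stores staged through a temporary register.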
    pub(crate) fn emit_store_value_to_memory_destination(
        &mut self,
        output_destination: &Destination,
        value_source: &Destination,
        node: &Node,
        comment: &str,
    ) {
        let output_mem_loc = output_destination.grab_memory_location();

        match value_source {
            Destination::Register(value_reg) => {
                // Special case: a StringView register stored into StringStorage is
                // treated as an aggregate copy rather than a scalar store.
                if matches!(
                    value_reg.ty.basic_type.kind,
                    BasicTypeKind::StringView { byte: _, char: _ }
                ) && matches!(
                    output_mem_loc.ty.basic_type.kind,
                    BasicTypeKind::StringStorage {
                        element_type: _,
                        char: _,
                        capacity: _
                    }
                ) {
                    let source_memory_location =
                        MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                            value_reg.clone(),
                        );
                    self.emit_copy_aggregate_value_helper(
                        output_destination.grab_memory_location(),
                        &source_memory_location,
                        node,
                        &format!("copy StringView to StringStorage {comment}"),
                    );
                } else if value_reg.ty.is_scalar() {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        output_mem_loc,
                        value_reg,
                        node,
                        &format!("store {comment} to memory pointed by register {output_destination} <- {value_reg}"),
                    );
                } else {
                    let source_memory_location =
                        MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                            value_reg.clone(),
                        );
                    self.emit_copy_aggregate_value_helper(
                        output_destination.grab_memory_location(),
                        &source_memory_location,
                        node,
                        "copy aggregate",
                    );
                }
            }
            Destination::Memory(source_mem_loc) => {
                let temp_reg = self
                    .temp_registers
                    .allocate(source_mem_loc.ty.clone(), "temp_for_memory_to_memory_store");

                self.emit_load_value_from_memory_source(
                    temp_reg.register(),
                    source_mem_loc,
                    node,
                    &format!("load {comment} from memory for store"),
                );

                if source_mem_loc.ty.is_scalar() {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        output_mem_loc,
                        temp_reg.register(),
                        node,
                        &format!("store {comment} from temp to memory pointed by register"),
                    );
                } else {
                    self.emit_copy_aggregate_value_helper(
                        output_destination.grab_memory_location(),
                        source_mem_loc,
                        node,
                        "copy aggregate",
                    );
                }
            }
            Destination::Unit => panic!("Cannot store from Unit source"),
        }
    }
}