use crate::code_bld::CodeBuilder;
use crate::ctx::Context;

use crate::transformer::{Collection, Transformer};
use source_map_node::Node;
use swamp_semantic::intr::IntrinsicFunction;
use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
use swamp_vm_types::types::{
    Destination, TypedRegister, VmType, float_type, int_type, pointer_type, u8_type, u16_type,
    u32_type,
};
use swamp_vm_types::{
    AggregateMemoryLocation, COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET,
    GRID_HEADER_HEIGHT_OFFSET, GRID_HEADER_WIDTH_OFFSET, MemoryLocation, MemoryOffset,
    PointerLocation,
};

impl CodeBuilder<'_> {
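    /// Emits a single intrinsic call where the first argument, when present, is
    /// treated as the `self` receiver and materialized as a scalar r-value; any
    /// remaining arguments are forwarded to `emit_single_intrinsic_call_with_self`.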
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::single_match_else)]
    pub fn emit_single_intrinsic_call(
        &mut self,
        target_reg: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[ArgumentExpression],
        ctx: &Context,
    ) {
        let self_reg = if arguments.is_empty() {
            None
        } else {
            let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
                panic!("Expected expression for self argument");
            };
            Some(self.emit_scalar_rvalue(self_expr, ctx))
        };

        let rest_args = arguments.get(1..).unwrap_or(&[]);

        self.emit_single_intrinsic_call_with_self(
            target_reg,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            rest_args,
            ctx,
            "single intrinsic call",
        );
    }

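    /// Emits code for map intrinsics that take an explicit key argument
    /// (`MapHas`, `MapRemove`). The key is materialized via
    /// `emit_aggregate_pointer_or_pointer_to_scalar_memory` so it is passed by pointer.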
    pub fn emit_intrinsic_map(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::MapHas => {
                let key_argument = &arguments[0];
                let key_temp_storage_reg =
                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);

                self.builder.add_map_has(
                    output_destination.register().unwrap(),
                    self_ptr_reg,
                    &key_temp_storage_reg,
                    node,
                    "map_has",
                );
            }
            IntrinsicFunction::MapRemove => {
                let key_argument = &arguments[0];
                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
            }
            _ => todo!("missing intrinsic_map {intrinsic_fn}"),
        }
    }

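    /// Emits code for sparse-collection intrinsics (`SparseAdd`, `SparseRemove`,
    /// `SparseIsAlive`), dispatching to the dedicated `emit_sparse_*` helpers.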
    pub fn emit_intrinsic_sparse(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::SparseAdd => {
                let element_to_add_expression = &arguments[0];
                self.emit_sparse_add(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    element_to_add_expression,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::SparseRemove => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
            }

            IntrinsicFunction::SparseIsAlive => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_is_alive(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    sparse_id_int_expression,
                    node,
                    ctx,
                );
            }
            _ => todo!("unknown sparse {intrinsic_fn}"),
        }
    }

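    /// Emits code for grid intrinsics: `GridSet`/`GridGet` compute the element
    /// address via `add_grid_get_entry_addr` and then write or copy the value,
    /// while `GridWidth`/`GridHeight` load the 16-bit fields from the grid header.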
    pub fn emit_intrinsic_grid(
        &mut self,
        target_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
155 IntrinsicFunction::GridSet => {
156 let x_expr = &arguments[0];
157 let y_expr = &arguments[1];
158 let value_expr = &arguments[2];
159
160 let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
161 let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
162 let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
163
164 let temp_element_ptr = self.temp_registers.allocate(
165 VmType::new_contained_in_register(element_gen_type.clone()),
166 "temporary scalar",
167 );
168
169 self.builder.add_grid_get_entry_addr(
170 &temp_element_ptr.register,
171 self_ptr_reg,
172 &x_reg,
173 &y_reg,
174 element_gen_type.total_size,
175 node,
176 comment,
177 );
178
179 let location = AggregateMemoryLocation {
180 location: MemoryLocation {
181 base_ptr_reg: temp_element_ptr.register,
182 offset: MemoryOffset(0),
183 ty: VmType::new_unknown_placement(element_gen_type.clone()),
184 },
185 };
186
187 if element_gen_type.is_aggregate() {
189 self.emit_initialize_memory_for_any_type(
190 &location.location,
191 node,
192 "initialize grid set allocated space",
193 );
194 }
195
196 self.emit_expression_into_target_memory(
197 &location.location,
198 value_expr,
199 "grid set",
200 ctx,
201 );
202 }
203 IntrinsicFunction::GridGet => {
204 let x_expr = &arguments[0];
205 let y_expr = &arguments[1];
206
207 let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
208 let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
209
210 let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
211
212 let temp_element_ptr = self.temp_registers.allocate(
214 VmType::new_contained_in_register(pointer_type()),
215 "temp for grid element address",
216 );
217
218 self.builder.add_grid_get_entry_addr(
220 &temp_element_ptr.register,
221 self_ptr_reg,
222 &x_reg,
223 &y_reg,
224 element_type.total_size,
225 node,
226 comment,
227 );
228
229 let element_memory_location = MemoryLocation {
231 base_ptr_reg: temp_element_ptr.register,
232 offset: MemoryOffset(0),
233 ty: VmType::new_unknown_placement(element_type),
234 };
235
236 self.emit_copy_value_from_memory_location(
239 target_destination,
240 &element_memory_location,
241 node,
242 "copy grid element value to destination",
243 );
244 }
245
246 IntrinsicFunction::GridWidth => {
247 let temp = self.temp_registers.allocate(
249 VmType::new_contained_in_register(u16_type()),
250 "temp for grid width",
251 );
252
253 let self_memory_location = AggregateMemoryLocation::new(
255 MemoryLocation::new_copy_over_whole_type_with_zero_offset(
256 self_ptr_reg.ptr_reg.clone(),
257 ),
258 );
259 let width_location =
260 self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());
261
262 self.builder.add_ld16_from_pointer_from_memory_location(
264 &temp.register,
265 &width_location.location,
266 node,
267 comment,
268 );
269
270 let value_source = Destination::Register(temp.register);
272
273 self.emit_copy_value_between_destinations(
275 target_destination,
276 &value_source,
277 node,
278 "store grid width to destination",
279 );
280 }
281 IntrinsicFunction::GridHeight => {
282 let temp = self.temp_registers.allocate(
284 VmType::new_contained_in_register(u16_type()),
285 "temp for grid height",
286 );
287
288 let self_memory_location = AggregateMemoryLocation::new(
290 MemoryLocation::new_copy_over_whole_type_with_zero_offset(
291 self_ptr_reg.ptr_reg.clone(),
292 ),
293 );
294 let height_location =
295 self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());
296
297 self.builder.add_ld16_from_pointer_from_memory_location(
299 &temp.register,
300 &height_location.location,
301 node,
302 comment,
303 );
304
305 let value_source = Destination::Register(temp.register);
307
308 self.emit_copy_value_between_destinations(
310 target_destination,
311 &value_source,
312 node,
313 "store grid height to destination",
314 );
315 }
316 _ => todo!("wrong grid {intrinsic_fn}"),
317 }
    }

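    /// Emits code for `Vec` intrinsics. Mutating operations (`VecPush`, `VecPop`,
    /// `VecRemoveIndex`, ...) go through dedicated vec instructions on the
    /// builder; read operations copy the addressed element into the destination.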
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_vec(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        ctx: &Context,
    ) {
        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
        match intrinsic_fn {
332 IntrinsicFunction::VecPush => {
333 let element_expr = &arguments[0];
334
335 let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);
336
337 let temp_element_ptr = self.temp_registers.allocate(
338 VmType::new_contained_in_register(pointer_type()),
339 "pointer to new element",
340 );
341
342 self.builder.add_vec_push_addr(
343 temp_element_ptr.register(),
344 &self_ptr_reg.ptr_reg,
345 node,
346 "set pointer to new element",
347 );
348
349 let location = AggregateMemoryLocation {
350 location: MemoryLocation {
351 base_ptr_reg: temp_element_ptr.register,
352 offset: MemoryOffset(0),
353 ty: VmType::new_unknown_placement(element_gen_type.clone()),
354 },
355 };
356
357 if element_gen_type.is_aggregate() {
359 self.emit_initialize_memory_for_any_type(
360 &location.location,
361 node,
362 "initialize vec.push allocated space",
363 );
364 }
365
366 self.emit_expression_into_target_memory(
367 &location.location,
368 element_expr,
369 "vec push",
370 ctx,
371 );
372 }
373
374 IntrinsicFunction::VecPop => {
375 let element_type = self_basic_type.element().unwrap();
376 let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
377 found_target_reg.clone()
378 } else {
379 let temp = self.temp_registers.allocate(
380 VmType::new_contained_in_register(element_type.clone()),
381 "temp for vec pop",
382 );
383 temp.register
384 };
                self.builder.add_vec_pop(
                    &pop_target_reg,
                    &self_ptr_reg.ptr_reg,
                    element_type.total_size,
                    node,
                    "vec pop",
                );
392 let source_memory_location = MemoryLocation {
393 base_ptr_reg: pop_target_reg,
394 offset: MemoryOffset(0),
395 ty: VmType::new_unknown_placement(element_type),
396 };
397
398 self.emit_copy_value_from_memory_location(
399 output_destination,
400 &source_memory_location,
401 node,
402 "copy from vec pop",
403 );
404 }
405
406 IntrinsicFunction::VecSlice => {
407 let range_expr = &arguments[0];
408 let range_region = self.emit_scalar_rvalue(range_expr, ctx);
409
410 let output_pointer = self.emit_compute_effective_address_to_register(
411 output_destination,
412 node,
413 "get absolute pointer for vec slice destination",
414 );
415 let output_pointer_location = PointerLocation::new(output_pointer);
416
417 self.builder.add_vec_copy_range(
418 &output_pointer_location,
419 self_ptr_reg,
420 &range_region,
421 node,
422 "vec slice",
423 );
424 }
425
            IntrinsicFunction::VecRemoveIndex => {
                let index_region_expr = &arguments[0];
                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    &index_region,
                    node,
                    "remove index",
                );
            }
439 IntrinsicFunction::VecRemoveIndexGetValue => {
440 let key_expr = &arguments[0];
441 let key_region = self.emit_scalar_rvalue(key_expr, ctx);
442 let element_type = self_basic_type.element().unwrap();
443
                if let Some(target_reg) = output_destination.register() {
                    self.builder.add_vec_remove_index_get_value(
                        target_reg,
                        &self_ptr_reg.ptr_reg,
                        &key_region,
                        node,
                        "vec remove index get value to register",
                    );
454 } else {
455 let temp_reg = self.temp_registers.allocate(
457 VmType::new_contained_in_register(element_type),
458 "temp for vec remove index get value",
459 );
460
461 self.builder.add_vec_remove_index_get_value(
462 &temp_reg.register,
463 &self_ptr_reg.ptr_reg,
464 &key_region,
465 node,
466 "vec remove index get value to temp",
467 );
468
469 let source = Destination::Register(temp_reg.register);
471 self.emit_copy_value_between_destinations(
472 output_destination,
473 &source,
474 node,
475 "copy vec element to destination",
476 );
477 }
478 }
479 IntrinsicFunction::VecRemoveFirstIndexGetValue => {
480 let zero_reg = self.temp_registers.allocate(
481 VmType::new_contained_in_register(u8_type()),
482 "vec remove first. set index 0",
483 );
484 self.builder
485 .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
486 let value_addr_reg = self.temp_registers.allocate(
487 VmType::new_contained_in_register(u32_type()),
488 "vec entry addr to copy from",
489 );
490 let element_type = self_basic_type.element().unwrap();
491 self.builder.add_vec_subscript(
492 value_addr_reg.register(),
493 &self_ptr_reg.ptr_reg,
494 zero_reg.register(),
495 element_type.total_size,
496 node,
497 "lookup first entry in vec",
498 );
499
500 let source_memory_location = MemoryLocation {
501 base_ptr_reg: value_addr_reg.register,
502 offset: MemoryOffset(0),
503 ty: VmType::new_unknown_placement(element_type),
504 };
505
506 self.emit_copy_value_from_memory_location(
507 output_destination,
508 &source_memory_location,
509 node,
510 "load the vec entry to target register",
511 );
512
                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    zero_reg.register(),
                    node,
                    "vec remove first index",
                );
519 }
520 IntrinsicFunction::VecClear => {
521 let temp_element_count_reg = self.temp_registers.allocate(
522 VmType::new_contained_in_register(u16_type()),
523 "vec_clear zero",
524 );
525 self.builder.add_mov_16_immediate_value(
526 temp_element_count_reg.register(),
527 0,
528 node,
529 "set to zero",
530 );
531
532 let self_memory_location = AggregateMemoryLocation::new(
533 MemoryLocation::new_copy_over_whole_type_with_zero_offset(
534 self_ptr_reg.ptr_reg.clone(),
535 ),
536 );
537
538 self.builder.add_st16_using_ptr_with_offset(
539 &self_memory_location
540 .offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type())
541 .location,
542 temp_element_count_reg.register(),
543 node,
544 "set element_count to zero",
545 );
546 }
547
548 IntrinsicFunction::VecGet => {
549 let key_expr = &arguments[0];
550 let key_region = self.emit_scalar_rvalue(key_expr, ctx);
551 let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
552
553 let temp_element_ptr = self.temp_registers.allocate(
555 VmType::new_contained_in_register(pointer_type()),
556 "temp for vec element address",
557 );
558
559 self.builder.add_vec_subscript(
561 temp_element_ptr.register(),
562 &self_ptr_reg.ptr_reg,
563 &key_region,
564 element_type.total_size,
565 node,
566 "get vec element address",
567 );
568
569 let element_memory_location = MemoryLocation {
571 base_ptr_reg: temp_element_ptr.register,
572 offset: MemoryOffset(0),
573 ty: VmType::new_unknown_placement(element_type),
574 };
575
576 self.emit_copy_value_from_memory_location(
578 output_destination,
579 &element_memory_location,
580 node,
581 "copy vec element to destination",
582 );
583 }
            _ => todo!("Vec {intrinsic_fn}"),
        }
    }

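    /// Emits an integer intrinsic (`IntAbs`, `IntRnd`, `IntMin`/`Max`/`Clamp`,
    /// `IntToFloat`, `IntToString`) on already-materialized argument registers,
    /// with `arguments[0]` acting as the receiver.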
    fn emit_intrinsic_call_int(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument = &arguments[0];

        match intrinsic_fn {
622 IntrinsicFunction::IntAbs => {
623 self.builder
624 .add_int_abs(target_reg, first_argument, node, "int abs");
625 }
626
627 IntrinsicFunction::IntRnd => {
628 self.builder
629 .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
630 }
631 IntrinsicFunction::IntMax => {
632 let int_register = &arguments[1];
633
634 self.builder
635 .add_int_max(target_reg, first_argument, int_register, node, "int max");
636 }
637 IntrinsicFunction::IntMin => {
638 let int_register = &arguments[1];
639
640 self.builder
641 .add_int_min(target_reg, first_argument, int_register, node, "int min");
642 }
643 IntrinsicFunction::IntClamp => {
644 let min_reg = &arguments[1];
645 let max_reg = &arguments[2];
646 self.builder.add_int_clamp(
647 target_reg,
648 first_argument,
649 min_reg,
650 max_reg,
651 node,
652 "int clamp",
653 );
654 }
655 IntrinsicFunction::IntToFloat => {
656 self.builder.add_int_to_float(
657 target_reg,
658 first_argument,
659 node,
660 &format!("int to float {}", first_argument.comment()),
661 );
662 }
663 IntrinsicFunction::IntToString => {
664 self.builder
665 .add_int_to_string(target_reg, first_argument, node, "int_to_string");
666 }
667 _ => {}
668 }
    }

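    /// Emits a fixed-point ("Float") intrinsic on already-materialized argument
    /// registers; `arguments[0]` is the receiver value. Panics if the intrinsic
    /// is not a float operation.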
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_fixed(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument_reg = &arguments[0];

        match intrinsic_fn {
683 IntrinsicFunction::FloatRound => {
684 self.builder
685 .add_float_round(target_reg, first_argument_reg, node, "float round");
686 }
687 IntrinsicFunction::FloatFloor => {
688 self.builder
689 .add_float_floor(target_reg, first_argument_reg, node, "float floor");
690 }
            IntrinsicFunction::FloatSqrt => {
                self.builder
                    .add_float_sqrt(target_reg, first_argument_reg, node, "float sqrt");
            }
695 IntrinsicFunction::FloatSign => {
696 self.builder
697 .add_float_sign(target_reg, first_argument_reg, node, "float sign");
698 }
699 IntrinsicFunction::FloatAbs => {
700 self.builder
701 .add_float_abs(target_reg, first_argument_reg, node, "float abs");
702 }
703 IntrinsicFunction::FloatRnd => {
704 self.builder.add_float_prnd(
705 target_reg,
706 first_argument_reg,
707 node,
708 "float pseudo random",
709 );
710 }
711 IntrinsicFunction::FloatCos => {
712 self.builder
713 .add_float_cos(target_reg, first_argument_reg, node, "float cos");
714 }
715 IntrinsicFunction::FloatSin => {
716 self.builder
717 .add_float_sin(target_reg, first_argument_reg, node, "float sin");
718 }
719 IntrinsicFunction::FloatAcos => {
720 self.builder
721 .add_float_acos(target_reg, first_argument_reg, node, "float acos");
722 }
723 IntrinsicFunction::FloatAsin => {
724 self.builder
725 .add_float_asin(target_reg, first_argument_reg, node, "float asin");
726 }
727 IntrinsicFunction::FloatAtan2 => {
728 self.builder
729 .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
730 }
731 IntrinsicFunction::FloatMin => {
732 let float_region = &arguments[1];
733 self.builder.add_float_min(
734 target_reg,
735 first_argument_reg,
736 float_region,
737 node,
738 "float min",
739 );
740 }
741 IntrinsicFunction::FloatMax => {
742 let float_region = &arguments[1];
743 self.builder.add_float_max(
744 target_reg,
745 first_argument_reg,
746 float_region,
747 node,
748 "float max",
749 );
750 }
            IntrinsicFunction::FloatClamp => {
                let min_reg = &arguments[1];
                let max_reg = &arguments[2];

                self.builder.add_float_clamp(
                    target_reg,
                    min_reg,
                    first_argument_reg,
                    max_reg,
                    node,
                    "float clamp",
                );
            }
764 IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
765 target_reg,
766 first_argument_reg,
767 node,
768 "float_to_string",
769 ),
770 _ => panic!("wasn't a fixed operation"),
771 }
    }

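    /// Emits a collection transformer (`for`, `while`, `filter`, `find`) by
    /// iterating over the collection and invoking the provided lambda for each
    /// element via `emit_iterate_over_collection_with_lambda`.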
    pub fn emit_intrinsic_transformer(
        &mut self,
        target_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_addr: &PointerLocation,
        lambda: (Vec<VariableRef>, &Expression),
        node: &Node,
        ctx: &Context,
    ) {
        match intrinsic_fn {
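            // Note: fold is routed here but currently emits no code.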
            IntrinsicFunction::TransformerFold => {}
            IntrinsicFunction::TransformerFilter => {
788 self.emit_iterate_over_collection_with_lambda(
789 target_destination,
790 node,
791 Collection::Vec,
792 Transformer::Filter,
793 &self_addr.ptr_reg,
794 lambda,
795 ctx,
796 );
797 }
798
799 IntrinsicFunction::TransformerFor => {
800 self.emit_iterate_over_collection_with_lambda(
801 target_destination,
802 node,
803 Collection::Vec,
804 Transformer::For,
805 &self_addr.ptr_reg,
806 lambda,
807 ctx,
808 );
809 }
810 IntrinsicFunction::TransformerWhile => {
811 self.emit_iterate_over_collection_with_lambda(
812 target_destination,
813 node,
814 Collection::Vec,
815 Transformer::While,
816 &self_addr.ptr_reg,
817 lambda,
818 ctx,
819 );
820 }
821
822 IntrinsicFunction::TransformerFind => {
823 self.emit_iterate_over_collection_with_lambda(
824 target_destination,
825 node,
826 Collection::Vec,
827 Transformer::Find,
828 &self_addr.ptr_reg,
829 lambda,
830 ctx,
831 );
832 }
833 _ => todo!("{intrinsic_fn}"),
834 }
    }

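    /// Variant of `emit_single_intrinsic_call_with_self` that takes the receiver
    /// as a `Destination`: the destination is first lowered to a scalar or an
    /// absolute aggregate pointer register, then the call is forwarded.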
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self_destination(
        &mut self,
        target_destination: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_destination: Option<&Destination>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        let self_reg = if let Some(self_dest) = self_destination {
            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
        } else {
            None
        };

        self.emit_single_intrinsic_call_with_self(
            target_destination,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            arguments,
            ctx,
            comment,
        );
    }

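    /// Central dispatcher for intrinsic calls: materializes the remaining
    /// arguments as needed and routes to the float/int helpers, the collection
    /// emitters (vec, grid, sparse, map), transformers, conversions, and the
    /// runtime intrinsics. Scalar results that must land in memory go through a
    /// temporary register followed by an explicit store.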
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self(
        &mut self,
        target_destination: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_reg: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        match intrinsic_fn {
883 IntrinsicFunction::Float2Magnitude
884 | IntrinsicFunction::FloatAbs
885 | IntrinsicFunction::FloatRound
886 | IntrinsicFunction::FloatFloor
887 | IntrinsicFunction::FloatSqrt
888 | IntrinsicFunction::FloatSign
889 | IntrinsicFunction::FloatRnd
890 | IntrinsicFunction::FloatCos
891 | IntrinsicFunction::FloatSin
892 | IntrinsicFunction::FloatAcos
893 | IntrinsicFunction::FloatAsin
894 | IntrinsicFunction::FloatAtan2
895 | IntrinsicFunction::FloatMin
896 | IntrinsicFunction::FloatMax
897 | IntrinsicFunction::FloatClamp
898 | IntrinsicFunction::FloatToString => {
899 let (temp_reg, dest_reg) = if target_destination.is_register() {
901 (None, target_destination.register().unwrap().clone())
902 } else {
903 let temp_reg = self.temp_registers.allocate(
904 VmType::new_contained_in_register(float_type()),
905 "temporary destination for low level intrinsic",
906 );
907
908 (Some(temp_reg.register.clone()), temp_reg.register)
909 };
910
911 let mut converted_regs = vec![self_reg.unwrap().clone()];
913 for arg in arguments {
914 let ArgumentExpression::Expression(found_expression) = arg else {
915 panic!("must be expression");
916 };
917 let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
918 converted_regs.push(materialized_arg);
919 }
920
921 self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);
922
923 if let Some(temp_reg) = temp_reg {
924 self.emit_store_scalar_to_memory_offset_instruction(
925 target_destination.grab_memory_location(),
926 &temp_reg,
927 node,
928 "store the fixed point value into memory",
929 );
930 }
931 }
932
933 IntrinsicFunction::IntToFloat => {
934 let (temp_reg, dest_reg) = if target_destination.is_register() {
936 (None, target_destination.register().unwrap().clone())
937 } else {
938 let temp_reg = self.temp_registers.allocate(
939 VmType::new_contained_in_register(float_type()),
940 "temporary destination for int to float intrinsic",
941 );
942
943 (Some(temp_reg.register.clone()), temp_reg.register)
944 };
945
946 let int_value_reg = self_reg.unwrap();
948
949 self.builder.add_int_to_float(
951 &dest_reg,
952 int_value_reg,
953 node,
954 &format!("int to float {}", int_value_reg.comment()),
955 );
956
957 if let Some(temp_reg) = temp_reg {
958 self.emit_store_scalar_to_memory_offset_instruction(
959 target_destination.grab_memory_location(),
960 &temp_reg,
961 node,
962 "store the float result from int to float conversion",
963 );
964 }
965 }
966
967 IntrinsicFunction::IntAbs
968 | IntrinsicFunction::IntRnd
969 | IntrinsicFunction::IntMax
970 | IntrinsicFunction::IntMin
971 | IntrinsicFunction::IntClamp
972 | IntrinsicFunction::IntToString => {
973 let (temp_reg, dest_reg) = if target_destination.is_register() {
975 let target_reg = target_destination.register().unwrap();
976 (None, target_reg.clone())
978 } else {
979 let temp_reg = self.temp_registers.allocate(
980 VmType::new_contained_in_register(u32_type()),
981 "temporary destination for low level intrinsic",
982 );
983
984 (Some(temp_reg.register.clone()), temp_reg.register)
985 };
986
987 let mut converted_regs = vec![self_reg.unwrap().clone()];
989 for arg in arguments {
990 let ArgumentExpression::Expression(found_expression) = arg else {
991 panic!("must be expression");
992 };
993 let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
994 converted_regs.push(materialized_arg);
995 }
996
997 self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);
998
999 if let Some(temp_reg) = temp_reg {
1000 if target_destination.is_register() {
1001 self.builder.add_mov_reg(
1003 target_destination.register().unwrap(),
1004 &temp_reg,
1005 node,
1006 "copy intrinsic result from temp to target register",
1007 );
1008 } else {
1009 self.emit_store_scalar_to_memory_offset_instruction(
1011 target_destination.grab_memory_location(),
1012 &temp_reg,
1013 node,
1014 "put the low level intrinsic fixed (int) back to memory",
1015 );
1016 }
1017 }
1018 }
1019
1020 IntrinsicFunction::VecPush
1021 | IntrinsicFunction::VecPop
1022 | IntrinsicFunction::VecRemoveIndex
1023 | IntrinsicFunction::VecRemoveIndexGetValue
1024 | IntrinsicFunction::VecRemoveFirstIndexGetValue
1025 | IntrinsicFunction::VecClear
1026 | IntrinsicFunction::VecSlice
1027 | IntrinsicFunction::VecSwap
1028 | IntrinsicFunction::VecInsert
1029 | IntrinsicFunction::VecFirst
1030 | IntrinsicFunction::VecGet
1031 | IntrinsicFunction::VecLast => {
1032 let vec_self_ptr_reg = PointerLocation {
1035 ptr_reg: self_reg.unwrap().clone(),
1036 };
1037 let converted_to_expressions: Vec<_> = arguments
1038 .iter()
1039 .map(|arg| {
1040 let ArgumentExpression::Expression(found_expression) = arg else {
1041 panic!("must be expression");
1042 };
1043 found_expression.clone()
1044 })
1045 .collect();
1046
1047 self.emit_intrinsic_call_vec(
1048 target_destination,
1049 intrinsic_fn,
1050 &vec_self_ptr_reg,
1051 &converted_to_expressions,
1052 node,
1053 ctx,
1054 );
1055 }
1056
1057 IntrinsicFunction::GridGet
1058 | IntrinsicFunction::GridSet
1059 | IntrinsicFunction::GridWidth
1060 | IntrinsicFunction::GridHeight => {
1061 let grid_self_ptr_reg = PointerLocation {
1064 ptr_reg: self_reg.unwrap().clone(),
1065 };
1066 let converted_to_expressions: Vec<_> = arguments
1067 .iter()
1068 .map(|arg| {
1069 let ArgumentExpression::Expression(found_expression) = arg else {
1070 panic!("must be expression");
1071 };
1072 found_expression.clone()
1073 })
1074 .collect();
1075 self.emit_intrinsic_grid(
1076 target_destination,
1077 intrinsic_fn,
1078 &grid_self_ptr_reg,
1079 &converted_to_expressions,
1080 node,
1081 comment,
1082 ctx,
1083 );
1084 }
1085
1086 IntrinsicFunction::SparseIsAlive
1087 | IntrinsicFunction::SparseRemove
1088 | IntrinsicFunction::SparseAdd => {
1089 let grid_self_ptr_reg = PointerLocation {
1092 ptr_reg: self_reg.unwrap().clone(),
1093 };
1094 let converted_to_expressions: Vec<_> = arguments
1095 .iter()
1096 .map(|arg| {
1097 let ArgumentExpression::Expression(found_expression) = arg else {
1098 panic!("must be expression");
1099 };
1100 found_expression.clone()
1101 })
1102 .collect();
1103 self.emit_intrinsic_sparse(
1104 target_destination,
1105 intrinsic_fn,
1106 &grid_self_ptr_reg,
1107 &converted_to_expressions,
1108 node,
1109 comment,
1110 ctx,
1111 );
1112 }
1113
1114 IntrinsicFunction::TransformerFor
1115 | IntrinsicFunction::TransformerWhile
1116 | IntrinsicFunction::TransformerFindMap
1117 | IntrinsicFunction::TransformerAny
1118 | IntrinsicFunction::TransformerAll
1119 | IntrinsicFunction::TransformerMap
1120 | IntrinsicFunction::TransformerFilter
1121 | IntrinsicFunction::TransformerFilterMap
1122 | IntrinsicFunction::TransformerFind
1123 | IntrinsicFunction::TransformerFold => {
1124 let collection_self_ptr_reg = PointerLocation {
1126 ptr_reg: self_reg.unwrap().clone(),
1127 };
1128
1129 let lambda_expression = &arguments[0];
1130
                let ArgumentExpression::Expression(expr) = lambda_expression else {
                    panic!("transformer lambda argument must be an expression");
                };
1135
1136 let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
1137 panic!("must have lambda for transformers");
1138 };
1139
1140 self.emit_intrinsic_transformer(
1141 target_destination,
1142 intrinsic_fn,
1143 &collection_self_ptr_reg,
1144 (lambda_variables.clone(), lambda_expr),
1145 node,
1146 ctx,
1147 );
1148 }
1149
1150 IntrinsicFunction::RuntimePanic => {
1151 self.builder
1152 .add_panic(self_reg.unwrap(), node, "intrinsic panic");
1153 }
1154
1155 IntrinsicFunction::RuntimeHalt => {
1156 self.builder.add_halt(node, "intrinsic halt");
1157 }
1158
1159 IntrinsicFunction::RuntimeStep => {
1160 self.builder.add_step(node, "intrinsic step");
1161 }
1162
1163 IntrinsicFunction::RangeInit => {
1164 let start_reg = self_reg.unwrap();
1165 let end_arg = &arguments[0];
                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
                    panic!("range end must be an expression");
                };
                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);

                let is_inclusive = &arguments[1];
                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
                    panic!("range inclusiveness flag must be an expression");
                };
1180 let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
1181 let absolute_range_pointer = self.emit_compute_effective_address_to_register(
1182 target_destination,
1183 node,
1184 "create range target pointer",
1185 );
1186 self.builder.add_range_init(
1187 &absolute_range_pointer,
1188 start_reg,
1189 &end_reg,
1190 &is_inclusive_reg,
1191 node,
1192 "create a range",
1193 );
1194 }
1195
1196 IntrinsicFunction::CodepointToString => {
1198 if target_destination.is_register() {
1199 self.builder.add_codepoint_to_string(
1200 target_destination.register().unwrap(),
1201 self_reg.unwrap(),
1202 node,
1203 "char_to_string",
1204 );
1205 } else {
1206 let temp_reg = self.temp_registers.allocate(
1207 VmType::new_contained_in_register(u32_type()),
1208 "temporary for codepoint to string intrinsic",
1209 );
1210
1211 self.builder.add_codepoint_to_string(
1212 &temp_reg.register,
1213 self_reg.unwrap(),
1214 node,
1215 "char_to_string",
1216 );
1217
1218 self.emit_store_scalar_to_memory_offset_instruction(
1219 target_destination.grab_memory_location(),
1220 &temp_reg.register,
1221 node,
1222 "store codepoint to string result to memory",
1223 );
1224 }
1225 }
1226
1227 IntrinsicFunction::CodepointToInt => {
1228 if target_destination.is_register() {
1229 self.builder.add_mov_reg(
1230 target_destination.register().unwrap(),
1231 self_reg.unwrap(),
1232 node,
1233 "char_to_int",
1234 );
1235 } else {
1236 self.emit_store_scalar_to_memory_offset_instruction(
1237 target_destination.grab_memory_location(),
1238 self_reg.unwrap(),
1239 node,
1240 "store codepoint to int result to memory",
1241 );
1242 }
1243 }
1244
1245 IntrinsicFunction::ByteToString => {
1247 if target_destination.is_register() {
1248 self.builder.byte_to_string(
1249 target_destination.register().unwrap(),
1250 self_reg.unwrap(),
1251 node,
1252 "byte_to_string",
1253 );
1254 } else {
1255 let temp_reg = self.temp_registers.allocate(
1256 VmType::new_contained_in_register(u32_type()),
1257 "temporary for byte to string intrinsic",
1258 );
1259
1260 self.builder.byte_to_string(
1261 &temp_reg.register,
1262 self_reg.unwrap(),
1263 node,
1264 "byte_to_string",
1265 );
1266
1267 self.emit_store_scalar_to_memory_offset_instruction(
1268 target_destination.grab_memory_location(),
1269 &temp_reg.register,
1270 node,
1271 "store byte to string result to memory",
1272 );
1273 }
1274 }
1275
1276 IntrinsicFunction::ByteToInt => {
1277 if target_destination.is_register() {
1278 self.builder.add_mov_reg(
1281 target_destination.register().unwrap(),
1282 self_reg.unwrap(),
1283 node,
1284 "byte_to_int",
1285 );
1286 } else {
1287 self.emit_store_scalar_to_memory_offset_instruction(
1288 target_destination.grab_memory_location(),
1289 self_reg.unwrap(),
1290 node,
1291 "store byte to int result to memory",
1292 );
1293 }
1294 }
1295
1296 IntrinsicFunction::ByteToFloat => {
1297 if target_destination.is_register() {
1298 self.builder.add_int_to_float(
1300 target_destination.register().unwrap(),
1301 self_reg.unwrap(),
1302 node,
1303 "byte_to_float",
1304 );
1305 } else {
1306 let temp_reg = self.temp_registers.allocate(
1307 VmType::new_contained_in_register(float_type()),
1308 "temporary for byte to float intrinsic",
1309 );
1310
1311 self.builder.add_int_to_float(
1313 &temp_reg.register,
1314 self_reg.unwrap(),
1315 node,
1316 "byte_to_float",
1317 );
1318
1319 self.emit_store_scalar_to_memory_offset_instruction(
1320 target_destination.grab_memory_location(),
1321 &temp_reg.register,
1322 node,
1323 "store byte to float result to memory",
1324 );
1325 }
1326 }
1327
1328 IntrinsicFunction::ByteToCodepoint => {
1329 if target_destination.is_register() {
1330 self.builder.add_mov_reg(
1333 target_destination.register().unwrap(),
1334 self_reg.unwrap(),
1335 node,
1336 "byte_to_codepoint",
1337 );
1338 } else {
1339 self.emit_store_scalar_to_memory_offset_instruction(
1340 target_destination.grab_memory_location(),
1341 self_reg.unwrap(),
1342 node,
1343 "store byte to codepoint result to memory",
1344 );
1345 }
1346 }
1347
1348 IntrinsicFunction::BoolToString => {
1350 if target_destination.is_register() {
1351 self.builder.bool_to_string(
1352 target_destination.register().unwrap(),
1353 self_reg.unwrap(),
1354 node,
1355 "bool_to_string",
1356 );
1357 } else {
1358 let temp_reg = self.temp_registers.allocate(
1359 VmType::new_contained_in_register(u32_type()),
1360 "temporary for bool to string intrinsic",
1361 );
1362
1363 self.builder.bool_to_string(
1364 &temp_reg.register,
1365 self_reg.unwrap(),
1366 node,
1367 "bool_to_string",
1368 );
1369
1370 self.emit_store_scalar_to_memory_offset_instruction(
1371 target_destination.grab_memory_location(),
1372 &temp_reg.register,
1373 node,
1374 "store bool to string result to memory",
1375 );
1376 }
1377 }
1378
1379 IntrinsicFunction::StringToString => {
1380 if target_destination.is_register() {
1381 self.builder.add_string_to_string(
1382 target_destination.register().unwrap(),
1383 self_reg.unwrap(),
1384 node,
1385 "string_to_string",
1386 );
1387 } else {
1388 let temp_reg = self.temp_registers.allocate(
1389 VmType::new_contained_in_register(u32_type()),
1390 "temporary for string to string intrinsic",
1391 );
1392
1393 self.builder.add_string_to_string(
1394 &temp_reg.register,
1395 self_reg.unwrap(),
1396 node,
1397 "string_to_string",
1398 );
1399
1400 self.emit_store_scalar_to_memory_offset_instruction(
1401 target_destination.grab_memory_location(),
1402 &temp_reg.register,
1403 node,
1404 "store string to string result to memory",
1405 );
1406 }
1407 }
1408
1409 IntrinsicFunction::StringStartsWith => {
1410 let converted_to_expressions: Vec<_> = arguments
1411 .iter()
1412 .map(|arg| {
1413 let ArgumentExpression::Expression(found_expression) = arg else {
1414 panic!("must be expression");
1415 };
1416 found_expression.clone()
1417 })
1418 .collect();
1419 let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
1420 if target_destination.is_register() {
1421 self.builder.add_string_starts_with(
1422 target_destination.register().unwrap(),
1423 self_reg.unwrap(),
1424 &other_str,
1425 node,
1426 "string_starts_with",
1427 );
1428 } else {
1429 let temp_reg = self.temp_registers.allocate(
1430 VmType::new_contained_in_register(u8_type()),
1431 "temporary for string starts with intrinsic",
1432 );
1433
1434 self.builder.add_string_starts_with(
1435 &temp_reg.register,
1436 self_reg.unwrap(),
1437 &other_str,
1438 node,
1439 "string_starts_with",
1440 );
1441
1442 self.emit_store_scalar_to_memory_offset_instruction(
1443 target_destination.grab_memory_location(),
1444 &temp_reg.register,
1445 node,
1446 "store string starts with result to memory",
1447 );
1448 }
1449 }
1450
1451 IntrinsicFunction::StringToInt => {
1452 let pointer = self.emit_compute_effective_address_to_register(
1453 target_destination,
1454 node,
1455 "need pointer to tuple",
1456 );
1457 self.builder
1458 .add_string_to_int(&pointer, self_reg.unwrap(), node, "string to int");
1459 }
1460 IntrinsicFunction::StringToFloat => {
1461 let pointer = self.emit_compute_effective_address_to_register(
1462 target_destination,
1463 node,
1464 "need pointer to tuple",
1465 );
1466 self.builder.add_string_to_float(
1467 &pointer,
1468 self_reg.unwrap(),
1469 node,
1470 "string to float",
1471 );
1472 }
1473
1474 IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
1476 let collection_pointer = PointerLocation {
1477 ptr_reg: self_reg.unwrap().clone(),
1478 };
1479 if target_destination.is_register() {
1480 self.emit_collection_is_empty(
1481 target_destination.register().unwrap().clone(),
1482 &collection_pointer,
1483 node,
1484 "vec empty",
1485 );
1486 } else {
1487 let temp_reg = self.temp_registers.allocate(
1488 VmType::new_contained_in_register(u8_type()),
1489 "temporary for collection is empty intrinsic",
1490 );
1491
1492 self.emit_collection_is_empty(
1493 temp_reg.register.clone(),
1494 &collection_pointer,
1495 node,
1496 "vec empty",
1497 );
1498
1499 self.emit_store_scalar_to_memory_offset_instruction(
1500 target_destination.grab_memory_location(),
1501 &temp_reg.register,
1502 node,
1503 "store collection is empty result to memory",
1504 );
1505 }
1506 }
1507
1508 IntrinsicFunction::StringLen
1509 | IntrinsicFunction::MapLen
1510 | IntrinsicFunction::VecLen => {
1511 let collection_pointer = PointerLocation {
1512 ptr_reg: self_reg.unwrap().clone(),
1513 };
1514 if target_destination.is_register() {
1515 self.emit_collection_len(
1516 target_destination.register().unwrap(),
1517 &collection_pointer,
1518 node,
1519 "get the collection element_count",
1520 );
1521 } else {
1522 let temp_reg = self.temp_registers.allocate(
1523 VmType::new_contained_in_register(u16_type()),
1524 "temporary for collection len intrinsic",
1525 );
1526
1527 self.emit_collection_len(
1528 &temp_reg.register,
1529 &collection_pointer,
1530 node,
1531 "get the collection element_count",
1532 );
1533
1534 self.emit_store_scalar_to_memory_offset_instruction(
1535 target_destination.grab_memory_location(),
1536 &temp_reg.register,
1537 node,
1538 "store collection len result to memory",
1539 );
1540 }
1541 }
1542 IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
1543 let collection_pointer = PointerLocation {
1544 ptr_reg: self_reg.unwrap().clone(),
1545 };
1546 if target_destination.is_register() {
1547 self.emit_collection_capacity(
1548 target_destination.register().unwrap(),
1549 &collection_pointer,
1550 node,
1551 "get the collection element_count",
1552 );
1553 } else {
1554 let temp_reg = self.temp_registers.allocate(
1555 VmType::new_contained_in_register(u16_type()),
1556 "temporary for collection capacity intrinsic",
1557 );
1558
1559 self.emit_collection_capacity(
1560 &temp_reg.register,
1561 &collection_pointer,
1562 node,
1563 "get the collection element_count",
1564 );
1565
1566 self.emit_store_scalar_to_memory_offset_instruction(
1567 target_destination.grab_memory_location(),
1568 &temp_reg.register,
1569 node,
1570 "store collection capacity result to memory",
1571 );
1572 }
1573 }
1574
1575 IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
1576 let grid_self_ptr_reg = PointerLocation {
1579 ptr_reg: self_reg.unwrap().clone(),
1580 };
1581 let converted_to_expressions: Vec<_> = arguments
1582 .iter()
1583 .map(|arg| {
1584 let ArgumentExpression::Expression(found_expression) = arg else {
1585 panic!("must be expression");
1586 };
1587 found_expression.clone()
1588 })
1589 .collect();
1590 self.emit_intrinsic_map(
1591 target_destination,
1592 intrinsic_fn,
1593 &grid_self_ptr_reg,
1594 &converted_to_expressions,
1595 node,
1596 comment,
1597 ctx,
                );
            }
        }
    }

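    /// Emits a map remove: the key expression is materialized as a pointer and
    /// passed to the map remove instruction together with the map header.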
    fn emit_intrinsic_map_remove(
        &mut self,
        map_header_reg: &PointerLocation,
        key_expression: &Expression,
        ctx: &Context,
    ) {
        let key_register =
            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);

        self.builder
            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "");
    }

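    /// Loads the 16-bit capacity field from a collection header
    /// (`COLLECTION_CAPACITY_OFFSET`) into `output_reg`.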
    fn emit_collection_capacity(
        &mut self,
        output_reg: &TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_CAPACITY_OFFSET,
            node,
            comment,
        );
    }

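    /// Loads the 16-bit element count from a collection header
    /// (`COLLECTION_ELEMENT_COUNT_OFFSET`) into `output_reg`.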
    fn emit_collection_len(
        &mut self,
        output_reg: &TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_ELEMENT_COUNT_OFFSET,
            node,
            &format!("{comment} - collection element_count"),
        );
    }

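    /// Emits an emptiness test for a collection: loads the element count from
    /// the header and inverts it with `meqz`, so `output_reg` ends up holding
    /// the inverted truth value of the count (zero count means empty).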
    fn emit_collection_is_empty(
        &mut self,
        output_reg: TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        _comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            &output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_ELEMENT_COUNT_OFFSET,
            node,
            "get the collection length for testing if it is empty",
        );
        self.builder.add_meqz(
            &output_reg,
            &output_reg,
            node,
            "convert the collection length to an inverted bool",
        );
    }
}
1669}