use crate::code_bld::CodeBuilder;
use crate::ctx::Context;

use crate::transformer::{Collection, Transformer};
use source_map_node::Node;
use swamp_semantic::intr::IntrinsicFunction;
use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
use swamp_vm_isa::{
    MemoryOffset, COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET,
    GRID_HEADER_HEIGHT_OFFSET, GRID_HEADER_WIDTH_OFFSET,
};
use swamp_vm_types::types::{
    float_type, int_type, pointer_type, u16_type, u32_type, u8_type, Place, TypedRegister, VmType,
};
use swamp_vm_types::{AggregateMemoryLocation, MemoryLocation, PointerLocation};

impl CodeBuilder<'_> {
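    /// Emits an intrinsic call expressed as a plain argument list: the first
    /// argument (when present) is evaluated as the `self` value and the rest are
    /// forwarded to `emit_single_intrinsic_call_with_self`.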
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::single_match_else)]
    pub fn emit_single_intrinsic_call(
        &mut self,
        target_reg: &Place,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[ArgumentExpression],
        ctx: &Context,
    ) {
        let self_reg = if arguments.is_empty() {
            None
        } else {
            let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
                panic!("Expected expression for self argument");
            };
            Some(self.emit_scalar_rvalue(self_expr, ctx))
        };

        let rest_args = if arguments.len() > 1 {
            &arguments[1..]
        } else {
            &[]
        };
        self.emit_single_intrinsic_call_with_self(
            target_reg,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            rest_args,
            ctx,
            "single intrinsic call",
        );
    }

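    /// Lowers map intrinsics (`MapHas`, `MapRemove`) against the map header
    /// pointed to by `self_ptr_reg`; the remaining map intrinsics are still `todo!`.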
    pub fn emit_intrinsic_map(
        &mut self,
        output_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::MapHas => {
                let key_argument = &arguments[0];
                let key_temp_storage_reg =
                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);

                self.builder.add_map_has(
                    output_destination.register().unwrap(),
                    self_ptr_reg,
                    &key_temp_storage_reg,
                    node,
                    "map_has",
                );
            }
            IntrinsicFunction::MapRemove => {
                let key_argument = &arguments[0];
                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
            }
            _ => todo!("missing intrinsic_map {intrinsic_fn}"),
        }
    }

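    /// Lowers sparse-collection intrinsics (`SparseAdd`, `SparseRemove`,
    /// `SparseIsAlive`) against the sparse header pointed to by `self_ptr_reg`.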
    pub fn emit_intrinsic_sparse(
        &mut self,
        output_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::SparseAdd => {
                let element_to_add_expression = &arguments[0];
                self.emit_sparse_add(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    element_to_add_expression,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::SparseRemove => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
            }

            IntrinsicFunction::SparseIsAlive => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_is_alive(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    sparse_id_int_expression,
                    node,
                    ctx,
                );
            }
            _ => todo!("unknown sparse {intrinsic_fn}"),
        }
    }
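
    /// Lowers grid intrinsics (`GridSet`, `GridGet`, `GridWidth`, `GridHeight`)
    /// against the grid header pointed to by `self_ptr_reg`.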
    pub fn emit_intrinsic_grid(
        &mut self,
        target_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::GridSet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];
                let value_expr = &arguments[2];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
                let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(element_gen_type.clone()),
                    "temporary scalar",
                );

                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_gen_type.total_size,
                    node,
                    comment,
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize grid set allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    value_expr,
                    "grid set",
                    ctx,
                );
            }
            IntrinsicFunction::GridGet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);

                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for grid element address",
                );

                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_type.total_size,
                    node,
                    comment,
                );

                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    target_destination,
                    &element_memory_location,
                    node,
                    "copy grid element value to destination",
                );
            }

            IntrinsicFunction::GridWidth => {
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid width",
                );

                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let width_location =
                    self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());

                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &width_location.location,
                    node,
                    comment,
                );

                let value_source = Place::Register(temp.register);

                self.emit_copy_value_between_places(
                    target_destination,
                    &value_source,
                    node,
                    "store grid width to destination",
                );
            }
            IntrinsicFunction::GridHeight => {
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid height",
                );

                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let height_location =
                    self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());

                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &height_location.location,
                    node,
                    comment,
                );

                let value_source = Place::Register(temp.register);

                self.emit_copy_value_between_places(
                    target_destination,
                    &value_source,
                    node,
                    "store grid height to destination",
                );
            }
            _ => todo!("unhandled grid intrinsic {intrinsic_fn}"),
        }
    }

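    /// Lowers vec intrinsics (push, pop, extend, copy, slice, remove, clear, get)
    /// against the vec header pointed to by `self_ptr_reg`, copying any produced
    /// value into `output_destination`.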
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_vec(
        &mut self,
        output_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        ctx: &Context,
    ) {
        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
        match intrinsic_fn {
            IntrinsicFunction::VecPush => {
                let element_expr = &arguments[0];

                let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "pointer to new element",
                );

                self.builder.add_vec_push_addr(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    node,
                    "set pointer to new element",
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize vec.push allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    element_expr,
                    "vec push",
                    ctx,
                );
            }

            IntrinsicFunction::VecExtend => {
                let element_expr = &arguments[0];
                let other_vec_reg = self.emit_scalar_rvalue(element_expr, ctx);

                self.builder.add_vec_extend(
                    &self_ptr_reg.ptr_reg,
                    &other_vec_reg,
                    node,
                    "extend vec",
                );
            }

            IntrinsicFunction::VecCopy => {
                let output_pointer = self.emit_compute_effective_address_to_register(
                    output_destination,
                    node,
                    "get absolute pointer for vec slice destination",
                );
                let output_pointer_location = PointerLocation::new(output_pointer);

                self.builder.add_vec_copy(
                    &output_pointer_location,
                    self_ptr_reg,
                    node,
                    "copy vector",
                );
            }

            IntrinsicFunction::VecPop => {
                let element_type = self_basic_type.element().unwrap();
                let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
                    found_target_reg.clone()
                } else {
                    let temp = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type.clone()),
                        "temp for vec pop",
                    );
                    temp.register
                };
                self.builder.add_vec_pop(
                    &pop_target_reg,
                    &self_ptr_reg.ptr_reg,
                    element_type.total_size,
                    node,
                    "vec pop",
                );
                let source_memory_location = MemoryLocation {
                    base_ptr_reg: pop_target_reg,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "copy from vec pop",
                );
            }

            IntrinsicFunction::VecSlice => {
                let range_expr = &arguments[0];
                let range_region = self.emit_scalar_rvalue(range_expr, ctx);

                let output_pointer = self.emit_compute_effective_address_to_register(
                    output_destination,
                    node,
                    "get absolute pointer for vec slice destination",
                );
                let output_pointer_location = PointerLocation::new(output_pointer);

                self.builder.add_vec_copy_range(
                    &output_pointer_location,
                    self_ptr_reg,
                    &range_region,
                    node,
                    "vec slice",
                );
            }

            IntrinsicFunction::VecRemoveIndex => {
                let index_region_expr = &arguments[0];
                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    &index_region,
                    node,
                    "remove index",
                );
            }
            IntrinsicFunction::VecRemoveIndexGetValue => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_basic_type.element().unwrap();

                if let Some(target_reg) = output_destination.register() {
                    self.builder.add_vec_remove_index_get_value(
                        target_reg,
                        &self_ptr_reg.ptr_reg,
                        &key_region,
                        node,
                        "vec remove index get value to register",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type),
                        "temp for vec remove index get value",
                    );

                    self.builder.add_vec_remove_index_get_value(
                        &temp_reg.register,
                        &self_ptr_reg.ptr_reg,
                        &key_region,
                        node,
                        "vec remove index get value to temp",
                    );

                    let source = Place::Register(temp_reg.register);
                    self.emit_copy_value_between_places(
                        output_destination,
                        &source,
                        node,
                        "copy vec element to destination",
                    );
                }
            }
            IntrinsicFunction::VecRemoveFirstIndexGetValue => {
                let zero_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "vec remove first. set index 0",
                );
                self.builder
                    .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
                let value_addr_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u32_type()),
                    "vec entry addr to copy from",
                );
                let element_type = self_basic_type.element().unwrap();
                self.builder.add_vec_subscript(
                    value_addr_reg.register(),
                    &self_ptr_reg.ptr_reg,
                    zero_reg.register(),
                    node,
                    "lookup first entry in vec",
                );

                let source_memory_location = MemoryLocation {
                    base_ptr_reg: value_addr_reg.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "load the vec entry to target register",
                );

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    zero_reg.register(),
                    node,
                    "vec remove first index",
                );
            }
            IntrinsicFunction::VecClear => {
                let temp_element_count_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "vec_clear zero",
                );
                self.builder.add_mov_16_immediate_value(
                    temp_element_count_reg.register(),
                    0,
                    node,
                    "set to zero",
                );

                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );

                self.builder.add_st16_using_ptr_with_offset(
                    &self_memory_location
                        .offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type())
                        .location,
                    temp_element_count_reg.register(),
                    node,
                    "set element_count to zero",
                );
            }

            IntrinsicFunction::VecGet => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for vec element address",
                );

                self.builder.add_vec_subscript(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    &key_region,
                    node,
                    "get vec element address",
                );

                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &element_memory_location,
                    node,
                    "copy vec element to destination",
                );
            }
            _ => todo!("Vec {intrinsic_fn}"),
        }
    }

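    /// Emits the instruction for a scalar int intrinsic; `arguments[0]` is the
    /// already materialized `self` value and any further registers are the extra
    /// operands.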
    fn emit_intrinsic_call_int(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument = &arguments[0];

        match intrinsic_fn {
            IntrinsicFunction::IntAbs => {
                self.builder
                    .add_int_abs(target_reg, first_argument, node, "int abs");
            }

            IntrinsicFunction::IntRnd => {
                self.builder
                    .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
            }
            IntrinsicFunction::IntMax => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_max(target_reg, first_argument, int_register, node, "int max");
            }
            IntrinsicFunction::IntMin => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_min(target_reg, first_argument, int_register, node, "int min");
            }
            IntrinsicFunction::IntClamp => {
                let min_reg = &arguments[1];
                let max_reg = &arguments[2];
                self.builder.add_int_clamp(
                    target_reg,
                    first_argument,
                    min_reg,
                    max_reg,
                    node,
                    "int clamp",
                );
            }
            IntrinsicFunction::IntToFloat => {
                self.builder.add_int_to_float(
                    target_reg,
                    first_argument,
                    node,
                    &format!("int to float {}", first_argument.comment()),
                );
            }
            IntrinsicFunction::IntToString => {
                self.builder
                    .add_int_to_string(target_reg, first_argument, node, "int_to_string");
            }
            _ => {}
        }
    }

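    /// Emits the instruction for a fixed-point (float) intrinsic; `arguments[0]`
    /// is the already materialized `self` value and any further registers are the
    /// extra operands.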
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_fixed(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument_reg = &arguments[0];
        match intrinsic_fn {
            IntrinsicFunction::FloatRound => {
                self.builder
                    .add_float_round(target_reg, first_argument_reg, node, "float round");
            }
            IntrinsicFunction::FloatFloor => {
                self.builder
                    .add_float_floor(target_reg, first_argument_reg, node, "float floor");
            }
            IntrinsicFunction::FloatSqrt => {
                self.builder
                    .add_float_sqrt(target_reg, first_argument_reg, node, "float sqrt");
            }
            IntrinsicFunction::FloatSign => {
                self.builder
                    .add_float_sign(target_reg, first_argument_reg, node, "float sign");
            }
            IntrinsicFunction::FloatAbs => {
                self.builder
                    .add_float_abs(target_reg, first_argument_reg, node, "float abs");
            }
            IntrinsicFunction::FloatRnd => {
                self.builder.add_float_prnd(
                    target_reg,
                    first_argument_reg,
                    node,
                    "float pseudo random",
                );
            }
            IntrinsicFunction::FloatCos => {
                self.builder
                    .add_float_cos(target_reg, first_argument_reg, node, "float cos");
            }
            IntrinsicFunction::FloatSin => {
                self.builder
                    .add_float_sin(target_reg, first_argument_reg, node, "float sin");
            }
            IntrinsicFunction::FloatAcos => {
                self.builder
                    .add_float_acos(target_reg, first_argument_reg, node, "float acos");
            }
            IntrinsicFunction::FloatAsin => {
                self.builder
                    .add_float_asin(target_reg, first_argument_reg, node, "float asin");
            }
            IntrinsicFunction::FloatAtan2 => {
                self.builder
                    .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
            }
            IntrinsicFunction::FloatMin => {
                let float_region = &arguments[1];
                self.builder.add_float_min(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float min",
                );
            }
            IntrinsicFunction::FloatMax => {
                let float_region = &arguments[1];
                self.builder.add_float_max(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float max",
                );
            }
            IntrinsicFunction::FloatClamp => {
                let float_region = &arguments[1];
                let float_b_region = &arguments[2];

                self.builder.add_float_clamp(
                    target_reg,
                    float_region,
                    first_argument_reg,
                    float_b_region,
                    node,
786 "float round",
                );
            }
            IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
                target_reg,
                first_argument_reg,
                node,
                "float_to_string",
            ),
            _ => panic!("wasn't a fixed operation"),
        }
    }

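    /// Lowers transformer intrinsics (filter, filter-in-place, for, while, find)
    /// by iterating over the collection at `self_addr` and invoking the provided
    /// lambda for each element.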
    pub fn emit_intrinsic_transformer(
        &mut self,
        target_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_addr: &PointerLocation,
        lambda: (Vec<VariableRef>, &Expression),
        node: &Node,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::TransformerFold => {}
            IntrinsicFunction::TransformerFilter => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Filter,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFilterInPlace => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::FilterMut,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::For,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            IntrinsicFunction::TransformerWhile => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::While,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFind => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Find,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            _ => todo!("{intrinsic_fn}"),
        }
    }

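    /// Like `emit_single_intrinsic_call_with_self`, but the `self` value is given
    /// as a `Place` and is first materialized into a register (when present).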
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self_destination(
        &mut self,
        target_destination: &Place,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_destination: Option<&Place>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        let self_reg = if let Some(self_dest) = self_destination {
            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
        } else {
            None
        };

        self.emit_single_intrinsic_call_with_self(
            target_destination,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            arguments,
            ctx,
            comment,
        );
    }

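    /// Central dispatch for intrinsic calls: `self_reg` holds the already
    /// materialized `self` value (if any), and each intrinsic group decides
    /// whether the result goes directly to a register or through a temporary
    /// register that is then stored to the target memory location.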
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self(
        &mut self,
        target_destination: &Place,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_reg: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::Float2Magnitude
            | IntrinsicFunction::FloatAbs
            | IntrinsicFunction::FloatRound
            | IntrinsicFunction::FloatFloor
            | IntrinsicFunction::FloatSqrt
            | IntrinsicFunction::FloatSign
            | IntrinsicFunction::FloatRnd
            | IntrinsicFunction::FloatCos
            | IntrinsicFunction::FloatSin
            | IntrinsicFunction::FloatAcos
            | IntrinsicFunction::FloatAsin
            | IntrinsicFunction::FloatAtan2
            | IntrinsicFunction::FloatMin
            | IntrinsicFunction::FloatMax
            | IntrinsicFunction::FloatClamp
            | IntrinsicFunction::FloatToString => {
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the fixed point value into memory",
                    );
                }
            }

            IntrinsicFunction::IntToFloat => {
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for int to float intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                let int_value_reg = self_reg.unwrap();

                self.builder.add_int_to_float(
                    &dest_reg,
                    int_value_reg,
                    node,
                    &format!("int to float {}", int_value_reg.comment()),
                );

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the float result from int to float conversion",
                    );
                }
            }

            IntrinsicFunction::IntAbs
            | IntrinsicFunction::IntRnd
            | IntrinsicFunction::IntMax
            | IntrinsicFunction::IntMin
            | IntrinsicFunction::IntClamp
            | IntrinsicFunction::IntToString => {
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    let target_reg = target_destination.register().unwrap();
                    (None, target_reg.clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    if target_destination.is_register() {
                        self.builder.add_mov_reg(
                            target_destination.register().unwrap(),
                            &temp_reg,
                            node,
                            "copy intrinsic result from temp to target register",
                        );
                    } else {
                        self.emit_store_scalar_to_memory_offset_instruction(
                            target_destination.grab_memory_location(),
                            &temp_reg,
                            node,
                            "put the low level intrinsic fixed (int) back to memory",
                        );
                    }
                }
            }

            IntrinsicFunction::EnumFromDiscriminant => {
                let enum_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                assert!(
                    arguments.len() == 1,
                    "EnumFromDiscriminant expects exactly one argument"
                );
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.builder.add_st8_using_ptr_with_offset(
                    &MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        enum_pointer.ptr_reg,
                    ),
                    &converted_regs[1],
                    node,
                    "overwrite the discriminant in the enum pointer",
                );
            }

            IntrinsicFunction::EnumDiscriminant => {
                let enum_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let discriminant_temp_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "temp register for fetching discriminant",
                );

                self.builder.add_ld8_from_pointer_with_offset(
                    discriminant_temp_reg.register(),
                    &enum_pointer.ptr_reg,
                    MemoryOffset(0),
                    node,
                    "get the discriminant from the enum pointer",
                );

                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        &discriminant_temp_reg.register,
                        node,
                        "copy discriminant in register to target register",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &discriminant_temp_reg.register,
                        node,
                        "store discriminant in register to target memory location",
                    );
                }
            }

            IntrinsicFunction::VecPush
            | IntrinsicFunction::VecPop
            | IntrinsicFunction::VecExtend
            | IntrinsicFunction::VecRemoveIndex
            | IntrinsicFunction::VecRemoveIndexGetValue
            | IntrinsicFunction::VecRemoveFirstIndexGetValue
            | IntrinsicFunction::VecClear
            | IntrinsicFunction::VecSlice
            | IntrinsicFunction::VecSwap
            | IntrinsicFunction::VecInsert
            | IntrinsicFunction::VecFirst
            | IntrinsicFunction::VecGet
            | IntrinsicFunction::VecCopy
            | IntrinsicFunction::VecLast => {
                let vec_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();

                self.emit_intrinsic_call_vec(
                    target_destination,
                    intrinsic_fn,
                    &vec_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::GridGet
            | IntrinsicFunction::GridSet
            | IntrinsicFunction::GridWidth
            | IntrinsicFunction::GridHeight => {
                let grid_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_grid(
                    target_destination,
                    intrinsic_fn,
                    &grid_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::SparseIsAlive
            | IntrinsicFunction::SparseRemove
            | IntrinsicFunction::SparseAdd => {
                let grid_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_sparse(
                    target_destination,
                    intrinsic_fn,
                    &grid_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor
            | IntrinsicFunction::TransformerWhile
            | IntrinsicFunction::TransformerFindMap
            | IntrinsicFunction::TransformerAny
            | IntrinsicFunction::TransformerAll
            | IntrinsicFunction::TransformerMap
            | IntrinsicFunction::TransformerFilter
            | IntrinsicFunction::TransformerFilterInPlace
            | IntrinsicFunction::TransformerFilterMap
            | IntrinsicFunction::TransformerFind
            | IntrinsicFunction::TransformerFold => {
                let collection_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let lambda_expression = &arguments[0];

                let ArgumentExpression::Expression(expr) = lambda_expression else {
                    panic!("transformer argument must be an expression");
                };

                let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
                    panic!("must have lambda for transformers");
                };

                self.emit_intrinsic_transformer(
                    target_destination,
                    intrinsic_fn,
                    &collection_self_ptr_reg,
                    (lambda_variables.clone(), lambda_expr),
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::RuntimePanic => {
                self.builder
                    .add_panic(self_reg.unwrap(), node, "intrinsic panic");
            }

            IntrinsicFunction::RuntimeHalt => {
                self.builder.add_halt(node, "intrinsic halt");
            }

            IntrinsicFunction::RuntimeStep => {
                self.builder.add_step(node, "intrinsic step");
            }

            IntrinsicFunction::RangeInit => {
                let start_reg = self_reg.unwrap();
                let end_arg = &arguments[0];
                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
                    panic!("range end must be an expression");
                };
                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);

                let is_inclusive = &arguments[1];
                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
                    panic!("range inclusive flag must be an expression");
                };
                let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
                let absolute_range_pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "create range target pointer",
                );
                self.builder.add_range_init(
                    &absolute_range_pointer,
                    start_reg,
                    &end_reg,
                    &is_inclusive_reg,
                    node,
                    "create a range",
                );
            }

            IntrinsicFunction::CodepointToString => {
                if target_destination.is_register() {
                    self.builder.add_codepoint_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for codepoint to string intrinsic",
                    );

                    self.builder.add_codepoint_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store codepoint to string result to memory",
                    );
                }
            }

            IntrinsicFunction::CodepointToInt => {
                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store codepoint to int result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToString => {
                if target_destination.is_register() {
                    self.builder.byte_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for byte to string intrinsic",
                    );

                    self.builder.byte_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to string result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToInt => {
                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to int result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToFloat => {
                if target_destination.is_register() {
                    self.builder.add_int_to_float(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary for byte to float intrinsic",
                    );

                    self.builder.add_int_to_float(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to float result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToCodepoint => {
                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_codepoint",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to codepoint result to memory",
                    );
                }
            }

            IntrinsicFunction::BoolToString => {
                if target_destination.is_register() {
                    self.builder.bool_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bool_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for bool to string intrinsic",
                    );

                    self.builder.bool_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "bool_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store bool to string result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteVectorToString => {
                if target_destination.is_register() {
                    self.builder.add_bytes_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for string duplicate intrinsic",
                    );

                    self.builder.add_bytes_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store converted utf8 string to memory",
                    );
                }
            }

            IntrinsicFunction::ByteVectorToStringStorage => {
                if target_destination.is_register() {
                    self.builder.add_bytes_to_string_storage(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string_storage",
                    );
                } else {
                    let target_pointer_reg = self.emit_compute_effective_address_to_register(
                        target_destination,
                        node,
                        "get pointer to target string storage",
                    );

                    self.builder.add_bytes_to_string_storage(
                        &target_pointer_reg,
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string_storage",
                    );
                }
            }

            IntrinsicFunction::StringDuplicate => {
                if target_destination.is_register() {
                    self.builder.add_string_duplicate(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for string duplicate intrinsic",
                    );

                    self.builder.add_string_duplicate(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "string_duplicate",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string duplicate result to memory",
                    );
                }
            }

            IntrinsicFunction::StringToString => {
                if target_destination.is_register() {
                    self.builder.add_string_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for string to string intrinsic",
                    );

                    self.builder.add_string_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string to string result to memory",
                    );
                }
            }

            IntrinsicFunction::StringStartsWith => {
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
                if target_destination.is_register() {
                    self.builder.add_string_starts_with(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        &other_str,
                        node,
                        "string_starts_with",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u8_type()),
                        "temporary for string starts with intrinsic",
                    );

                    self.builder.add_string_starts_with(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        &other_str,
                        node,
                        "string_starts_with",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string starts with result to memory",
                    );
                }
            }

            IntrinsicFunction::StringToInt => {
                let pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "need pointer to tuple",
                );
                self.builder
                    .add_string_to_int(&pointer, self_reg.unwrap(), node, "string to int");
            }
            IntrinsicFunction::StringToFloat => {
                let pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "need pointer to tuple",
                );
                self.builder.add_string_to_float(
                    &pointer,
                    self_reg.unwrap(),
                    node,
                    "string to float",
                );
            }

            IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
                let collection_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                if target_destination.is_register() {
                    self.emit_collection_is_empty(
                        target_destination.register().unwrap().clone(),
                        &collection_pointer,
                        node,
                        "vec empty",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u8_type()),
                        "temporary for collection is empty intrinsic",
                    );

                    self.emit_collection_is_empty(
                        temp_reg.register.clone(),
                        &collection_pointer,
                        node,
                        "vec empty",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store collection is empty result to memory",
                    );
                }
            }

            IntrinsicFunction::StringLen
            | IntrinsicFunction::MapLen
            | IntrinsicFunction::VecLen => {
                let collection_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                if target_destination.is_register() {
                    self.emit_collection_len(
                        target_destination.register().unwrap(),
                        &collection_pointer,
                        node,
                        "get the collection element_count",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u16_type()),
                        "temporary for collection len intrinsic",
                    );

                    self.emit_collection_len(
                        &temp_reg.register,
                        &collection_pointer,
                        node,
                        "get the collection element_count",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store collection len result to memory",
                    );
                }
            }
            IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
                let collection_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                if target_destination.is_register() {
                    self.emit_collection_capacity(
                        target_destination.register().unwrap(),
                        &collection_pointer,
                        node,
1758 "get the collection element_count",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u16_type()),
                        "temporary for collection capacity intrinsic",
                    );

                    self.emit_collection_capacity(
                        &temp_reg.register,
                        &collection_pointer,
                        node,
1770 "get the collection element_count",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store collection capacity result to memory",
                    );
                }
            }

            IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
                let grid_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_map(
                    target_destination,
                    intrinsic_fn,
                    &grid_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }
            IntrinsicFunction::PtrFromU32 => {}
        }
    }

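    /// Emits a map remove: the key expression is materialized (as an aggregate
    /// pointer, or a pointer to scalar memory) and passed to the `map_remove`
    /// instruction together with the map header pointer.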
    fn emit_intrinsic_map_remove(
        &mut self,
        map_header_reg: &PointerLocation,
        key_expression: &Expression,
        ctx: &Context,
    ) {
        let key_register =
            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);

        self.builder
            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "");
    }

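    /// Loads the 16-bit capacity field from a collection header into `output_reg`.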
    fn emit_collection_capacity(
        &mut self,
        output_reg: &TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_CAPACITY_OFFSET,
            node,
            comment,
        );
    }

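    /// Loads the 16-bit element count field from a collection header into
    /// `output_reg`.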
    fn emit_collection_len(
        &mut self,
        output_reg: &TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_ELEMENT_COUNT_OFFSET,
            node,
            &format!("{comment} - collection element_count"),
        );
    }

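    /// Loads the element count from a collection header and converts it with
    /// `meqz` into a boolean-style "is empty" value in `output_reg`.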
    fn emit_collection_is_empty(
        &mut self,
        output_reg: TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        _comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            &output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_ELEMENT_COUNT_OFFSET,
            node,
            "get the collection length for testing if it is empty",
        );
        self.builder.add_meqz(
            &output_reg,
            &output_reg,
            node,
            "convert the collection length to inverted bool",
        );
    }
}