swamp_code_gen/intr.rs

1/*
2 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
3 * Licensed under the MIT License. See LICENSE in the project root for license information.
4 */
5use crate::code_bld::CodeBuilder;
6use crate::ctx::Context;
7
8use crate::transformer::{Collection, Transformer};
9use source_map_node::Node;
10use swamp_semantic::intr::IntrinsicFunction;
11use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
12use swamp_vm_isa::{
13    COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET, GRID_HEADER_HEIGHT_OFFSET,
14    GRID_HEADER_WIDTH_OFFSET, MemoryOffset,
15};
16use swamp_vm_types::types::{
17    Place, TypedRegister, VmType, float_type, int_type, pointer_type, u8_type, u16_type, u32_type,
18};
19use swamp_vm_types::{AggregateMemoryLocation, MemoryLocation, PointerLocation};
20
21impl CodeBuilder<'_> {
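    /// Lowers a stand-alone intrinsic call: materializes the first argument as `self`
    /// (when present) and forwards the remaining arguments to
    /// `emit_single_intrinsic_call_with_self`.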
22    #[allow(clippy::too_many_lines)]
23    #[allow(clippy::single_match_else)]
24    pub fn emit_single_intrinsic_call(
25        &mut self,
26        target_reg: &Place,
27        node: &Node,
28        intrinsic_fn: &IntrinsicFunction,
29        arguments: &[ArgumentExpression],
30        ctx: &Context,
31    ) {
32        {
33            // For primitive intrinsics, materialize the self argument to a register early
34            let self_reg = if arguments.is_empty() {
35                None
36            } else {
37                let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
38                    panic!("Expected expression for self argument");
39                };
40                Some(self.emit_scalar_rvalue(self_expr, ctx))
41            };
42
43            let rest_args = if arguments.len() > 1 {
44                &arguments[1..]
45            } else {
46                &[]
47            };
48            self.emit_single_intrinsic_call_with_self(
49                target_reg,
50                node,
51                intrinsic_fn,
52                self_reg.as_ref(),
53                rest_args,
54                ctx,
55                "single intrinsic call",
56            );
57        }
58    }
59
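    /// Emits code for map intrinsics (`MapHas`, `MapRemove`) operating on the map pointed
    /// to by `self_ptr_reg`. For `MapHas` the key is materialized to memory so the VM can
    /// hash it.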
60    pub fn emit_intrinsic_map(
61        &mut self,
62        output_destination: &Place,
63        intrinsic_fn: &IntrinsicFunction,
64        self_ptr_reg: &PointerLocation,
65        arguments: &[Expression],
66        node: &Node,
67        comment: &str,
68        ctx: &Context,
69    ) {
70        match intrinsic_fn {
71            IntrinsicFunction::MapHas => {
72                let key_argument = &arguments[0];
73                // We have to get the key materialized in a temporary storage, so the map can calculate the hash for it.
74                let key_temp_storage_reg =
75                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);
76
77                self.builder.add_map_has(
78                    output_destination.register().unwrap(),
79                    self_ptr_reg,
80                    &key_temp_storage_reg,
81                    node,
82                    "map_has",
83                );
84            }
85            IntrinsicFunction::MapRemove => {
86                let key_argument = &arguments[0];
87                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
88            }
89            _ => todo!("unhandled map intrinsic {intrinsic_fn}"),
90        }
91    }
92
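    /// Emits code for sparse-collection intrinsics (`SparseAdd`, `SparseRemove`,
    /// `SparseIsAlive`) against the sparse collection at `self_ptr_reg`.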
93    pub fn emit_intrinsic_sparse(
94        &mut self,
95        output_destination: &Place,
96        intrinsic_fn: &IntrinsicFunction,
97        self_ptr_reg: &PointerLocation,
98        arguments: &[Expression],
99        node: &Node,
100        comment: &str,
101        ctx: &Context,
102    ) {
103        match intrinsic_fn {
104            IntrinsicFunction::SparseAdd => {
105                let element_to_add_expression = &arguments[0];
106                self.emit_sparse_add(
107                    &output_destination.register().unwrap().clone(),
108                    self_ptr_reg,
109                    element_to_add_expression,
110                    node,
111                    ctx,
112                );
113            }
114
115            IntrinsicFunction::SparseRemove => {
116                let sparse_id_int_expression = &arguments[0];
117                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
118            }
119
120            IntrinsicFunction::SparseIsAlive => {
121                let sparse_id_int_expression = &arguments[0];
122                self.emit_sparse_is_alive(
123                    &output_destination.register().unwrap().clone(),
124                    self_ptr_reg,
125                    sparse_id_int_expression,
126                    node,
127                    ctx,
128                );
129            }
130            _ => todo!("unknown sparse intrinsic {intrinsic_fn}"),
131        }
132    }
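
    /// Emits code for grid intrinsics (`GridSet`, `GridGet`, `GridWidth`, `GridHeight`).
    /// Element accesses go through `add_grid_get_entry_addr`; width and height are 16-bit
    /// loads from the grid header.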
133    pub fn emit_intrinsic_grid(
134        &mut self,
135        target_destination: &Place,
136        intrinsic_fn: &IntrinsicFunction,
137        self_ptr_reg: &PointerLocation,
138        arguments: &[Expression],
139        node: &Node,
140        comment: &str,
141        ctx: &Context,
142    ) {
153        match intrinsic_fn {
154            IntrinsicFunction::GridSet => {
155                let x_expr = &arguments[0];
156                let y_expr = &arguments[1];
157                let value_expr = &arguments[2];
158
159                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
160                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
161                let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
162
163                let temp_element_ptr = self.temp_registers.allocate(
164                    VmType::new_contained_in_register(element_gen_type.clone()),
165                    "temporary scalar",
166                );
167
168                self.builder.add_grid_get_entry_addr(
169                    &temp_element_ptr.register,
170                    self_ptr_reg,
171                    &x_reg,
172                    &y_reg,
173                    element_gen_type.total_size,
174                    node,
175                    comment,
176                );
177
178                let location = AggregateMemoryLocation {
179                    location: MemoryLocation {
180                        base_ptr_reg: temp_element_ptr.register,
181                        offset: MemoryOffset(0),
182                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
183                    },
184                };
185
186                // Initialize the allocated space first (like variable definition)
187                if element_gen_type.is_aggregate() {
188                    self.emit_initialize_memory_for_any_type(
189                        &location.location,
190                        node,
191                        "initialize grid set allocated space",
192                    );
193                }
194
195                self.emit_expression_into_target_memory(
196                    &location.location,
197                    value_expr,
198                    "grid set",
199                    ctx,
200                );
201            }
202            IntrinsicFunction::GridGet => {
203                let x_expr = &arguments[0];
204                let y_expr = &arguments[1];
205
206                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
207                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
208
209                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
210
211                // Allocate a temporary register to hold the address of the grid element
212                let temp_element_ptr = self.temp_registers.allocate(
213                    VmType::new_contained_in_register(pointer_type()),
214                    "temp for grid element address",
215                );
216
217                // Get the address of the grid element using the opcode
218                self.builder.add_grid_get_entry_addr(
219                    &temp_element_ptr.register,
220                    self_ptr_reg,
221                    &x_reg,
222                    &y_reg,
223                    element_type.total_size,
224                    node,
225                    comment,
226                );
227
228                // Create a memory location from the element address with correct type information
229                let element_memory_location = MemoryLocation {
230                    base_ptr_reg: temp_element_ptr.register,
231                    offset: MemoryOffset(0),
232                    ty: VmType::new_unknown_placement(element_type),
233                };
234
235                // Use emit_copy_value_from_memory_location to handle both register and memory destinations
236                // This will properly handle aggregates (like optionals) vs scalars
237                self.emit_copy_value_from_memory_location(
238                    target_destination,
239                    &element_memory_location,
240                    node,
241                    "copy grid element value to destination",
242                );
243            }
244
245            IntrinsicFunction::GridWidth => {
246                // Allocate a temporary register for the width value
247                let temp = self.temp_registers.allocate(
248                    VmType::new_contained_in_register(u16_type()),
249                    "temp for grid width",
250                );
251
252                // Get the memory location of the width field in the grid header
253                let self_memory_location = AggregateMemoryLocation::new(
254                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
255                        self_ptr_reg.ptr_reg.clone(),
256                    ),
257                );
258                let width_location =
259                    self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());
260
261                // Load the width value from the grid header into the temporary register
262                self.builder.add_ld16_from_pointer_from_memory_location(
263                    &temp.register,
264                    &width_location.location,
265                    node,
266                    comment,
267                );
268
269                // Create a source destination from the temporary register
270                let value_source = Place::Register(temp.register);
271
272                // Use emit_copy_value_between_destinations to handle both register and memory destinations
273                self.emit_copy_value_between_places(
274                    target_destination,
275                    &value_source,
276                    node,
277                    "store grid width to destination",
278                );
279            }
280            IntrinsicFunction::GridHeight => {
281                // Allocate a temporary register for the height value
282                let temp = self.temp_registers.allocate(
283                    VmType::new_contained_in_register(u16_type()),
284                    "temp for grid height",
285                );
286
287                // Get the memory location of the height field in the grid header
288                let self_memory_location = AggregateMemoryLocation::new(
289                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
290                        self_ptr_reg.ptr_reg.clone(),
291                    ),
292                );
293                let height_location =
294                    self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());
295
296                // Load the height value from the grid header into the temporary register
297                self.builder.add_ld16_from_pointer_from_memory_location(
298                    &temp.register,
299                    &height_location.location,
300                    node,
301                    comment,
302                );
303
304                // Create a source destination from the temporary register
305                let value_source = Place::Register(temp.register);
306
307                // Use emit_copy_value_between_destinations to handle both register and memory destinations
308                self.emit_copy_value_between_places(
309                    target_destination,
310                    &value_source,
311                    node,
312                    "store grid height to destination",
313                );
314            }
315            _ => todo!("unhandled grid intrinsic {intrinsic_fn}"),
316        }
317    }
318
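    /// Emits code for vec intrinsics (push, pop, extend, slice, remove, get, clear, copy),
    /// writing any produced value to `output_destination` via the generic copy helpers.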
319    #[allow(clippy::too_many_lines)]
320    fn emit_intrinsic_call_vec(
321        &mut self,
322        output_destination: &Place,
323        intrinsic_fn: &IntrinsicFunction,
324        self_ptr_reg: &PointerLocation,
325        arguments: &[Expression],
326        node: &Node,
327        ctx: &Context,
328    ) {
329        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
330        match intrinsic_fn {
331            IntrinsicFunction::VecPush => {
332                let element_expr = &arguments[0];
333
334                let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);
335
336                let temp_element_ptr = self.temp_registers.allocate(
337                    VmType::new_contained_in_register(pointer_type()),
338                    "pointer to new element",
339                );
340
341                self.builder.add_vec_push_addr(
342                    temp_element_ptr.register(),
343                    &self_ptr_reg.ptr_reg,
344                    node,
345                    "set pointer to new element",
346                );
347
348                let location = AggregateMemoryLocation {
349                    location: MemoryLocation {
350                        base_ptr_reg: temp_element_ptr.register,
351                        offset: MemoryOffset(0),
352                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
353                    },
354                };
355
356                // Initialize the allocated space first (like variable definition)
357                if element_gen_type.is_aggregate() {
358                    self.emit_initialize_memory_for_any_type(
359                        &location.location,
360                        node,
361                        "initialize vec.push allocated space",
362                    );
363                }
364
365                self.emit_expression_into_target_memory(
366                    &location.location,
367                    element_expr,
368                    "vec push",
369                    ctx,
370                );
371            }
372
373            IntrinsicFunction::VecExtend => {
374                let element_expr = &arguments[0];
375                let other_vec_reg = self.emit_scalar_rvalue(element_expr, ctx);
376
377                self.builder.add_vec_extend(
378                    &self_ptr_reg.ptr_reg,
379                    &other_vec_reg,
380                    node,
381                    "extend vec",
382                );
383            }
384
385            IntrinsicFunction::VecCopy => {
386                let output_pointer = self.emit_compute_effective_address_to_register(
387                    output_destination,
388                    node,
389                    "get absolute pointer for vec slice destination",
390                );
391                let output_pointer_location = PointerLocation::new(output_pointer);
392
393                self.builder.add_vec_copy(
394                    &output_pointer_location,
395                    self_ptr_reg,
396                    node,
397                    "copy vector",
398                );
399            }
400
401            IntrinsicFunction::VecPop => {
402                let element_type = self_basic_type.element().unwrap();
403                let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
404                    found_target_reg.clone()
405                } else {
406                    let temp = self.temp_registers.allocate(
407                        VmType::new_contained_in_register(element_type.clone()),
408                        "temp for vec pop",
409                    );
410                    temp.register
411                };
412                self.builder.add_vec_pop(
413                    &pop_target_reg,
414                    &self_ptr_reg.ptr_reg, // mut self
415                    element_type.total_size,
416                    node,
417                    "vec pop",
418                );
419                let source_memory_location = MemoryLocation {
420                    base_ptr_reg: pop_target_reg,
421                    offset: MemoryOffset(0),
422                    ty: VmType::new_unknown_placement(element_type),
423                };
424
425                self.emit_copy_value_from_memory_location(
426                    output_destination,
427                    &source_memory_location,
428                    node,
429                    "copy from vec pop",
430                );
431            }
432
433            IntrinsicFunction::VecSlice => {
434                let range_expr = &arguments[0];
435                let range_region = self.emit_scalar_rvalue(range_expr, ctx);
436
437                let output_pointer = self.emit_compute_effective_address_to_register(
438                    output_destination,
439                    node,
440                    "get absolute pointer for vec slice destination",
441                );
442                let output_pointer_location = PointerLocation::new(output_pointer);
443
444                self.builder.add_vec_copy_range(
445                    &output_pointer_location,
446                    self_ptr_reg,
447                    &range_region,
448                    node,
449                    "vec slice",
450                );
451            }
452
453            IntrinsicFunction::VecRemoveIndex => {
454                let index_region_expr = &arguments[0];
455                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);
456
458
459                self.builder.add_vec_remove_index(
460                    &self_ptr_reg.ptr_reg,
461                    &index_region,
462                    node,
463                    "remove index",
464                );
465            }
466            IntrinsicFunction::VecRemoveIndexGetValue => {
467                let key_expr = &arguments[0];
468                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
469                let element_type = self_basic_type.element().unwrap();
470
471                // Handle both register and memory destinations
472                if let Some(target_reg) = output_destination.register() {
473                    // Direct register destination
474                    self.builder.add_vec_remove_index_get_value(
475                        target_reg,
476                        &self_ptr_reg.ptr_reg, // mut self
477                        &key_region,
478                        node,
479                        "vec remove index get value to register",
480                    );
481                } else {
482                    // Memory destination or other
483                    let temp_reg = self.temp_registers.allocate(
484                        VmType::new_contained_in_register(element_type),
485                        "temp for vec remove index get value",
486                    );
487
488                    self.builder.add_vec_remove_index_get_value(
489                        &temp_reg.register,
490                        &self_ptr_reg.ptr_reg,
491                        &key_region,
492                        node,
493                        "vec remove index get value to temp",
494                    );
495
496                    // Copy from temporary register to destination
497                    let source = Place::Register(temp_reg.register);
498                    self.emit_copy_value_between_places(
499                        output_destination,
500                        &source,
501                        node,
502                        "copy vec element to destination",
503                    );
504                }
505            }
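            // Remove-first is composed from existing ops: subscript entry 0, copy the value
            // out to the destination, then remove index 0 from the vec.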
506            IntrinsicFunction::VecRemoveFirstIndexGetValue => {
507                let zero_reg = self.temp_registers.allocate(
508                    VmType::new_contained_in_register(u8_type()),
509                    "vec remove first. set index 0",
510                );
511                self.builder
512                    .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
513                let value_addr_reg = self.temp_registers.allocate(
514                    VmType::new_contained_in_register(u32_type()),
515                    "vec entry addr to copy from",
516                );
517                let element_type = self_basic_type.element().unwrap();
518                self.builder.add_vec_subscript(
519                    value_addr_reg.register(),
520                    &self_ptr_reg.ptr_reg,
521                    zero_reg.register(),
522                    element_type.total_size,
523                    node,
524                    "lookup first entry in vec",
525                );
526
527                let source_memory_location = MemoryLocation {
528                    base_ptr_reg: value_addr_reg.register,
529                    offset: MemoryOffset(0),
530                    ty: VmType::new_unknown_placement(element_type),
531                };
532
533                self.emit_copy_value_from_memory_location(
534                    output_destination,
535                    &source_memory_location,
536                    node,
537                    "load the vec entry to target register",
538                );
539
540                self.builder.add_vec_remove_index(
541                    &self_ptr_reg.ptr_reg, // mut self
542                    zero_reg.register(),
543                    node,
544                    "vec remove first index",
545                );
546            }
547            IntrinsicFunction::VecClear => {
548                let temp_element_count_reg = self.temp_registers.allocate(
549                    VmType::new_contained_in_register(u16_type()),
550                    "vec_clear zero",
551                );
552                self.builder.add_mov_16_immediate_value(
553                    temp_element_count_reg.register(),
554                    0,
555                    node,
556                    "set to zero",
557                );
558
559                let self_memory_location = AggregateMemoryLocation::new(
560                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
561                        self_ptr_reg.ptr_reg.clone(),
562                    ),
563                );
564
565                self.builder.add_st16_using_ptr_with_offset(
566                    &self_memory_location
567                        .offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type())
568                        .location,
569                    temp_element_count_reg.register(),
570                    node,
571                    "set element_count to zero",
572                );
573            }
574
575            IntrinsicFunction::VecGet => {
576                let key_expr = &arguments[0];
577                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
578                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
579
580                // Similar approach as GridGet - get pointer to element and use copy helpers
581                let temp_element_ptr = self.temp_registers.allocate(
582                    VmType::new_contained_in_register(pointer_type()),
583                    "temp for vec element address",
584                );
585
586                // Get the address of the vector element
587                self.builder.add_vec_subscript(
588                    temp_element_ptr.register(),
589                    &self_ptr_reg.ptr_reg,
590                    &key_region,
591                    element_type.total_size,
592                    node,
593                    "get vec element address",
594                );
595
596                // Create a memory location for the element
597                let element_memory_location = MemoryLocation {
598                    base_ptr_reg: temp_element_ptr.register,
599                    offset: MemoryOffset(0),
600                    ty: VmType::new_unknown_placement(element_type),
601                };
602
603                // Copy from memory location to destination (works for both register and memory)
604                self.emit_copy_value_from_memory_location(
605                    output_destination,
606                    &element_memory_location,
607                    node,
608                    "copy vec element to destination",
609                );
610            }
611            _ => todo!("unhandled vec intrinsic {intrinsic_fn}"),
612        }
613
614        /*
615                   IntrinsicFunction::VecSwap => {
616               let index_a = self
617                   .emit_for_access_or_location(&arguments[0], ctx)
618                   .grab_rvalue()
619                   .clone();
620               let index_b = self
621                   .emit_for_access_or_location(&arguments[1], ctx)
622                   .grab_rvalue()
623                   .clone();
624               self.builder
625                   .add_vec_swap(self_addr.unwrap(), &index_a, &index_b, node, "vec swap");
626           }
627
628           IntrinsicFunction::VecInsert => { // Low prio
629           }
630           IntrinsicFunction::VecFirst => { // Low prio
631           }
632           IntrinsicFunction::VecLast => { // Low prio
633           }
634
635        */
636    }
637
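    /// Int intrinsics over already-materialized registers: `arguments[0]` is `self` and the
    /// result is written directly into `target_reg`.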
638    fn emit_intrinsic_call_int(
639        &mut self,
640        target_reg: &TypedRegister,
641        intrinsic_fn: &IntrinsicFunction,
642        arguments: &[TypedRegister],
643        node: &Node,
644    ) {
645        let first_argument = &arguments[0];
646
647        // Intrinsics can operate on any register directly, no need for register protection
648        match intrinsic_fn {
649            IntrinsicFunction::IntAbs => {
650                self.builder
651                    .add_int_abs(target_reg, first_argument, node, "int abs");
652            }
653
654            IntrinsicFunction::IntRnd => {
655                self.builder
656                    .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
657            }
658            IntrinsicFunction::IntMax => {
659                let int_register = &arguments[1];
660
661                self.builder
662                    .add_int_max(target_reg, first_argument, int_register, node, "int max");
663            }
664            IntrinsicFunction::IntMin => {
665                let int_register = &arguments[1];
666
667                self.builder
668                    .add_int_min(target_reg, first_argument, int_register, node, "int min");
669            }
670            IntrinsicFunction::IntClamp => {
671                let min_reg = &arguments[1];
672                let max_reg = &arguments[2];
673                self.builder.add_int_clamp(
674                    target_reg,
675                    first_argument,
676                    min_reg,
677                    max_reg,
678                    node,
679                    "int clamp",
680                );
681            }
682            IntrinsicFunction::IntToFloat => {
683                self.builder.add_int_to_float(
684                    target_reg,
685                    first_argument,
686                    node,
687                    &format!("int to float {}", first_argument.comment()),
688                );
689            }
690            IntrinsicFunction::IntToString => {
691                self.builder
692                    .add_int_to_string(target_reg, first_argument, node, "int_to_string");
693            }
694            _ => {}
695        }
696        // No need to copy from a temporary register as we're using target_reg directly
697    }
698
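    /// Float (fixed-point) intrinsics over already-materialized registers: `arguments[0]`
    /// is `self` and the result is written directly into `target_reg`.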
699    #[allow(clippy::too_many_lines)]
700    fn emit_intrinsic_call_fixed(
701        &mut self,
702        target_reg: &TypedRegister,
703        intrinsic_fn: &IntrinsicFunction,
704        arguments: &[TypedRegister],
705        node: &Node,
706    ) {
707        // Intrinsics can operate directly on any register, no need for temporary registers
708        let first_argument_reg = &arguments[0];
709        match intrinsic_fn {
710            IntrinsicFunction::FloatRound => {
711                self.builder
712                    .add_float_round(target_reg, first_argument_reg, node, "float round");
713            }
714            IntrinsicFunction::FloatFloor => {
715                self.builder
716                    .add_float_floor(target_reg, first_argument_reg, node, "float floor");
717            }
718            IntrinsicFunction::FloatSqrt => {
719                self.builder
720                    .add_float_sqrt(target_reg, first_argument_reg, node, "float sqrt");
721            }
722            IntrinsicFunction::FloatSign => {
723                self.builder
724                    .add_float_sign(target_reg, first_argument_reg, node, "float sign");
725            }
726            IntrinsicFunction::FloatAbs => {
727                self.builder
728                    .add_float_abs(target_reg, first_argument_reg, node, "float abs");
729            }
730            IntrinsicFunction::FloatRnd => {
731                self.builder.add_float_prnd(
732                    target_reg,
733                    first_argument_reg,
734                    node,
735                    "float pseudo random",
736                );
737            }
738            IntrinsicFunction::FloatCos => {
739                self.builder
740                    .add_float_cos(target_reg, first_argument_reg, node, "float cos");
741            }
742            IntrinsicFunction::FloatSin => {
743                self.builder
744                    .add_float_sin(target_reg, first_argument_reg, node, "float sin");
745            }
746            IntrinsicFunction::FloatAcos => {
747                self.builder
748                    .add_float_acos(target_reg, first_argument_reg, node, "float acos");
749            }
750            IntrinsicFunction::FloatAsin => {
751                self.builder
752                    .add_float_asin(target_reg, first_argument_reg, node, "float asin");
753            }
754            IntrinsicFunction::FloatAtan2 => {
755                self.builder
756                    .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
757            }
758            IntrinsicFunction::FloatMin => {
759                let float_region = &arguments[1];
760                self.builder.add_float_min(
761                    target_reg,
762                    first_argument_reg,
763                    float_region,
764                    node,
765                    "float min",
766                );
767            }
768            IntrinsicFunction::FloatMax => {
769                let float_region = &arguments[1];
770                self.builder.add_float_max(
771                    target_reg,
772                    first_argument_reg,
773                    float_region,
774                    node,
775                    "float max",
776                );
777            }
778            IntrinsicFunction::FloatClamp => {
779                let float_region = &arguments[1];
780                let float_b_region = &arguments[2];
781
782                self.builder.add_float_clamp(
783                    target_reg,
784                    float_region,
785                    first_argument_reg,
786                    float_b_region,
787                    node,
788                    "float clamp",
789                );
790            }
791            IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
792                target_reg,
793                first_argument_reg,
794                node,
795                "float_to_string",
796            ),
797            _ => panic!("not a float intrinsic: {intrinsic_fn}"),
798        }
799        // No need to copy from temp register to target as we're using target_reg directly
800    }
801
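    /// Emits a collection transformer (`for`, `while`, `filter`, `find`) by iterating the
    /// collection at `self_addr` and running the lambda body for each element.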
802    pub fn emit_intrinsic_transformer(
803        &mut self,
804        target_destination: &Place,
805        intrinsic_fn: &IntrinsicFunction,
806        self_addr: &PointerLocation,
807        lambda: (Vec<VariableRef>, &Expression),
808        node: &Node,
809        ctx: &Context,
810    ) {
811        match intrinsic_fn {
812            IntrinsicFunction::TransformerFold => { // Low prio
813            }
814            IntrinsicFunction::TransformerFilter => {
815                self.emit_iterate_over_collection_with_lambda(
816                    target_destination,
817                    node,
818                    Collection::Vec,
819                    Transformer::Filter,
820                    &self_addr.ptr_reg,
821                    lambda,
822                    ctx,
823                );
824            }
825
826            IntrinsicFunction::TransformerFor => {
827                self.emit_iterate_over_collection_with_lambda(
828                    target_destination,
829                    node,
830                    Collection::Vec,
831                    Transformer::For,
832                    &self_addr.ptr_reg,
833                    lambda,
834                    ctx,
835                );
836            }
837            IntrinsicFunction::TransformerWhile => {
838                self.emit_iterate_over_collection_with_lambda(
839                    target_destination,
840                    node,
841                    Collection::Vec,
842                    Transformer::While,
843                    &self_addr.ptr_reg,
844                    lambda,
845                    ctx,
846                );
847            }
848
849            IntrinsicFunction::TransformerFind => {
850                self.emit_iterate_over_collection_with_lambda(
851                    target_destination,
852                    node,
853                    Collection::Vec,
854                    Transformer::Find,
855                    &self_addr.ptr_reg,
856                    lambda,
857                    ctx,
858                );
859            }
860            _ => todo!("{intrinsic_fn}"),
861        }
862    }
863
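    /// Variant that takes `self` as a `Place`: loads it as a scalar value or an absolute
    /// aggregate pointer, then delegates to `emit_single_intrinsic_call_with_self`.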
864    #[allow(clippy::too_many_lines)]
865    #[allow(clippy::too_many_arguments)]
866    pub fn emit_single_intrinsic_call_with_self_destination(
867        &mut self,
868        target_destination: &Place,
869        node: &Node,
870        intrinsic_fn: &IntrinsicFunction,
871        self_destination: Option<&Place>,
872        arguments: &[ArgumentExpression],
873        ctx: &Context,
874        comment: &str,
875    ) {
876        // Use the helper function to properly materialize the self argument
877        let self_reg = if let Some(self_dest) = self_destination {
878            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
879        } else {
880            None
881        };
882
883        // Delegate to the existing function
884        self.emit_single_intrinsic_call_with_self(
885            target_destination,
886            node,
887            intrinsic_fn,
888            self_reg.as_ref(),
889            arguments,
890            ctx,
891            comment,
892        );
893    }
894
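    /// Central dispatch for intrinsic calls. Scalar (int/float) intrinsics are lowered via
    /// the register helpers; collection intrinsics (vec, grid, sparse, transformers) expect
    /// `self` to already be a flattened pointer to the collection.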
895    #[allow(clippy::too_many_lines)]
896    #[allow(clippy::too_many_arguments)]
897    pub fn emit_single_intrinsic_call_with_self(
898        &mut self,
899        target_destination: &Place,
900        node: &Node,
901        intrinsic_fn: &IntrinsicFunction,
902        self_reg: Option<&TypedRegister>,
903        arguments: &[ArgumentExpression],
904        ctx: &Context,
905        comment: &str,
906    ) {
907        let maybe_target = target_destination.register();
908
909        match intrinsic_fn {
910            IntrinsicFunction::Float2Magnitude
911            | IntrinsicFunction::FloatAbs
912            | IntrinsicFunction::FloatRound
913            | IntrinsicFunction::FloatFloor
914            | IntrinsicFunction::FloatSqrt
915            | IntrinsicFunction::FloatSign
916            | IntrinsicFunction::FloatRnd
917            | IntrinsicFunction::FloatCos
918            | IntrinsicFunction::FloatSin
919            | IntrinsicFunction::FloatAcos
920            | IntrinsicFunction::FloatAsin
921            | IntrinsicFunction::FloatAtan2
922            | IntrinsicFunction::FloatMin
923            | IntrinsicFunction::FloatMax
924            | IntrinsicFunction::FloatClamp
925            | IntrinsicFunction::FloatToString => {
926                // Float
927                let (temp_reg, dest_reg) = if target_destination.is_register() {
928                    (None, target_destination.register().unwrap().clone())
929                } else {
930                    let temp_reg = self.temp_registers.allocate(
931                        VmType::new_contained_in_register(float_type()),
932                        "temporary destination for low level intrinsic",
933                    );
934
935                    (Some(temp_reg.register.clone()), temp_reg.register)
936                };
937
938                // Materialize self to ensure we have the actual scalar value
939                let mut converted_regs = vec![self_reg.unwrap().clone()];
940                for arg in arguments {
941                    let (ArgumentExpression::Expression(found_expression)
942                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
943                    else {
944                        panic!("must be expression");
945                    };
946                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
947                    converted_regs.push(materialized_arg);
948                }
949
950                self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);
951
952                if let Some(temp_reg) = temp_reg {
953                    self.emit_store_scalar_to_memory_offset_instruction(
954                        target_destination.grab_memory_location(),
955                        &temp_reg,
956                        node,
957                        "store the fixed point value into memory",
958                    );
959                }
960            }
961
962            IntrinsicFunction::IntToFloat => {
963                // IntToFloat - special case because it returns a float, not an int
964                let (temp_reg, dest_reg) = if target_destination.is_register() {
965                    (None, target_destination.register().unwrap().clone())
966                } else {
967                    let temp_reg = self.temp_registers.allocate(
968                        VmType::new_contained_in_register(float_type()),
969                        "temporary destination for int to float intrinsic",
970                    );
971
972                    (Some(temp_reg.register.clone()), temp_reg.register)
973                };
974
975                // Self is already materialized as a register
976                let int_value_reg = self_reg.unwrap();
977
978                // Now convert the materialized integer value to float
979                self.builder.add_int_to_float(
980                    &dest_reg,
981                    int_value_reg,
982                    node,
983                    &format!("int to float {}", int_value_reg.comment()),
984                );
985
986                if let Some(temp_reg) = temp_reg {
987                    self.emit_store_scalar_to_memory_offset_instruction(
988                        target_destination.grab_memory_location(),
989                        &temp_reg,
990                        node,
991                        "store the float result from int to float conversion",
992                    );
993                }
994            }
995
996            IntrinsicFunction::IntAbs
997            | IntrinsicFunction::IntRnd
998            | IntrinsicFunction::IntMax
999            | IntrinsicFunction::IntMin
1000            | IntrinsicFunction::IntClamp
1001            | IntrinsicFunction::IntToString => {
1002                // Int
1003                let (temp_reg, dest_reg) = if target_destination.is_register() {
1004                    let target_reg = target_destination.register().unwrap();
1005                    // Intrinsics can operate on any register directly, no special treatment needed
1006                    (None, target_reg.clone())
1007                } else {
1008                    let temp_reg = self.temp_registers.allocate(
1009                        VmType::new_contained_in_register(u32_type()),
1010                        "temporary destination for low level intrinsic",
1011                    );
1012
1013                    (Some(temp_reg.register.clone()), temp_reg.register)
1014                };
1015
1016                // Materialize additional arguments (self is already materialized)
1017                let mut converted_regs = vec![self_reg.unwrap().clone()];
1018                for arg in arguments {
1019                    let (ArgumentExpression::Expression(found_expression)
1020                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1021                    else {
1022                        panic!("must be expression");
1023                    };
1024                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
1025                    converted_regs.push(materialized_arg);
1026                }
1027
1028                self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);
1029
1030                if let Some(temp_reg) = temp_reg {
1031                    if target_destination.is_register() {
1032                        // Copy from temp to target register
1033                        self.builder.add_mov_reg(
1034                            target_destination.register().unwrap(),
1035                            &temp_reg,
1036                            node,
1037                            "copy intrinsic result from temp to target register",
1038                        );
1039                    } else {
1040                        // Store to memory location
1041                        self.emit_store_scalar_to_memory_offset_instruction(
1042                            target_destination.grab_memory_location(),
1043                            &temp_reg,
1044                            node,
1045                            "put the low level intrinsic fixed (int) back to memory",
1046                        );
1047                    }
1048                }
1049            }
1050
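            // EnumFromDiscriminant writes the discriminant byte through the enum's own
            // pointer, mutating the value in place rather than building a new enum value.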
1051            IntrinsicFunction::EnumFromDiscriminant => {
1052                let enum_pointer = PointerLocation {
1053                    ptr_reg: self_reg.unwrap().clone(),
1054                };
1055
1056                assert_eq!(arguments.len(), 1, "EnumFromDiscriminant takes exactly one argument");
1057                // Materialize additional arguments (self is already materialized)
1058                let mut converted_regs = vec![self_reg.unwrap().clone()];
1059                for arg in arguments {
1060                    let (ArgumentExpression::Expression(found_expression)
1061                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1062                    else {
1063                        panic!("must be expression");
1064                    };
1065                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
1066                    converted_regs.push(materialized_arg);
1067                }
1068
1069                // TODO: @important: Support different sizes of discriminants
1070                self.builder.add_st8_using_ptr_with_offset(
1071                    &MemoryLocation::new_copy_over_whole_type_with_zero_offset(
1072                        enum_pointer.ptr_reg,
1073                    ),
1074                    &converted_regs[1], // the discriminant to set
1075                    node,
1076                    "overwrite the discriminant in the enum pointer",
1077                );
1078            }
1079
1080            IntrinsicFunction::EnumDiscriminant => {
1081                let enum_pointer = PointerLocation {
1082                    ptr_reg: self_reg.unwrap().clone(),
1083                };
1084
1085                let discriminant_temp_reg = self.temp_registers.allocate(
1086                    VmType::new_contained_in_register(u8_type()),
1087                    "temp register for fetching discriminant",
1088                );
1089
1090                // TODO: @important: Support different sizes of discriminants
1091                self.builder.add_ld8_from_pointer_with_offset(
1092                    discriminant_temp_reg.register(),
1093                    &enum_pointer.ptr_reg,
1094                    MemoryOffset(0), // TODO: support proper tag offsets
1095                    node,
1096                    "get the discriminant from the enum pointer",
1097                );
1098
1099                if target_destination.is_register() {
1100                    self.builder.add_mov_reg(
1101                        target_destination.register().unwrap(),
1102                        &discriminant_temp_reg.register,
1103                        node,
1104                        "copy discriminant in register to target register",
1105                    );
1106                } else {
1107                    self.emit_store_scalar_to_memory_offset_instruction(
1108                        target_destination.grab_memory_location(),
1109                        &discriminant_temp_reg.register,
1110                        node,
1111                        "store discriminant in register to target memory location",
1112                    );
1113                }
1114            }
1115
1122            IntrinsicFunction::VecPush
1123            | IntrinsicFunction::VecPop
1124            | IntrinsicFunction::VecExtend
1125            | IntrinsicFunction::VecRemoveIndex
1126            | IntrinsicFunction::VecRemoveIndexGetValue
1127            | IntrinsicFunction::VecRemoveFirstIndexGetValue
1128            | IntrinsicFunction::VecClear
1129            | IntrinsicFunction::VecSlice
1130            | IntrinsicFunction::VecSwap
1131            | IntrinsicFunction::VecInsert
1132            | IntrinsicFunction::VecFirst
1133            | IntrinsicFunction::VecGet
1134            | IntrinsicFunction::VecCopy
1135            | IntrinsicFunction::VecLast => {
1136                // Vec
1137                // Self is assumed to be a flattened pointer:
1138                let vec_self_ptr_reg = PointerLocation {
1139                    ptr_reg: self_reg.unwrap().clone(),
1140                };
1141                let converted_to_expressions: Vec<_> = arguments
1142                    .iter()
1143                    .map(|arg| {
1144                        let (ArgumentExpression::Expression(found_expression)
1145                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1146                        else {
1147                            panic!("must be expression");
1148                        };
1149                        found_expression.clone()
1150                    })
1151                    .collect();
1152
1153                self.emit_intrinsic_call_vec(
1154                    target_destination,
1155                    intrinsic_fn,
1156                    &vec_self_ptr_reg,
1157                    &converted_to_expressions,
1158                    node,
1159                    ctx,
1160                );
1161            }
1162
1163            IntrinsicFunction::GridGet
1164            | IntrinsicFunction::GridSet
1165            | IntrinsicFunction::GridWidth
1166            | IntrinsicFunction::GridHeight => {
1167                // Grid
1168                // Self is assumed to be a flattened pointer:
1169                let grid_self_ptr_reg = PointerLocation {
1170                    ptr_reg: self_reg.unwrap().clone(),
1171                };
1172                let converted_to_expressions: Vec<_> = arguments
1173                    .iter()
1174                    .map(|arg| {
1175                        let (ArgumentExpression::Expression(found_expression)
1176                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1177                        else {
1178                            panic!("must be expression");
1179                        };
1180                        found_expression.clone()
1181                    })
1182                    .collect();
1183                self.emit_intrinsic_grid(
1184                    target_destination,
1185                    intrinsic_fn,
1186                    &grid_self_ptr_reg,
1187                    &converted_to_expressions,
1188                    node,
1189                    comment,
1190                    ctx,
1191                );
1192            }
1193
1194            IntrinsicFunction::SparseIsAlive
1195            | IntrinsicFunction::SparseRemove
1196            | IntrinsicFunction::SparseAdd => {
1197                // Sparse
1198                // Self is assumed to be a flattened pointer:
1199                let grid_self_ptr_reg = PointerLocation {
1200                    ptr_reg: self_reg.unwrap().clone(),
1201                };
1202                let converted_to_expressions: Vec<_> = arguments
1203                    .iter()
1204                    .map(|arg| {
1205                        let (ArgumentExpression::Expression(found_expression)
1206                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1207                        else {
1208                            panic!("must be expression");
1209                        };
1210                        found_expression.clone()
1211                    })
1212                    .collect();
1213                self.emit_intrinsic_sparse(
1214                    target_destination,
1215                    intrinsic_fn,
1216                    &grid_self_ptr_reg,
1217                    &converted_to_expressions,
1218                    node,
1219                    comment,
1220                    ctx,
1221                );
1222            }
1223
1224            IntrinsicFunction::TransformerFor
1225            | IntrinsicFunction::TransformerWhile
1226            | IntrinsicFunction::TransformerFindMap
1227            | IntrinsicFunction::TransformerAny
1228            | IntrinsicFunction::TransformerAll
1229            | IntrinsicFunction::TransformerMap
1230            | IntrinsicFunction::TransformerFilter
1231            | IntrinsicFunction::TransformerFilterMap
1232            | IntrinsicFunction::TransformerFind
1233            | IntrinsicFunction::TransformerFold => {
1234                // Self is assumed to be a flattened pointer:
1235                let collection_self_ptr_reg = PointerLocation {
1236                    ptr_reg: self_reg.unwrap().clone(),
1237                };
1238
1239                let lambda_expression = &arguments[0];
1240
1241                // Take out lambda and other lookups before generating the code
1242                let ArgumentExpression::Expression(expr) = lambda_expression else {
1243                    panic!("transformer argument must be a lambda expression");
1244                };
1245
1246                let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
1247                    panic!("must have lambda for transformers");
1248                };
1249
1250                self.emit_intrinsic_transformer(
1251                    target_destination,
1252                    intrinsic_fn,
1253                    &collection_self_ptr_reg,
1254                    (lambda_variables.clone(), lambda_expr),
1255                    node,
1256                    ctx,
1257                );
1258            }
1259
1260            IntrinsicFunction::RuntimePanic => {
1261                self.builder
1262                    .add_panic(self_reg.unwrap(), node, "intrinsic panic");
1263            }
1264
1265            IntrinsicFunction::RuntimeHalt => {
1266                self.builder.add_halt(node, "intrinsic halt");
1267            }
1268
1269            IntrinsicFunction::RuntimeStep => {
1270                self.builder.add_step(node, "intrinsic step");
1271            }
1272
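            // RangeInit: `self` is the start value; the arguments are the end value and the
            // inclusivity flag. The range is written through an effective address computed
            // for the target destination.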
1273            IntrinsicFunction::RangeInit => {
1274                let start_reg = self_reg.unwrap();
1279
1280                let end_arg = &arguments[0];
1281                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
1282                    panic!("range end must be an expression");
1283                };
1284                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);
1285
1286                let is_inclusive = &arguments[1];
1287                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
1288                    panic!("range inclusive flag must be an expression");
1289                };
1290                let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
1291                let absolute_range_pointer = self.emit_compute_effective_address_to_register(
1292                    target_destination,
1293                    node,
1294                    "create range target pointer",
1295                );
1296                self.builder.add_range_init(
1297                    &absolute_range_pointer,
1298                    start_reg,
1299                    &end_reg,
1300                    &is_inclusive_reg,
1301                    node,
1302                    "create a range",
1303                );
1304            }
1305
1306            // Codepoint
            IntrinsicFunction::CodepointToString => {
                if target_destination.is_register() {
                    self.builder.add_codepoint_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for codepoint to string intrinsic",
                    );

                    self.builder.add_codepoint_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store codepoint to string result to memory",
                    );
                }
            }

            IntrinsicFunction::CodepointToInt => {
                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store codepoint to int result to memory",
                    );
                }
            }

            // Byte
            IntrinsicFunction::ByteToString => {
                if target_destination.is_register() {
                    self.builder.byte_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for byte to string intrinsic",
                    );

                    self.builder.byte_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to string result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToInt => {
                if target_destination.is_register() {
                    // It is safe to "upcast" to an i32 from a u8, so just copy the register
                    // TODO: Make something smarter so we don't have to copy
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to int result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToFloat => {
                if target_destination.is_register() {
                    // Use existing int-to-float conversion since byte is just a small integer
                    self.builder.add_int_to_float(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary for byte to float intrinsic",
                    );

                    // Use existing int-to-float conversion since byte is just a small integer
                    self.builder.add_int_to_float(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to float result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToCodepoint => {
                if target_destination.is_register() {
                    // It is safe to "upcast" to a codepoint from a u8, so just copy the register
                    // TODO: Make something smarter so we don't have to copy
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_codepoint",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to codepoint result to memory",
                    );
                }
            }

            // Bool
            IntrinsicFunction::BoolToString => {
                if target_destination.is_register() {
                    self.builder.bool_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bool_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for bool to string intrinsic",
                    );

                    self.builder.bool_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "bool_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store bool to string result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteVectorToString => {
                if target_destination.is_register() {
                    self.builder.add_bytes_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for bytes to string intrinsic",
                    );

                    self.builder.add_bytes_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store converted utf8 string to memory",
                    );
                }
            }

            IntrinsicFunction::ByteVectorToStringStorage => {
                if target_destination.is_register() {
                    self.builder.add_bytes_to_string_storage(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string_storage",
                    );
                } else {
                    let target_pointer_reg = self.emit_compute_effective_address_to_register(
                        target_destination,
                        node,
                        "get pointer to target string storage",
                    );

                    self.builder.add_bytes_to_string_storage(
                        &target_pointer_reg,
                        self_reg.unwrap(),
                        node,
                        "bytes_to_string_storage",
                    );
                }
            }

            IntrinsicFunction::StringDuplicate => {
                if target_destination.is_register() {
                    self.builder.add_string_duplicate(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "string_duplicate",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for string duplicate intrinsic",
                    );

                    self.builder.add_string_duplicate(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "string_duplicate",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string duplicate result to memory",
                    );
                }
            }

            IntrinsicFunction::StringToString => {
                if target_destination.is_register() {
                    self.builder.add_string_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for string to string intrinsic",
                    );

                    self.builder.add_string_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string to string result to memory",
                    );
                }
            }

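            // StringStartsWith: the needle may arrive as a plain or materialized
            // expression, so it is lowered to an Expression and then to a scalar
            // register before the comparison is emitted.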
            IntrinsicFunction::StringStartsWith => {
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
                if target_destination.is_register() {
                    self.builder.add_string_starts_with(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        &other_str,
                        node,
                        "string_starts_with",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u8_type()),
                        "temporary for string starts with intrinsic",
                    );

                    self.builder.add_string_starts_with(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        &other_str,
                        node,
                        "string_starts_with",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string starts with result to memory",
                    );
                }
            }

            IntrinsicFunction::StringToInt => {
                let pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "need pointer to tuple",
                );
                self.builder
                    .add_string_to_int(&pointer, self_reg.unwrap(), node, "string to int");
            }
            IntrinsicFunction::StringToFloat => {
                let pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "need pointer to tuple",
                );
                self.builder.add_string_to_float(
                    &pointer,
                    self_reg.unwrap(),
                    node,
                    "string to float",
                );
            }

            // Common Collection
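            // These arms rely on the shared collection header layout: element_count
            // and capacity are u16 fields at fixed offsets from the collection
            // pointer (see the emit_collection_* helpers further down).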
            IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
                let collection_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                if target_destination.is_register() {
                    self.emit_collection_is_empty(
                        target_destination.register().unwrap().clone(),
                        &collection_pointer,
                        node,
                        "collection is empty",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u8_type()),
                        "temporary for collection is empty intrinsic",
                    );

                    self.emit_collection_is_empty(
                        temp_reg.register.clone(),
                        &collection_pointer,
                        node,
                        "collection is empty",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store collection is empty result to memory",
                    );
                }
            }

            IntrinsicFunction::StringLen
            | IntrinsicFunction::MapLen
            | IntrinsicFunction::VecLen => {
                let collection_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                if target_destination.is_register() {
                    self.emit_collection_len(
                        target_destination.register().unwrap(),
                        &collection_pointer,
                        node,
                        "get the collection element_count",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u16_type()),
                        "temporary for collection len intrinsic",
                    );

                    self.emit_collection_len(
                        &temp_reg.register,
                        &collection_pointer,
                        node,
                        "get the collection element_count",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store collection len result to memory",
                    );
                }
            }
            IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
                let collection_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                if target_destination.is_register() {
                    self.emit_collection_capacity(
                        target_destination.register().unwrap(),
                        &collection_pointer,
                        node,
                        "get the collection capacity",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u16_type()),
                        "temporary for collection capacity intrinsic",
                    );

                    self.emit_collection_capacity(
                        &temp_reg.register,
                        &collection_pointer,
                        node,
                        "get the collection capacity",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store collection capacity result to memory",
                    );
                }
            }

            IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
                // Map
                // Self is assumed to be a flattened pointer:
                let map_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_map(
                    target_destination,
                    intrinsic_fn,
                    &map_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

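            // NOTE: presumably intentional that no code is emitted here; the u32
            // value is assumed to already be usable directly as a pointer.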
            IntrinsicFunction::PtrFromU32 => {}
        }
    }

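    /// Materializes the key into addressable memory (the map needs it in memory
    /// to compute its hash) and emits the map-remove instruction against the
    /// map header pointer.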
    fn emit_intrinsic_map_remove(
        &mut self,
        map_header_reg: &PointerLocation,
        key_expression: &Expression,
        ctx: &Context,
    ) {
        let key_register =
            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);

        self.builder
            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "");
    }

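    /// Loads the collection's capacity (a u16 at `COLLECTION_CAPACITY_OFFSET`
    /// in the collection header) into `output_reg`.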
    fn emit_collection_capacity(
        &mut self,
        output_reg: &TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_CAPACITY_OFFSET,
            node,
            comment,
        );
    }

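    /// Loads the collection's element count (a u16 at
    /// `COLLECTION_ELEMENT_COUNT_OFFSET` in the collection header) into `output_reg`.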
    fn emit_collection_len(
        &mut self,
        output_reg: &TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_ELEMENT_COUNT_OFFSET,
            node,
            &format!("{comment} - collection element_count"),
        );
    }

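    /// Loads the element count and then applies `meqz`, leaving a bool in
    /// `output_reg` that is true exactly when the collection is empty.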
    fn emit_collection_is_empty(
        &mut self,
        output_reg: TypedRegister,
        collection_addr: &PointerLocation,
        node: &Node,
        _comment: &str,
    ) {
        self.builder.add_ld16_from_pointer_with_offset_u16(
            &output_reg,
            &collection_addr.ptr_reg,
            COLLECTION_ELEMENT_COUNT_OFFSET,
            node,
            "get the collection length for testing if it is empty",
        );
        self.builder.add_meqz(
            &output_reg,
            &output_reg,
            node,
            "convert the collection length to inverted bool",
        );
    }
}