swamp_code_gen/
intr.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::code_bld::CodeBuilder;
use crate::ctx::Context;

use crate::transformer::{Collection, Transformer};
use source_map_node::Node;
use swamp_semantic::intr::IntrinsicFunction;
use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
use swamp_vm_types::types::{
    float_type, int_type, pointer_type, u16_type, u32_type, u8_type, Destination, TypedRegister,
    VmType,
};
use swamp_vm_types::{
    AggregateMemoryLocation, MemoryLocation, MemoryOffset,
    PointerLocation, COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET, GRID_HEADER_HEIGHT_OFFSET,
    GRID_HEADER_WIDTH_OFFSET,
};

impl CodeBuilder<'_> {
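    /// Emits a single intrinsic call from unevaluated argument expressions.
    ///
    /// The first argument, if present, is treated as `self` and materialized into a
    /// scalar register up front; the remaining arguments are forwarded unevaluated to
    /// `emit_single_intrinsic_call_with_self`.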
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::single_match_else)]
    pub fn emit_single_intrinsic_call(
        &mut self,
        target_reg: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[ArgumentExpression],
        ctx: &Context,
    ) {
        // For primitive intrinsics, materialize the self argument to a register early
        let self_reg = if arguments.is_empty() {
            None
        } else {
            let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
                panic!("Expected expression for self argument");
            };
            Some(self.emit_scalar_rvalue(self_expr, ctx))
        };

        let rest_args: &[ArgumentExpression] = if arguments.len() > 1 {
            &arguments[1..]
        } else {
            &[]
        };
        self.emit_single_intrinsic_call_with_self(
            target_reg,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            rest_args,
            ctx,
            "single intrinsic call",
        );
    }

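    /// Emits code for the map intrinsics (`MapHas`, `MapRemove`).
    ///
    /// The key argument is materialized into temporary storage first so the map
    /// implementation can compute its hash; the remaining map intrinsics are still `todo!`.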
    pub fn emit_intrinsic_map(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::MapHas => {
                let key_argument = &arguments[0];
                // The key has to be materialized in temporary storage so the map can calculate its hash.
                let key_temp_storage_reg =
                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);

                self.builder.add_map_has(
                    output_destination.register().unwrap(),
                    self_ptr_reg,
                    &key_temp_storage_reg,
                    node,
                    "map_has",
                );
            }
            IntrinsicFunction::MapRemove => {
                let key_argument = &arguments[0];
                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
            }
            _ => todo!("missing intrinsic_map {intrinsic_fn}"),
        }
    }

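    /// Emits code for the sparse-collection intrinsics (`SparseAdd`, `SparseRemove`,
    /// `SparseIsAlive`) by dispatching to the dedicated `emit_sparse_*` helpers.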
    pub fn emit_intrinsic_sparse(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::SparseAdd => {
                let element_to_add_expression = &arguments[0];
                self.emit_sparse_add(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    element_to_add_expression,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::SparseRemove => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
            }

            IntrinsicFunction::SparseIsAlive => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_is_alive(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    sparse_id_int_expression,
                    node,
                    ctx,
                );
            }
            _ => todo!("unhandled sparse intrinsic {intrinsic_fn}"),
        }
    }
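    /// Emits code for the grid intrinsics (`GridSet`, `GridGet`, `GridWidth`, `GridHeight`).
    ///
    /// Element accesses go through a temporary pointer produced by
    /// `add_grid_get_entry_addr`; width and height are 16-bit loads from the grid
    /// header at `GRID_HEADER_WIDTH_OFFSET` / `GRID_HEADER_HEIGHT_OFFSET`.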
    pub fn emit_intrinsic_grid(
        &mut self,
        target_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::GridSet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];
                let value_expr = &arguments[2];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
                let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(element_gen_type.clone()),
                    "temporary scalar",
                );

                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_gen_type.total_size,
                    node,
                    comment,
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                // Initialize the allocated space first (like variable definition)
                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize grid set allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    value_expr,
                    "grid set",
                    ctx,
                );
            }
            IntrinsicFunction::GridGet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);

                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                // Allocate a temporary register to hold the address of the grid element
                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for grid element address",
                );

                // Get the address of the grid element using the opcode
                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_type.total_size,
                    node,
                    comment,
                );

                // Create a memory location from the element address with correct type information
                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                // Use emit_copy_value_from_memory_location to handle both register and memory destinations
                // This will properly handle aggregates (like optionals) vs scalars
                self.emit_copy_value_from_memory_location(
                    target_destination,
                    &element_memory_location,
                    node,
                    "copy grid element value to destination",
                );
            }

            IntrinsicFunction::GridWidth => {
                // Allocate a temporary register for the width value
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid width",
                );

                // Get the memory location of the width field in the grid header
                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let width_location =
                    self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());

                // Load the width value from the grid header into the temporary register
                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &width_location.location,
                    node,
                    comment,
                );

                // Create a source destination from the temporary register
                let value_source = Destination::Register(temp.register);

                // Use emit_copy_value_between_destinations to handle both register and memory destinations
                self.emit_copy_value_between_destinations(
                    target_destination,
                    &value_source,
                    node,
                    "store grid width to destination",
                );
            }
            IntrinsicFunction::GridHeight => {
                // Allocate a temporary register for the height value
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid height",
                );

                // Get the memory location of the height field in the grid header
                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let height_location =
                    self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());

                // Load the height value from the grid header into the temporary register
                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &height_location.location,
                    node,
                    comment,
                );

                // Create a source destination from the temporary register
                let value_source = Destination::Register(temp.register);

                // Use emit_copy_value_between_destinations to handle both register and memory destinations
                self.emit_copy_value_between_destinations(
                    target_destination,
                    &value_source,
                    node,
                    "store grid height to destination",
                );
            }
            _ => todo!("unhandled grid intrinsic {intrinsic_fn}"),
        }
    }

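    /// Emits code for the vec intrinsics (push, pop, extend, slice, subscript, removal, clear).
    ///
    /// `self_ptr_reg` is expected to point at the vec collection header; element
    /// addresses are produced by the vec opcodes and the values are then moved to the
    /// requested destination via the generic copy helpers.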
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_vec(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        ctx: &Context,
    ) {
        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
        match intrinsic_fn {
            IntrinsicFunction::VecPush => {
                let element_expr = &arguments[0];

                let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "pointer to new element",
                );

                self.builder.add_vec_push_addr(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    node,
                    "set pointer to new element",
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                // Initialize the allocated space first (like variable definition)
                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize vec.push allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    element_expr,
                    "vec push",
                    ctx,
                );
            }

            IntrinsicFunction::VecExtend => {
                let element_expr = &arguments[0];
                let other_vec_reg = self.emit_scalar_rvalue(element_expr, ctx);

                self.builder.add_vec_extend(
                    &self_ptr_reg.ptr_reg,
                    &other_vec_reg,
                    node,
                    "extend vec",
                );
            }

            IntrinsicFunction::VecPop => {
                let element_type = self_basic_type.element().unwrap();
                let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
                    found_target_reg.clone()
                } else {
                    let temp = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type.clone()),
                        "temp for vec pop",
                    );
                    temp.register
                };
                self.builder.add_vec_pop(
                    &pop_target_reg,
                    &self_ptr_reg.ptr_reg, // mut self
                    element_type.total_size,
                    node,
                    "vec pop",
                );
                let source_memory_location = MemoryLocation {
                    base_ptr_reg: pop_target_reg,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "copy from vec pop",
                );
            }

            IntrinsicFunction::VecSlice => {
                let range_expr = &arguments[0];
                let range_region = self.emit_scalar_rvalue(range_expr, ctx);

                let output_pointer = self.emit_compute_effective_address_to_register(
                    output_destination,
                    node,
                    "get absolute pointer for vec slice destination",
                );
                let output_pointer_location = PointerLocation::new(output_pointer);

                self.builder.add_vec_copy_range(
                    &output_pointer_location,
                    self_ptr_reg,
                    &range_region,
                    node,
                    "vec slice",
                );
            }

            IntrinsicFunction::VecRemoveIndex => {
                let index_region_expr = &arguments[0];
                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    &index_region,
                    node,
                    "remove index",
                );
            }
            IntrinsicFunction::VecRemoveIndexGetValue => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_basic_type.element().unwrap();

                // Handle both register and memory destinations
                if let Some(target_reg) = output_destination.register() {
                    // Direct register destination
                    self.builder.add_vec_remove_index_get_value(
                        target_reg,
                        &self_ptr_reg.ptr_reg, // mut self
                        &key_region,
                        node,
                        "vec remove index get value to register",
                    );
                } else {
                    // Memory destination or other
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type),
                        "temp for vec remove index get value",
                    );

                    self.builder.add_vec_remove_index_get_value(
                        &temp_reg.register,
                        &self_ptr_reg.ptr_reg,
                        &key_region,
                        node,
                        "vec remove index get value to temp",
                    );

                    // Copy from temporary register to destination
                    let source = Destination::Register(temp_reg.register);
                    self.emit_copy_value_between_destinations(
                        output_destination,
                        &source,
                        node,
                        "copy vec element to destination",
                    );
                }
            }
            IntrinsicFunction::VecRemoveFirstIndexGetValue => {
                let zero_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "vec remove first. set index 0",
                );
                self.builder
                    .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
                let value_addr_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u32_type()),
                    "vec entry addr to copy from",
                );
                let element_type = self_basic_type.element().unwrap();
                self.builder.add_vec_subscript(
                    value_addr_reg.register(),
                    &self_ptr_reg.ptr_reg,
                    zero_reg.register(),
                    element_type.total_size,
                    node,
                    "lookup first entry in vec",
                );

                let source_memory_location = MemoryLocation {
                    base_ptr_reg: value_addr_reg.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "load the vec entry to target register",
                );

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg, // mut self
                    zero_reg.register(),
                    node,
                    "vec remove first index",
                );
            }
            IntrinsicFunction::VecClear => {
                let temp_element_count_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "vec_clear zero",
                );
                self.builder.add_mov_16_immediate_value(
                    temp_element_count_reg.register(),
                    0,
                    node,
                    "set to zero",
                );

                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );

                self.builder.add_st16_using_ptr_with_offset(
                    &self_memory_location
                        .offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type())
                        .location,
                    temp_element_count_reg.register(),
                    node,
                    "set element_count to zero",
                );
            }

            IntrinsicFunction::VecGet => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                // Similar approach as GridGet - get pointer to element and use copy helpers
                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for vec element address",
                );

                // Get the address of the vector element
                self.builder.add_vec_subscript(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    &key_region,
                    element_type.total_size,
                    node,
                    "get vec element address",
                );

                // Create a memory location for the element
                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                // Copy from memory location to destination (works for both register and memory)
                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &element_memory_location,
                    node,
                    "copy vec element to destination",
                );
            }
            _ => todo!("Vec {intrinsic_fn}"),
        }

        /*
                   IntrinsicFunction::VecSwap => {
               let index_a = self
                   .emit_for_access_or_location(&arguments[0], ctx)
                   .grab_rvalue()
                   .clone();
               let index_b = self
                   .emit_for_access_or_location(&arguments[1], ctx)
                   .grab_rvalue()
                   .clone();
               self.builder
                   .add_vec_swap(self_addr.unwrap(), &index_a, &index_b, node, "vec swap");
           }

           IntrinsicFunction::VecInsert => { // Low prio
           }
           IntrinsicFunction::VecFirst => { // Low prio
           }
           IntrinsicFunction::VecLast => { // Low prio
           }

        */
    }

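    /// Emits the int intrinsics (abs, rnd, min, max, clamp, conversions) directly into
    /// `target_reg`. `arguments[0]` is the already materialized `self` value and any
    /// extra operands follow it.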
    fn emit_intrinsic_call_int(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument = &arguments[0];

        // Intrinsics can operate on any register directly, no need for register protection
        match intrinsic_fn {
            IntrinsicFunction::IntAbs => {
                self.builder
                    .add_int_abs(target_reg, first_argument, node, "int abs");
            }

            IntrinsicFunction::IntRnd => {
                self.builder
                    .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
            }
            IntrinsicFunction::IntMax => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_max(target_reg, first_argument, int_register, node, "int max");
            }
            IntrinsicFunction::IntMin => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_min(target_reg, first_argument, int_register, node, "int min");
            }
            IntrinsicFunction::IntClamp => {
                let min_reg = &arguments[1];
                let max_reg = &arguments[2];
                self.builder.add_int_clamp(
                    target_reg,
                    first_argument,
                    min_reg,
                    max_reg,
                    node,
                    "int clamp",
                );
            }
            IntrinsicFunction::IntToFloat => {
                self.builder.add_int_to_float(
                    target_reg,
                    first_argument,
                    node,
                    &format!("int to float {}", first_argument.comment()),
                );
            }
            IntrinsicFunction::IntToString => {
                self.builder
                    .add_int_to_string(target_reg, first_argument, node, "int_to_string");
            }
            _ => {}
        }
        // No need to copy from a temporary register as we're using target_reg directly
    }

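    /// Emits the fixed-point (Float) intrinsics directly into `target_reg`. As with the
    /// int variant, `arguments[0]` is the already materialized `self` value and the
    /// remaining registers are the extra operands.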
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_fixed(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        // Intrinsics can operate directly on any register, no need for temporary registers
        let first_argument_reg = &arguments[0];
        match intrinsic_fn {
            IntrinsicFunction::FloatRound => {
                self.builder
                    .add_float_round(target_reg, first_argument_reg, node, "float round");
            }
            IntrinsicFunction::FloatFloor => {
                self.builder
                    .add_float_floor(target_reg, first_argument_reg, node, "float floor");
            }
            IntrinsicFunction::FloatSqrt => {
                self.builder
                    .add_float_sqrt(target_reg, first_argument_reg, node, "float sqrt");
            }
            IntrinsicFunction::FloatSign => {
                self.builder
                    .add_float_sign(target_reg, first_argument_reg, node, "float sign");
            }
            IntrinsicFunction::FloatAbs => {
                self.builder
                    .add_float_abs(target_reg, first_argument_reg, node, "float abs");
            }
            IntrinsicFunction::FloatRnd => {
                self.builder.add_float_prnd(
                    target_reg,
                    first_argument_reg,
                    node,
                    "float pseudo random",
                );
            }
            IntrinsicFunction::FloatCos => {
                self.builder
                    .add_float_cos(target_reg, first_argument_reg, node, "float cos");
            }
            IntrinsicFunction::FloatSin => {
                self.builder
                    .add_float_sin(target_reg, first_argument_reg, node, "float sin");
            }
            IntrinsicFunction::FloatAcos => {
                self.builder
                    .add_float_acos(target_reg, first_argument_reg, node, "float acos");
            }
            IntrinsicFunction::FloatAsin => {
                self.builder
                    .add_float_asin(target_reg, first_argument_reg, node, "float asin");
            }
            IntrinsicFunction::FloatAtan2 => {
                self.builder
                    .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
            }
            IntrinsicFunction::FloatMin => {
                let float_region = &arguments[1];
                self.builder.add_float_min(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float min",
                );
            }
            IntrinsicFunction::FloatMax => {
                let float_region = &arguments[1];
                self.builder.add_float_max(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float max",
                );
            }
            IntrinsicFunction::FloatClamp => {
                let float_region = &arguments[1];
                let float_b_region = &arguments[2];

                self.builder.add_float_clamp(
                    target_reg,
                    float_region,
                    first_argument_reg,
                    float_b_region,
                    node,
                    "float clamp",
                );
            }
            IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
                target_reg,
                first_argument_reg,
                node,
                "float_to_string",
            ),
            _ => panic!("not a fixed-point (float) intrinsic: {intrinsic_fn}"),
        }
        // No need to copy from temp register to target as we're using target_reg directly
    }

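    /// Emits code for the transformer intrinsics (for, while, filter, find, ...) by
    /// iterating over the collection and invoking the given lambda per element.
    /// Currently every transformer is emitted with `Collection::Vec`.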
    pub fn emit_intrinsic_transformer(
        &mut self,
        target_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_addr: &PointerLocation,
        lambda: (Vec<VariableRef>, &Expression),
        node: &Node,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::TransformerFold => { // Low prio
            }
            IntrinsicFunction::TransformerFilter => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Filter,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::For,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            IntrinsicFunction::TransformerWhile => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::While,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFind => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Find,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            _ => todo!("{intrinsic_fn}"),
        }
    }

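    /// Variant of `emit_single_intrinsic_call_with_self` that takes the `self` argument
    /// as a `Destination`, materializing it to a scalar register or an absolute
    /// aggregate pointer before delegating.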
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self_destination(
        &mut self,
        target_destination: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_destination: Option<&Destination>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        // Use the helper function to properly materialize the self argument
        let self_reg = if let Some(self_dest) = self_destination {
            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
        } else {
            None
        };

        // Delegate to the existing function
        self.emit_single_intrinsic_call_with_self(
            target_destination,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            arguments,
            ctx,
            comment,
        );
    }

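    /// Central dispatch for intrinsic calls: routes each `IntrinsicFunction` group
    /// (float, int, vec, grid, sparse, transformer, conversion, runtime) to its emitter.
    ///
    /// Scalar results are produced in a register and, when the target is a memory
    /// destination, stored back with `emit_store_scalar_to_memory_offset_instruction`.
    /// For collection intrinsics, `self_reg` is assumed to already be a flattened pointer.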
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self(
        &mut self,
        target_destination: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_reg: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        let maybe_target = target_destination.register();

        match intrinsic_fn {
            IntrinsicFunction::Float2Magnitude
            | IntrinsicFunction::FloatAbs
            | IntrinsicFunction::FloatRound
            | IntrinsicFunction::FloatFloor
            | IntrinsicFunction::FloatSqrt
            | IntrinsicFunction::FloatSign
            | IntrinsicFunction::FloatRnd
            | IntrinsicFunction::FloatCos
            | IntrinsicFunction::FloatSin
            | IntrinsicFunction::FloatAcos
            | IntrinsicFunction::FloatAsin
            | IntrinsicFunction::FloatAtan2
            | IntrinsicFunction::FloatMin
            | IntrinsicFunction::FloatMax
            | IntrinsicFunction::FloatClamp
            | IntrinsicFunction::FloatToString => {
                // Float
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Materialize self to ensure we have the actual scalar value
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let ArgumentExpression::Expression(found_expression) = arg else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the fixed point value into memory",
                    );
                }
            }

            IntrinsicFunction::IntToFloat => {
                // IntToFloat - special case because it returns a float, not an int
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for int to float intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Self is already materialized as a register
                let int_value_reg = self_reg.unwrap();

                // Now convert the materialized integer value to float
                self.builder.add_int_to_float(
                    &dest_reg,
                    int_value_reg,
                    node,
                    &format!("int to float {}", int_value_reg.comment()),
                );

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the float result from int to float conversion",
                    );
                }
            }

            IntrinsicFunction::IntAbs
            | IntrinsicFunction::IntRnd
            | IntrinsicFunction::IntMax
            | IntrinsicFunction::IntMin
            | IntrinsicFunction::IntClamp
            | IntrinsicFunction::IntToString => {
                // Int
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    let target_reg = target_destination.register().unwrap();
                    // Intrinsics can operate on any register directly, no special treatment needed
                    (None, target_reg.clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Materialize additional arguments (self is already materialized)
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let ArgumentExpression::Expression(found_expression) = arg else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    if target_destination.is_register() {
                        // Copy from temp to target register
                        self.builder.add_mov_reg(
                            target_destination.register().unwrap(),
                            &temp_reg,
                            node,
                            "copy intrinsic result from temp to target register",
                        );
                    } else {
                        // Store to memory location
                        self.emit_store_scalar_to_memory_offset_instruction(
                            target_destination.grab_memory_location(),
                            &temp_reg,
                            node,
                            "put the low level intrinsic fixed (int) back to memory",
                        );
                    }
                }
            }

            IntrinsicFunction::VecPush
            | IntrinsicFunction::VecPop
            | IntrinsicFunction::VecExtend
            | IntrinsicFunction::VecRemoveIndex
            | IntrinsicFunction::VecRemoveIndexGetValue
            | IntrinsicFunction::VecRemoveFirstIndexGetValue
            | IntrinsicFunction::VecClear
            | IntrinsicFunction::VecSlice
            | IntrinsicFunction::VecSwap
            | IntrinsicFunction::VecInsert
            | IntrinsicFunction::VecFirst
            | IntrinsicFunction::VecGet
            | IntrinsicFunction::VecLast => {
                // Vec
                // Self is assumed to be a flattened pointer:
                let vec_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let ArgumentExpression::Expression(found_expression) = arg else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();

                self.emit_intrinsic_call_vec(
                    target_destination,
                    intrinsic_fn,
                    &vec_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::GridGet
            | IntrinsicFunction::GridSet
            | IntrinsicFunction::GridWidth
            | IntrinsicFunction::GridHeight => {
                // Grid
                // Self is assumed to be a flattened pointer:
                let grid_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let ArgumentExpression::Expression(found_expression) = arg else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_grid(
                    target_destination,
                    intrinsic_fn,
                    &grid_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::SparseIsAlive
            | IntrinsicFunction::SparseRemove
            | IntrinsicFunction::SparseAdd => {
                // Sparse
                // Self is assumed to be a flattened pointer:
                let sparse_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let ArgumentExpression::Expression(found_expression) = arg else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_sparse(
                    target_destination,
                    intrinsic_fn,
                    &sparse_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor
            | IntrinsicFunction::TransformerWhile
            | IntrinsicFunction::TransformerFindMap
            | IntrinsicFunction::TransformerAny
            | IntrinsicFunction::TransformerAll
            | IntrinsicFunction::TransformerMap
            | IntrinsicFunction::TransformerFilter
            | IntrinsicFunction::TransformerFilterMap
            | IntrinsicFunction::TransformerFind
            | IntrinsicFunction::TransformerFold => {
                // Self is assumed to be a flattened pointer:
                let collection_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let lambda_expression = &arguments[0];

                // Extract the lambda argument before generating any code
                let ArgumentExpression::Expression(expr) = lambda_expression else {
                    panic!("transformer argument must be an expression");
                };

                let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
                    panic!("transformers require a lambda argument");
                };

                self.emit_intrinsic_transformer(
                    target_destination,
                    intrinsic_fn,
                    &collection_self_ptr_reg,
                    (lambda_variables.clone(), lambda_expr),
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::RuntimePanic => {
                self.builder
                    .add_panic(self_reg.unwrap(), node, "intrinsic panic");
            }

            IntrinsicFunction::RuntimeHalt => {
                self.builder.add_halt(node, "intrinsic halt");
            }

            IntrinsicFunction::RuntimeStep => {
                self.builder.add_step(node, "intrinsic step");
            }

            IntrinsicFunction::RangeInit => {
                let start_reg = self_reg.unwrap();
                // let MutRefOrImmutableExpression::Expression(start_arg_expr) = start_arg else {
                //    panic!();
                //};
                // let start_reg = self.emit_scalar_rvalue(start_arg_expr, ctx);

                let end_arg = &arguments[0];
                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
                    panic!("range end must be an expression");
                };
                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);

                let is_inclusive = &arguments[1];
                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
                    panic!("range inclusive flag must be an expression");
                };
                let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
                let absolute_range_pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "create range target pointer",
                );
                self.builder.add_range_init(
                    &absolute_range_pointer,
                    start_reg,
                    &end_reg,
                    &is_inclusive_reg,
                    node,
                    "create a range",
                );
            }

            // Codepoint
            IntrinsicFunction::CodepointToString => {
                if target_destination.is_register() {
                    self.builder.add_codepoint_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for codepoint to string intrinsic",
                    );

                    self.builder.add_codepoint_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store codepoint to string result to memory",
                    );
                }
            }

            IntrinsicFunction::CodepointToInt => {
                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store codepoint to int result to memory",
                    );
                }
            }

            // Byte
            IntrinsicFunction::ByteToString => {
                if target_destination.is_register() {
                    self.builder.byte_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for byte to string intrinsic",
                    );

                    self.builder.byte_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to string result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToInt => {
                if target_destination.is_register() {
                    // It is safe to "upcast" to an i32 from a u8, so just copy the register
                    // TODO: Make something smarter so we don't have to copy
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to int result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToFloat => {
                if target_destination.is_register() {
                    // Use existing int-to-float conversion since byte is just a small integer
                    self.builder.add_int_to_float(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary for byte to float intrinsic",
                    );

                    // Use existing int-to-float conversion since byte is just a small integer
                    self.builder.add_int_to_float(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to float result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToCodepoint => {
                if target_destination.is_register() {
                    // It is safe to "upcast" to a codepoint from a u8, so just copy the register
                    // TODO: Make something smarter so we don't have to copy
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_codepoint",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to codepoint result to memory",
                    );
                }
            }

            // Bool
            IntrinsicFunction::BoolToString => {
                if target_destination.is_register() {
                    self.builder.bool_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "bool_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for bool to string intrinsic",
                    );

                    self.builder.bool_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "bool_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store bool to string result to memory",
                    );
                }
            }

            IntrinsicFunction::StringToString => {
                if target_destination.is_register() {
                    self.builder.add_string_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for string to string intrinsic",
                    );

                    self.builder.add_string_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "string_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store string to string result to memory",
                    );
                }
            }

            IntrinsicFunction::StringStartsWith => {
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let ArgumentExpression::Expression(found_expression) = arg else {
                            panic!("must be expression");
1428                        };
1429                        found_expression.clone()
1430                    })
1431                    .collect();
1432                let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
1433                if target_destination.is_register() {
1434                    self.builder.add_string_starts_with(
1435                        target_destination.register().unwrap(),
1436                        self_reg.unwrap(),
1437                        &other_str,
1438                        node,
1439                        "string_starts_with",
1440                    );
1441                } else {
1442                    let temp_reg = self.temp_registers.allocate(
1443                        VmType::new_contained_in_register(u8_type()),
1444                        "temporary for string starts with intrinsic",
1445                    );
1446
1447                    self.builder.add_string_starts_with(
1448                        &temp_reg.register,
1449                        self_reg.unwrap(),
1450                        &other_str,
1451                        node,
1452                        "string_starts_with",
1453                    );
1454
1455                    self.emit_store_scalar_to_memory_offset_instruction(
1456                        target_destination.grab_memory_location(),
1457                        &temp_reg.register,
1458                        node,
1459                        "store string starts with result to memory",
1460                    );
1461                }
1462            }
1463
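            // The string parsing intrinsics write their result through a pointer to the
            // destination aggregate (the "tuple" mentioned in the comments below), so the
            // effective address is computed up front rather than using a scalar register.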
1464            IntrinsicFunction::StringToInt => {
1465                let pointer = self.emit_compute_effective_address_to_register(
1466                    target_destination,
1467                    node,
1468                    "need pointer to tuple",
1469                );
1470                self.builder
1471                    .add_string_to_int(&pointer, self_reg.unwrap(), node, "string to int");
1472            }
1473            IntrinsicFunction::StringToFloat => {
1474                let pointer = self.emit_compute_effective_address_to_register(
1475                    target_destination,
1476                    node,
1477                    "need pointer to tuple",
1478                );
1479                self.builder.add_string_to_float(
1480                    &pointer,
1481                    self_reg.unwrap(),
1482                    node,
1483                    "string to float",
1484                );
1485            }
1486
1487            // Common Collection
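            // These only touch the shared collection header: element_count and capacity
            // are u16 fields read at COLLECTION_ELEMENT_COUNT_OFFSET and
            // COLLECTION_CAPACITY_OFFSET by the helpers at the end of this impl.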
1488            IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
1489                let collection_pointer = PointerLocation {
1490                    ptr_reg: self_reg.unwrap().clone(),
1491                };
1492                if target_destination.is_register() {
1493                    self.emit_collection_is_empty(
1494                        target_destination.register().unwrap().clone(),
1495                        &collection_pointer,
1496                        node,
1497                        "collection is_empty",
1498                    );
1499                } else {
1500                    let temp_reg = self.temp_registers.allocate(
1501                        VmType::new_contained_in_register(u8_type()),
1502                        "temporary for collection is empty intrinsic",
1503                    );
1504
1505                    self.emit_collection_is_empty(
1506                        temp_reg.register.clone(),
1507                        &collection_pointer,
1508                        node,
1509                        "collection is_empty",
1510                    );
1511
1512                    self.emit_store_scalar_to_memory_offset_instruction(
1513                        target_destination.grab_memory_location(),
1514                        &temp_reg.register,
1515                        node,
1516                        "store collection is empty result to memory",
1517                    );
1518                }
1519            }
1520
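            // StringLen reuses the collection length helper, so the string header is
            // assumed to keep its element_count at the common collection offset.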
1521            IntrinsicFunction::StringLen
1522            | IntrinsicFunction::MapLen
1523            | IntrinsicFunction::VecLen => {
1524                let collection_pointer = PointerLocation {
1525                    ptr_reg: self_reg.unwrap().clone(),
1526                };
1527                if target_destination.is_register() {
1528                    self.emit_collection_len(
1529                        target_destination.register().unwrap(),
1530                        &collection_pointer,
1531                        node,
1532                        "get the collection element_count",
1533                    );
1534                } else {
1535                    let temp_reg = self.temp_registers.allocate(
1536                        VmType::new_contained_in_register(u16_type()),
1537                        "temporary for collection len intrinsic",
1538                    );
1539
1540                    self.emit_collection_len(
1541                        &temp_reg.register,
1542                        &collection_pointer,
1543                        node,
1544                        "get the collection element_count",
1545                    );
1546
1547                    self.emit_store_scalar_to_memory_offset_instruction(
1548                        target_destination.grab_memory_location(),
1549                        &temp_reg.register,
1550                        node,
1551                        "store collection len result to memory",
1552                    );
1553                }
1554            }
1555            IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
1556                let collection_pointer = PointerLocation {
1557                    ptr_reg: self_reg.unwrap().clone(),
1558                };
1559                if target_destination.is_register() {
1560                    self.emit_collection_capacity(
1561                        target_destination.register().unwrap(),
1562                        &collection_pointer,
1563                        node,
1564                        "get the collection capacity",
1565                    );
1566                } else {
1567                    let temp_reg = self.temp_registers.allocate(
1568                        VmType::new_contained_in_register(u16_type()),
1569                        "temporary for collection capacity intrinsic",
1570                    );
1571
1572                    self.emit_collection_capacity(
1573                        &temp_reg.register,
1574                        &collection_pointer,
1575                        node,
1576                        "get the collection capacity",
1577                    );
1578
1579                    self.emit_store_scalar_to_memory_offset_instruction(
1580                        target_destination.grab_memory_location(),
1581                        &temp_reg.register,
1582                        node,
1583                        "store collection capacity result to memory",
1584                    );
1585                }
1586            }
1587
1588            IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
1589                // Map intrinsics that take a key argument.
1590                // Self is assumed to be a flattened pointer to the map header:
1591                let map_self_ptr_reg = PointerLocation {
1592                    ptr_reg: self_reg.unwrap().clone(),
1593                };
1594                let converted_to_expressions: Vec<_> = arguments
1595                    .iter()
1596                    .map(|arg| {
1597                        let ArgumentExpression::Expression(found_expression) = arg else {
1598                            panic!("must be expression");
1599                        };
1600                        found_expression.clone()
1601                    })
1602                    .collect();
1603                self.emit_intrinsic_map(
1604                    target_destination,
1605                    intrinsic_fn,
1606                    &map_self_ptr_reg,
1607                    &converted_to_expressions,
1608                    node,
1609                    comment,
1610                    ctx,
1611                );
1612            } // All intrinsic cases are now handled above
1613        }
1614    }
1615
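    /// Emits a `map_remove`: the key expression is materialized into addressable memory
    /// first so the instruction receives a pointer to the key.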
1616    fn emit_intrinsic_map_remove(
1617        &mut self,
1618        map_header_reg: &PointerLocation,
1619        key_expression: &Expression,
1620        ctx: &Context,
1621    ) {
1622        let key_register =
1623            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);
1624
1625        self.builder
1626            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "map_remove");
1627    }
1628
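    /// Reads the collection's capacity: a u16 load from the collection header at
    /// `COLLECTION_CAPACITY_OFFSET`.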
1629    fn emit_collection_capacity(
1630        &mut self,
1631        output_reg: &TypedRegister,
1632        collection_addr: &PointerLocation,
1633        node: &Node,
1634        comment: &str,
1635    ) {
1636        self.builder.add_ld16_from_pointer_with_offset_u16(
1637            output_reg,
1638            &collection_addr.ptr_reg,
1639            COLLECTION_CAPACITY_OFFSET,
1640            node,
1641            comment,
1642        );
1643    }
1644
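    /// Reads the collection's length: a u16 load of the element_count field at
    /// `COLLECTION_ELEMENT_COUNT_OFFSET`.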
1645    fn emit_collection_len(
1646        &mut self,
1647        output_reg: &TypedRegister,
1648        collection_addr: &PointerLocation,
1649        node: &Node,
1650        comment: &str,
1651    ) {
1652        self.builder.add_ld16_from_pointer_with_offset_u16(
1653            output_reg,
1654            &collection_addr.ptr_reg,
1655            COLLECTION_ELEMENT_COUNT_OFFSET,
1656            node,
1657            &format!("{comment} - collection element_count"),
1658        );
1659    }
1660
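    /// Loads the element_count and then applies `meqz` in place, so `output_reg` ends up
    /// holding `true` exactly when the count is zero.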
1661    fn emit_collection_is_empty(
1662        &mut self,
1663        output_reg: TypedRegister,
1664        collection_addr: &PointerLocation,
1665        node: &Node,
1666        _comment: &str,
1667    ) {
1668        self.builder.add_ld16_from_pointer_with_offset_u16(
1669            &output_reg,
1670            &collection_addr.ptr_reg,
1671            COLLECTION_ELEMENT_COUNT_OFFSET,
1672            node,
1673            "get the collection element_count for the is_empty test",
1674        );
1675        self.builder.add_meqz(
1676            &output_reg,
1677            &output_reg,
1678            node,
1679            "convert the element_count to an inverted bool",
1680        );
1681    }
1682}