// swamp_code_gen/intr.rs

1/*
2 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
3 * Licensed under the MIT License. See LICENSE in the project root for license information.
4 */
5use crate::code_bld::CodeBuilder;
6use crate::ctx::Context;
7
8use crate::transformer::{Collection, Transformer};
9use source_map_node::Node;
10use swamp_semantic::intr::IntrinsicFunction;
11use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
12use swamp_vm_types::types::{
13    float_type, int_type, pointer_type, u16_type, u32_type, u8_type, Destination, TypedRegister,
14    VmType,
15};
16use swamp_vm_types::{
17    AggregateMemoryLocation, MemoryLocation, MemoryOffset,
18    PointerLocation, COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET, GRID_HEADER_HEIGHT_OFFSET,
19    GRID_HEADER_WIDTH_OFFSET,
20};
21
22impl CodeBuilder<'_> {
23    #[allow(clippy::too_many_lines)]
24    #[allow(clippy::single_match_else)]
25    pub fn emit_single_intrinsic_call(
26        &mut self,
27        target_reg: &Destination,
28        node: &Node,
29        intrinsic_fn: &IntrinsicFunction,
30        arguments: &[ArgumentExpression],
31        ctx: &Context,
32    ) {
33        {
34            // For primitive intrinsics, materialize the self argument to a register early
35            let self_reg = if arguments.is_empty() {
36                None
37            } else {
38                let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
39                    panic!("Expected expression for self argument");
40                };
41                Some(self.emit_scalar_rvalue(self_expr, ctx))
42            };
43
44            let rest_args = if arguments.len() > 1 {
45                &arguments[1..]
46            } else {
47                &vec![]
48            };
49            self.emit_single_intrinsic_call_with_self(
50                target_reg,
51                node,
52                intrinsic_fn,
53                self_reg.as_ref(),
54                rest_args,
55                ctx,
56                "single intrinsic call",
57            );
58        }
59    }
60
61    pub fn emit_intrinsic_map(
62        &mut self,
63        output_destination: &Destination,
64        intrinsic_fn: &IntrinsicFunction,
65        self_ptr_reg: &PointerLocation,
66        arguments: &[Expression],
67        node: &Node,
68        comment: &str,
69        ctx: &Context,
70    ) {
71        match intrinsic_fn {
72            IntrinsicFunction::MapHas => {
73                let key_argument = &arguments[0];
74                // We have to get the key materialized in a temporary storage, so the map can calculate the hash for it.
75                let key_temp_storage_reg =
76                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);
77
78                self.builder.add_map_has(
79                    output_destination.register().unwrap(),
80                    self_ptr_reg,
81                    &key_temp_storage_reg,
82                    node,
83                    "map_has",
84                );
85            }
86            IntrinsicFunction::MapRemove => {
87                let key_argument = &arguments[0];
88                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
89            }
90            _ => todo!("missing intrinsic_map {intrinsic_fn}"),
91        }
92    }
93
94    pub fn emit_intrinsic_sparse(
95        &mut self,
96        output_destination: &Destination,
97        intrinsic_fn: &IntrinsicFunction,
98        self_ptr_reg: &PointerLocation,
99        arguments: &[Expression],
100        node: &Node,
101        comment: &str,
102        ctx: &Context,
103    ) {
104        match intrinsic_fn {
105            IntrinsicFunction::SparseAdd => {
106                let element_to_add_expression = &arguments[0];
107                self.emit_sparse_add(
108                    &output_destination.register().unwrap().clone(),
109                    self_ptr_reg,
110                    element_to_add_expression,
111                    node,
112                    ctx,
113                );
114            }
115
116            IntrinsicFunction::SparseRemove => {
117                let sparse_id_int_expression = &arguments[0];
118                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
119            }
120
121            IntrinsicFunction::SparseIsAlive => {
122                let sparse_id_int_expression = &arguments[0];
123                self.emit_sparse_is_alive(
124                    &output_destination.register().unwrap().clone(),
125                    self_ptr_reg,
126                    sparse_id_int_expression,
127                    node,
128                    ctx,
129                );
130            }
131            _ => todo!("unknown sparse {intrinsic_fn}"),
132        }
133    }
134    pub fn emit_intrinsic_grid(
135        &mut self,
136        target_destination: &Destination,
137        intrinsic_fn: &IntrinsicFunction,
138        self_ptr_reg: &PointerLocation,
139        arguments: &[Expression],
140        node: &Node,
141        comment: &str,
142        ctx: &Context,
143    ) {
144        let (temp_reg, dest_reg) = if target_destination.is_register() {
145            (None, target_destination.register().unwrap().clone())
146        } else {
147            let temp_reg = self.temp_registers.allocate(
148                VmType::new_contained_in_register(float_type()),
149                "temporary destination for low level intrinsic",
150            );
151
152            (Some(temp_reg.register.clone()), temp_reg.register)
153        };
154        match intrinsic_fn {
155            IntrinsicFunction::GridSet => {
156                let x_expr = &arguments[0];
157                let y_expr = &arguments[1];
158                let value_expr = &arguments[2];
159
160                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
161                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
162                let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
163
164                let temp_element_ptr = self.temp_registers.allocate(
165                    VmType::new_contained_in_register(element_gen_type.clone()),
166                    "temporary scalar",
167                );
168
169                self.builder.add_grid_get_entry_addr(
170                    &temp_element_ptr.register,
171                    self_ptr_reg,
172                    &x_reg,
173                    &y_reg,
174                    element_gen_type.total_size,
175                    node,
176                    comment,
177                );
178
179                let location = AggregateMemoryLocation {
180                    location: MemoryLocation {
181                        base_ptr_reg: temp_element_ptr.register,
182                        offset: MemoryOffset(0),
183                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
184                    },
185                };
186
187                // Initialize the allocated space first (like variable definition)
188                if element_gen_type.is_aggregate() {
189                    self.emit_initialize_memory_for_any_type(
190                        &location.location,
191                        node,
192                        "initialize grid set allocated space",
193                    );
194                }
195
196                self.emit_expression_into_target_memory(
197                    &location.location,
198                    value_expr,
199                    "grid set",
200                    ctx,
201                );
202            }
203            IntrinsicFunction::GridGet => {
204                let x_expr = &arguments[0];
205                let y_expr = &arguments[1];
206
207                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
208                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
209
210                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
211
212                // Allocate a temporary register to hold the address of the grid element
213                let temp_element_ptr = self.temp_registers.allocate(
214                    VmType::new_contained_in_register(pointer_type()),
215                    "temp for grid element address",
216                );
217
218                // Get the address of the grid element using the opcode
219                self.builder.add_grid_get_entry_addr(
220                    &temp_element_ptr.register,
221                    self_ptr_reg,
222                    &x_reg,
223                    &y_reg,
224                    element_type.total_size,
225                    node,
226                    comment,
227                );
228
229                // Create a memory location from the element address with correct type information
230                let element_memory_location = MemoryLocation {
231                    base_ptr_reg: temp_element_ptr.register,
232                    offset: MemoryOffset(0),
233                    ty: VmType::new_unknown_placement(element_type),
234                };
235
236                // Use emit_copy_value_from_memory_location to handle both register and memory destinations
237                // This will properly handle aggregates (like optionals) vs scalars
238                self.emit_copy_value_from_memory_location(
239                    target_destination,
240                    &element_memory_location,
241                    node,
242                    "copy grid element value to destination",
243                );
244            }
245
246            IntrinsicFunction::GridWidth => {
247                // Allocate a temporary register for the width value
248                let temp = self.temp_registers.allocate(
249                    VmType::new_contained_in_register(u16_type()),
250                    "temp for grid width",
251                );
252
253                // Get the memory location of the width field in the grid header
254                let self_memory_location = AggregateMemoryLocation::new(
255                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
256                        self_ptr_reg.ptr_reg.clone(),
257                    ),
258                );
259                let width_location =
260                    self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());
261
262                // Load the width value from the grid header into the temporary register
263                self.builder.add_ld16_from_pointer_from_memory_location(
264                    &temp.register,
265                    &width_location.location,
266                    node,
267                    comment,
268                );
269
270                // Create a source destination from the temporary register
271                let value_source = Destination::Register(temp.register);
272
273                // Use emit_copy_value_between_destinations to handle both register and memory destinations
274                self.emit_copy_value_between_destinations(
275                    target_destination,
276                    &value_source,
277                    node,
278                    "store grid width to destination",
279                );
280            }
281            IntrinsicFunction::GridHeight => {
282                // Allocate a temporary register for the height value
283                let temp = self.temp_registers.allocate(
284                    VmType::new_contained_in_register(u16_type()),
285                    "temp for grid height",
286                );
287
288                // Get the memory location of the height field in the grid header
289                let self_memory_location = AggregateMemoryLocation::new(
290                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
291                        self_ptr_reg.ptr_reg.clone(),
292                    ),
293                );
294                let height_location =
295                    self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());
296
297                // Load the height value from the grid header into the temporary register
298                self.builder.add_ld16_from_pointer_from_memory_location(
299                    &temp.register,
300                    &height_location.location,
301                    node,
302                    comment,
303                );
304
305                // Create a source destination from the temporary register
306                let value_source = Destination::Register(temp.register);
307
308                // Use emit_copy_value_between_destinations to handle both register and memory destinations
309                self.emit_copy_value_between_destinations(
310                    target_destination,
311                    &value_source,
312                    node,
313                    "store grid height to destination",
314                );
315            }
316            _ => todo!("wrong grid {intrinsic_fn}"),
317        }
318    }
319
320    #[allow(clippy::too_many_lines)]
321    fn emit_intrinsic_call_vec(
322        &mut self,
323        output_destination: &Destination,
324        intrinsic_fn: &IntrinsicFunction,
325        self_ptr_reg: &PointerLocation,
326        arguments: &[Expression],
327        node: &Node,
328        ctx: &Context,
329    ) {
330        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
331        match intrinsic_fn {
332            IntrinsicFunction::VecPush => {
333                let element_expr = &arguments[0];
334
335                let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);
336
337                let temp_element_ptr = self.temp_registers.allocate(
338                    VmType::new_contained_in_register(pointer_type()),
339                    "pointer to new element",
340                );
341
342                self.builder.add_vec_push_addr(
343                    temp_element_ptr.register(),
344                    &self_ptr_reg.ptr_reg,
345                    node,
346                    "set pointer to new element",
347                );
348
349                let location = AggregateMemoryLocation {
350                    location: MemoryLocation {
351                        base_ptr_reg: temp_element_ptr.register,
352                        offset: MemoryOffset(0),
353                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
354                    },
355                };
356
357                // Initialize the allocated space first (like variable definition)
358                if element_gen_type.is_aggregate() {
359                    self.emit_initialize_memory_for_any_type(
360                        &location.location,
361                        node,
362                        "initialize vec.push allocated space",
363                    );
364                }
365
366                self.emit_expression_into_target_memory(
367                    &location.location,
368                    element_expr,
369                    "vec push",
370                    ctx,
371                );
372            }
373
374            IntrinsicFunction::VecPop => {
375                let element_type = self_basic_type.element().unwrap();
376                let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
377                    found_target_reg.clone()
378                } else {
379                    let temp = self.temp_registers.allocate(
380                        VmType::new_contained_in_register(element_type.clone()),
381                        "temp for vec pop",
382                    );
383                    temp.register
384                };
385                self.builder.add_vec_pop(
386                    &pop_target_reg,
387                    &self_ptr_reg.ptr_reg, // mut self
388                    element_type.total_size,
389                    node,
390                    "vec pop",
391                );
392                let source_memory_location = MemoryLocation {
393                    base_ptr_reg: pop_target_reg,
394                    offset: MemoryOffset(0),
395                    ty: VmType::new_unknown_placement(element_type),
396                };
397
398                self.emit_copy_value_from_memory_location(
399                    output_destination,
400                    &source_memory_location,
401                    node,
402                    "copy from vec pop",
403                );
404            }
405
406            IntrinsicFunction::VecSlice => {
407                let range_expr = &arguments[0];
408                let range_region = self.emit_scalar_rvalue(range_expr, ctx);
409
410                let output_pointer = self.emit_compute_effective_address_to_register(
411                    output_destination,
412                    node,
413                    "get absolute pointer for vec slice destination",
414                );
415                let output_pointer_location = PointerLocation::new(output_pointer);
416
417                self.builder.add_vec_copy_range(&output_pointer_location, self_ptr_reg, &range_region, node, "vec slice");
418            }
419
420            IntrinsicFunction::VecRemoveIndex => {
421                let index_region_expr = &arguments[0];
422                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);
423
424                let element_type = self_basic_type.element().unwrap();
425
426                self.builder.add_vec_remove_index(
427                    &self_ptr_reg.ptr_reg,
428                    &index_region,
429                    node,
430                    "remove index",
431                );
432            }
433            IntrinsicFunction::VecRemoveIndexGetValue => {
434                let key_expr = &arguments[0];
435                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
436                let element_type = self_basic_type.element().unwrap();
437
438                // Handle both register and memory destinations
439                if let Some(target_reg) = output_destination.register() {
440                    // Direct register destination
441                    self.builder.add_vec_remove_index_get_value(
442                        target_reg,
443                        &self_ptr_reg.ptr_reg, // mut self
444                        &key_region,
445                        node,
446                        "vec remove index get value to register",
447                    );
448                } else {
449                    // Memory destination or other
450                    let temp_reg = self.temp_registers.allocate(
451                        VmType::new_contained_in_register(element_type),
452                        "temp for vec remove index get value",
453                    );
454
455                    self.builder.add_vec_remove_index_get_value(
456                        &temp_reg.register,
457                        &self_ptr_reg.ptr_reg,
458                        &key_region,
459                        node,
460                        "vec remove index get value to temp",
461                    );
462
463                    // Copy from temporary register to destination
464                    let source = Destination::Register(temp_reg.register);
465                    self.emit_copy_value_between_destinations(
466                        output_destination,
467                        &source,
468                        node,
469                        "copy vec element to destination",
470                    );
471                }
472            }
473            IntrinsicFunction::VecRemoveFirstIndexGetValue => {
474                let zero_reg = self.temp_registers.allocate(
475                    VmType::new_contained_in_register(u8_type()),
476                    "vec remove first. set index 0",
477                );
478                self.builder
479                    .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
480                let value_addr_reg = self.temp_registers.allocate(
481                    VmType::new_contained_in_register(u32_type()),
482                    "vec entry addr to copy from",
483                );
484                let element_type = self_basic_type.element().unwrap();
485                self.builder.add_vec_subscript(
486                    value_addr_reg.register(),
487                    &self_ptr_reg.ptr_reg,
488                    zero_reg.register(),
489                    element_type.total_size,
490                    node,
491                    "lookup first entry in vec",
492                );
493
494                let source_memory_location = MemoryLocation {
495                    base_ptr_reg: value_addr_reg.register,
496                    offset: MemoryOffset(0),
497                    ty: VmType::new_unknown_placement(element_type),
498                };
499
500                self.emit_copy_value_from_memory_location(
501                    output_destination,
502                    &source_memory_location,
503                    node,
504                    "load the vec entry to target register",
505                );
506
507                self.builder.add_vec_remove_index(
508                    &self_ptr_reg.ptr_reg, // mut self
509                    zero_reg.register(),
510                    node,
511                    "vec remove first index",
512                );
513            }
514            IntrinsicFunction::VecClear => {
515                let temp_element_count_reg = self.temp_registers.allocate(
516                    VmType::new_contained_in_register(u16_type()),
517                    "vec_clear zero",
518                );
519                self.builder.add_mov_16_immediate_value(
520                    temp_element_count_reg.register(),
521                    0,
522                    node,
523                    "set to zero",
524                );
525
526                let self_memory_location = AggregateMemoryLocation::new(MemoryLocation::new_copy_over_whole_type_with_zero_offset(
527                    self_ptr_reg.ptr_reg.clone(),
528                ));
529
530                self.builder.add_st16_using_ptr_with_offset(
531                    &self_memory_location.offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type()).location,
532                    temp_element_count_reg.register(),
533                    node,
534                    "set element_count to zero",
535                );
536            }
537
538            IntrinsicFunction::VecGet => {
539                let key_expr = &arguments[0];
540                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
541                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();
542
543                // Similar approach as GridGet - get pointer to element and use copy helpers
544                let temp_element_ptr = self.temp_registers.allocate(
545                    VmType::new_contained_in_register(pointer_type()),
546                    "temp for vec element address",
547                );
548
549                // Get the address of the vector element
550                self.builder.add_vec_subscript(
551                    temp_element_ptr.register(),
552                    &self_ptr_reg.ptr_reg,
553                    &key_region,
554                    element_type.total_size,
555                    node,
556                    "get vec element address",
557                );
558
559                // Create a memory location for the element
560                let element_memory_location = MemoryLocation {
561                    base_ptr_reg: temp_element_ptr.register,
562                    offset: MemoryOffset(0),
563                    ty: VmType::new_unknown_placement(element_type),
564                };
565
566                // Copy from memory location to destination (works for both register and memory)
567                self.emit_copy_value_from_memory_location(
568                    output_destination,
569                    &element_memory_location,
570                    node,
571                    "copy vec element to destination",
572                );
573            }
574            _ => todo!("Vec {intrinsic_fn}"),
575        }
576
577        /*
578                   IntrinsicFunction::VecSwap => {
579               let index_a = self
580                   .emit_for_access_or_location(&arguments[0], ctx)
581                   .grab_rvalue()
582                   .clone();
583               let index_b = self
584                   .emit_for_access_or_location(&arguments[1], ctx)
585                   .grab_rvalue()
586                   .clone();
587               self.builder
588                   .add_vec_swap(self_addr.unwrap(), &index_a, &index_b, node, "vec swap");
589           }
590
591           IntrinsicFunction::VecInsert => { // Low prio
592           }
593           IntrinsicFunction::VecFirst => { // Low prio
594           }
595           IntrinsicFunction::VecLast => { // Low prio
596           }
597
598        */
599    }
600
601    fn emit_intrinsic_call_int(
602        &mut self,
603        target_reg: &TypedRegister,
604        intrinsic_fn: &IntrinsicFunction,
605        arguments: &[TypedRegister],
606        node: &Node,
607    ) {
608        let first_argument = &arguments[0];
609
610        // Intrinsics can operate on any register directly, no need for register protection
611        match intrinsic_fn {
612            IntrinsicFunction::IntAbs => {
613                self.builder
614                    .add_int_abs(target_reg, first_argument, node, "int abs");
615            }
616
617            IntrinsicFunction::IntRnd => {
618                self.builder
619                    .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
620            }
621            IntrinsicFunction::IntMax => {
622                let int_register = &arguments[1];
623
624                self.builder
625                    .add_int_max(target_reg, first_argument, int_register, node, "int max");
626            }
627            IntrinsicFunction::IntMin => {
628                let int_register = &arguments[1];
629
630                self.builder
631                    .add_int_min(target_reg, first_argument, int_register, node, "int min");
632            }
633            IntrinsicFunction::IntClamp => {
634                let min_reg = &arguments[1];
635                let max_reg = &arguments[2];
636                self.builder.add_int_clamp(
637                    target_reg,
638                    first_argument,
639                    min_reg,
640                    max_reg,
641                    node,
642                    "int clamp",
643                );
644            }
645            IntrinsicFunction::IntToFloat => {
646                self.builder.add_int_to_float(
647                    target_reg,
648                    first_argument,
649                    node,
650                    &format!("int to float {}", first_argument.comment()),
651                );
652            }
653            IntrinsicFunction::IntToString => {
654                self.builder
655                    .add_int_to_string(target_reg, first_argument, node, "int_to_string");
656            }
657            _ => {}
658        }
659        // No need to copy from a temporary register as we're using target_reg directly
660    }
661
662    #[allow(clippy::too_many_lines)]
663    fn emit_intrinsic_call_fixed(
664        &mut self,
665        target_reg: &TypedRegister,
666        intrinsic_fn: &IntrinsicFunction,
667        arguments: &[TypedRegister],
668        node: &Node,
669    ) {
670        // Intrinsics can operate directly on any register, no need for temporary registers
671        let first_argument_reg = &arguments[0];
672        match intrinsic_fn {
673            IntrinsicFunction::FloatRound => {
674                self.builder
675                    .add_float_round(target_reg, first_argument_reg, node, "float round");
676            }
677            IntrinsicFunction::FloatFloor => {
678                self.builder
679                    .add_float_floor(target_reg, first_argument_reg, node, "float floor");
680            }
681            IntrinsicFunction::FloatSqrt => {
682                self.builder
683                    .add_float_sqrt(target_reg, first_argument_reg, node, "float sqr");
684            }
685            IntrinsicFunction::FloatSign => {
686                self.builder
687                    .add_float_sign(target_reg, first_argument_reg, node, "float sign");
688            }
689            IntrinsicFunction::FloatAbs => {
690                self.builder
691                    .add_float_abs(target_reg, first_argument_reg, node, "float abs");
692            }
693            IntrinsicFunction::FloatRnd => {
694                self.builder.add_float_prnd(
695                    target_reg,
696                    first_argument_reg,
697                    node,
698                    "float pseudo random",
699                );
700            }
701            IntrinsicFunction::FloatCos => {
702                self.builder
703                    .add_float_cos(target_reg, first_argument_reg, node, "float cos");
704            }
705            IntrinsicFunction::FloatSin => {
706                self.builder
707                    .add_float_sin(target_reg, first_argument_reg, node, "float sin");
708            }
709            IntrinsicFunction::FloatAcos => {
710                self.builder
711                    .add_float_acos(target_reg, first_argument_reg, node, "float acos");
712            }
713            IntrinsicFunction::FloatAsin => {
714                self.builder
715                    .add_float_asin(target_reg, first_argument_reg, node, "float asin");
716            }
717            IntrinsicFunction::FloatAtan2 => {
718                self.builder
719                    .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
720            }
721            IntrinsicFunction::FloatMin => {
722                let float_region = &arguments[1];
723                self.builder.add_float_min(
724                    target_reg,
725                    first_argument_reg,
726                    float_region,
727                    node,
728                    "float min",
729                );
730            }
731            IntrinsicFunction::FloatMax => {
732                let float_region = &arguments[1];
733                self.builder.add_float_max(
734                    target_reg,
735                    first_argument_reg,
736                    float_region,
737                    node,
738                    "float max",
739                );
740            }
741            IntrinsicFunction::FloatClamp => {
742                let float_region = &arguments[1];
743                let float_b_region = &arguments[2];
744
745                self.builder.add_float_clamp(
746                    target_reg,
747                    float_region,
748                    first_argument_reg,
749                    float_b_region,
750                    node,
751                    "float round",
752                );
753            }
754            IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
755                target_reg,
756                first_argument_reg,
757                node,
758                "float_to_string",
759            ),
760            _ => panic!("wasn't a fixed operation"),
761        }
762        // No need to copy from temp register to target as we're using target_reg directly
763    }
764
765    pub fn emit_intrinsic_transformer(
766        &mut self,
767        target_destination: &Destination,
768        intrinsic_fn: &IntrinsicFunction,
769        self_addr: &PointerLocation,
770        lambda: (Vec<VariableRef>, &Expression),
771        node: &Node,
772        ctx: &Context,
773    ) {
774        match intrinsic_fn {
775            IntrinsicFunction::TransformerFold => { // Low prio
776            }
777            IntrinsicFunction::TransformerFilter => {
778                self.emit_iterate_over_collection_with_lambda(
779                    target_destination,
780                    node,
781                    Collection::Vec,
782                    Transformer::Filter,
783                    &self_addr.ptr_reg,
784                    lambda,
785                    ctx,
786                );
787            }
788
789            IntrinsicFunction::TransformerFor => {
790                self.emit_iterate_over_collection_with_lambda(
791                    target_destination,
792                    node,
793                    Collection::Vec,
794                    Transformer::For,
795                    &self_addr.ptr_reg,
796                    lambda,
797                    ctx,
798                );
799            }
800            IntrinsicFunction::TransformerWhile => {
801                self.emit_iterate_over_collection_with_lambda(
802                    target_destination,
803                    node,
804                    Collection::Vec,
805                    Transformer::While,
806                    &self_addr.ptr_reg,
807                    lambda,
808                    ctx,
809                );
810            }
811
812            IntrinsicFunction::TransformerFind => {
813                self.emit_iterate_over_collection_with_lambda(
814                    target_destination,
815                    node,
816                    Collection::Vec,
817                    Transformer::Find,
818                    &self_addr.ptr_reg,
819                    lambda,
820                    ctx,
821                );
822            }
823            _ => todo!("{intrinsic_fn}"),
824        }
825    }
826
827    #[allow(clippy::too_many_lines)]
828    #[allow(clippy::too_many_arguments)]
829    pub fn emit_single_intrinsic_call_with_self_destination(
830        &mut self,
831        target_destination: &Destination,
832        node: &Node,
833        intrinsic_fn: &IntrinsicFunction,
834        self_destination: Option<&Destination>,
835        arguments: &[ArgumentExpression],
836        ctx: &Context,
837        comment: &str,
838    ) {
839        // Use the helper function to properly materialize the self argument
840        let self_reg = if let Some(self_dest) = self_destination {
841            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
842        } else {
843            None
844        };
845
846        // Delegate to the existing function
847        self.emit_single_intrinsic_call_with_self(
848            target_destination,
849            node,
850            intrinsic_fn,
851            self_reg.as_ref(),
852            arguments,
853            ctx,
854            comment,
855        );
856    }
857
858    #[allow(clippy::too_many_lines)]
859    #[allow(clippy::too_many_arguments)]
860    pub fn emit_single_intrinsic_call_with_self(
861        &mut self,
862        target_destination: &Destination,
863        node: &Node,
864        intrinsic_fn: &IntrinsicFunction,
865        self_reg: Option<&TypedRegister>,
866        arguments: &[ArgumentExpression],
867        ctx: &Context,
868        comment: &str,
869    ) {
870        let maybe_target = target_destination.register();
871
872        match intrinsic_fn {
873            IntrinsicFunction::Float2Magnitude
874            | IntrinsicFunction::FloatAbs
875            | IntrinsicFunction::FloatRound
876            | IntrinsicFunction::FloatFloor
877            | IntrinsicFunction::FloatSqrt
878            | IntrinsicFunction::FloatSign
879            | IntrinsicFunction::FloatRnd
880            | IntrinsicFunction::FloatCos
881            | IntrinsicFunction::FloatSin
882            | IntrinsicFunction::FloatAcos
883            | IntrinsicFunction::FloatAsin
884            | IntrinsicFunction::FloatAtan2
885            | IntrinsicFunction::FloatMin
886            | IntrinsicFunction::FloatMax
887            | IntrinsicFunction::FloatClamp
888            | IntrinsicFunction::FloatToString => {
889                // Float
890                let (temp_reg, dest_reg) = if target_destination.is_register() {
891                    (None, target_destination.register().unwrap().clone())
892                } else {
893                    let temp_reg = self.temp_registers.allocate(
894                        VmType::new_contained_in_register(float_type()),
895                        "temporary destination for low level intrinsic",
896                    );
897
898                    (Some(temp_reg.register.clone()), temp_reg.register)
899                };
900
901                // Materialize self to ensure we have the actual scalar value
902                let mut converted_regs = vec![self_reg.unwrap().clone()];
903                for arg in arguments {
904                    let ArgumentExpression::Expression(found_expression) = arg else {
905                        panic!("must be expression");
906                    };
907                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
908                    converted_regs.push(materialized_arg);
909                }
910
911                self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);
912
913                if let Some(temp_reg) = temp_reg {
914                    self.emit_store_scalar_to_memory_offset_instruction(
915                        target_destination.grab_memory_location(),
916                        &temp_reg,
917                        node,
918                        "store the fixed point value into memory",
919                    );
920                }
921            }
922
923            IntrinsicFunction::IntToFloat => {
924                // IntToFloat - special case because it returns a float, not an int
925                let (temp_reg, dest_reg) = if target_destination.is_register() {
926                    (None, target_destination.register().unwrap().clone())
927                } else {
928                    let temp_reg = self.temp_registers.allocate(
929                        VmType::new_contained_in_register(float_type()),
930                        "temporary destination for int to float intrinsic",
931                    );
932
933                    (Some(temp_reg.register.clone()), temp_reg.register)
934                };
935
936                // Self is already materialized as a register
937                let int_value_reg = self_reg.unwrap();
938
939                // Now convert the materialized integer value to float
940                self.builder.add_int_to_float(
941                    &dest_reg,
942                    int_value_reg,
943                    node,
944                    &format!("int to float {}", int_value_reg.comment()),
945                );
946
947                if let Some(temp_reg) = temp_reg {
948                    self.emit_store_scalar_to_memory_offset_instruction(
949                        target_destination.grab_memory_location(),
950                        &temp_reg,
951                        node,
952                        "store the float result from int to float conversion",
953                    );
954                }
955            }
956
957            IntrinsicFunction::IntAbs
958            | IntrinsicFunction::IntRnd
959            | IntrinsicFunction::IntMax
960            | IntrinsicFunction::IntMin
961            | IntrinsicFunction::IntClamp
962            | IntrinsicFunction::IntToString => {
963                // Int
964                let (temp_reg, dest_reg) = if target_destination.is_register() {
965                    let target_reg = target_destination.register().unwrap();
966                    // Intrinsics can operate on any register directly, no special treatment needed
967                    (None, target_reg.clone())
968                } else {
969                    let temp_reg = self.temp_registers.allocate(
970                        VmType::new_contained_in_register(u32_type()),
971                        "temporary destination for low level intrinsic",
972                    );
973
974                    (Some(temp_reg.register.clone()), temp_reg.register)
975                };
976
977                // Materialize additional arguments (self is already materialized)
978                let mut converted_regs = vec![self_reg.unwrap().clone()];
979                for arg in arguments {
980                    let ArgumentExpression::Expression(found_expression) = arg else {
981                        panic!("must be expression");
982                    };
983                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
984                    converted_regs.push(materialized_arg);
985                }
986
987                self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);
988
989                if let Some(temp_reg) = temp_reg {
990                    if target_destination.is_register() {
991                        // Copy from temp to target register
992                        self.builder.add_mov_reg(
993                            target_destination.register().unwrap(),
994                            &temp_reg,
995                            node,
996                            "copy intrinsic result from temp to target register",
997                        );
998                    } else {
999                        // Store to memory location
1000                        self.emit_store_scalar_to_memory_offset_instruction(
1001                            target_destination.grab_memory_location(),
1002                            &temp_reg,
1003                            node,
1004                            "put the low level intrinsic fixed (int) back to memory",
1005                        );
1006                    }
1007                }
1008            }
1009
1010            IntrinsicFunction::VecPush
1011            | IntrinsicFunction::VecPop
1012            | IntrinsicFunction::VecRemoveIndex
1013            | IntrinsicFunction::VecRemoveIndexGetValue
1014            | IntrinsicFunction::VecRemoveFirstIndexGetValue
1015            | IntrinsicFunction::VecClear
1016            | IntrinsicFunction::VecSlice
1017            | IntrinsicFunction::VecSwap
1018            | IntrinsicFunction::VecInsert
1019            | IntrinsicFunction::VecFirst
1020            | IntrinsicFunction::VecGet
1021            | IntrinsicFunction::VecLast => {
1022                // Vec
1023                // Self is assumed to be a flattened pointer:
1024                let vec_self_ptr_reg = PointerLocation {
1025                    ptr_reg: self_reg.unwrap().clone(),
1026                };
1027                let converted_to_expressions: Vec<_> = arguments
1028                    .iter()
1029                    .map(|arg| {
1030                        let ArgumentExpression::Expression(found_expression) = arg else {
1031                            panic!("must be expression");
1032                        };
1033                        found_expression.clone()
1034                    })
1035                    .collect();
1036
1037                self.emit_intrinsic_call_vec(
1038                    target_destination,
1039                    intrinsic_fn,
1040                    &vec_self_ptr_reg,
1041                    &converted_to_expressions,
1042                    node,
1043                    ctx,
1044                );
1045            }
1046
1047            IntrinsicFunction::GridGet
1048            | IntrinsicFunction::GridSet
1049            | IntrinsicFunction::GridWidth
1050            | IntrinsicFunction::GridHeight => {
1051                // Grid
1052                // Self is assumed to be a flattened pointer:
1053                let grid_self_ptr_reg = PointerLocation {
1054                    ptr_reg: self_reg.unwrap().clone(),
1055                };
1056                let converted_to_expressions: Vec<_> = arguments
1057                    .iter()
1058                    .map(|arg| {
1059                        let ArgumentExpression::Expression(found_expression) = arg else {
1060                            panic!("must be expression");
1061                        };
1062                        found_expression.clone()
1063                    })
1064                    .collect();
1065                self.emit_intrinsic_grid(
1066                    target_destination,
1067                    intrinsic_fn,
1068                    &grid_self_ptr_reg,
1069                    &converted_to_expressions,
1070                    node,
1071                    comment,
1072                    ctx,
1073                );
1074            }
1075
1076            IntrinsicFunction::SparseIsAlive
1077            | IntrinsicFunction::SparseRemove
1078            | IntrinsicFunction::SparseAdd => {
1079                // Sparse
1080                // Self is assumed to be a flattened pointer:
1081                let grid_self_ptr_reg = PointerLocation {
1082                    ptr_reg: self_reg.unwrap().clone(),
1083                };
1084                let converted_to_expressions: Vec<_> = arguments
1085                    .iter()
1086                    .map(|arg| {
1087                        let ArgumentExpression::Expression(found_expression) = arg else {
1088                            panic!("must be expression");
1089                        };
1090                        found_expression.clone()
1091                    })
1092                    .collect();
1093                self.emit_intrinsic_sparse(
1094                    target_destination,
1095                    intrinsic_fn,
1096                    &grid_self_ptr_reg,
1097                    &converted_to_expressions,
1098                    node,
1099                    comment,
1100                    ctx,
1101                );
1102            }
1103
1104            IntrinsicFunction::TransformerFor
1105            | IntrinsicFunction::TransformerWhile
1106            | IntrinsicFunction::TransformerFindMap
1107            | IntrinsicFunction::TransformerAny
1108            | IntrinsicFunction::TransformerAll
1109            | IntrinsicFunction::TransformerMap
1110            | IntrinsicFunction::TransformerFilter
1111            | IntrinsicFunction::TransformerFilterMap
1112            | IntrinsicFunction::TransformerFind
1113            | IntrinsicFunction::TransformerFold => {
1114                // Self is assumed to be a flattened pointer:
1115                let collection_self_ptr_reg = PointerLocation {
1116                    ptr_reg: self_reg.unwrap().clone(),
1117                };
1118
1119                let lambda_expression = &arguments[0];
1120
1121                // Take out lambda and other lookups before generating the code
1122                let ArgumentExpression::Expression(expr) = lambda_expression else {
1123                    panic!("err");
1124                };
1125
1126                let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
1127                    panic!("must have lambda for transformers");
1128                };
1129
1130                self.emit_intrinsic_transformer(
1131                    target_destination,
1132                    intrinsic_fn,
1133                    &collection_self_ptr_reg,
1134                    (lambda_variables.clone(), lambda_expr),
1135                    node,
1136                    ctx,
1137                );
1138            }
1139
1140            IntrinsicFunction::RuntimePanic => {
1141                self.builder
1142                    .add_panic(self_reg.unwrap(), node, "intrinsic panic");
1143            }
1144
1145            IntrinsicFunction::RuntimeHalt => {
1146                self.builder.add_halt(node, "intrinsic halt");
1147            }
1148
1149            IntrinsicFunction::RuntimeStep => {
1150                self.builder.add_step(node, "intrinsic step");
1151            }
1152
1153            IntrinsicFunction::RangeInit => {
1154                let start_reg = self_reg.unwrap();
1155                // let MutRefOrImmutableExpression::Expression(start_arg_expr) = start_arg else {
1156                //    panic!();
1157                //};
1158                // let start_reg = self.emit_scalar_rvalue(start_arg_expr, ctx);
1159
1160                let end_arg = &arguments[0];
1161                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
1162                    panic!();
1163                };
1164                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);
1165
1166                let is_inclusive = &arguments[1];
1167                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
1168                    panic!();
1169                };
1170                let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
1171                let absolute_range_pointer = self.emit_compute_effective_address_to_register(
1172                    target_destination,
1173                    node,
1174                    "create range target pointer",
1175                );
1176                self.builder.add_range_init(
1177                    &absolute_range_pointer,
1178                    start_reg,
1179                    &end_reg,
1180                    &is_inclusive_reg,
1181                    node,
1182                    "create a range",
1183                );
1184            }
1185
1186            // Bool
1187            IntrinsicFunction::CodepointToString => {
1188                if target_destination.is_register() {
1189                    self.builder.add_codepoint_to_string(
1190                        target_destination.register().unwrap(),
1191                        self_reg.unwrap(),
1192                        node,
1193                        "char_to_string",
1194                    );
1195                } else {
1196                    let temp_reg = self.temp_registers.allocate(
1197                        VmType::new_contained_in_register(u32_type()),
1198                        "temporary for codepoint to string intrinsic",
1199                    );
1200
1201                    self.builder.add_codepoint_to_string(
1202                        &temp_reg.register,
1203                        self_reg.unwrap(),
1204                        node,
1205                        "char_to_string",
1206                    );
1207
1208                    self.emit_store_scalar_to_memory_offset_instruction(
1209                        target_destination.grab_memory_location(),
1210                        &temp_reg.register,
1211                        node,
1212                        "store codepoint to string result to memory",
1213                    );
1214                }
1215            }
1216
1217            IntrinsicFunction::CodepointToInt => {
1218                if target_destination.is_register() {
1219                    self.builder.add_mov_reg(
1220                        target_destination.register().unwrap(),
1221                        self_reg.unwrap(),
1222                        node,
1223                        "char_to_int",
1224                    );
1225                } else {
1226                    self.emit_store_scalar_to_memory_offset_instruction(
1227                        target_destination.grab_memory_location(),
1228                        self_reg.unwrap(),
1229                        node,
1230                        "store codepoint to int result to memory",
1231                    );
1232                }
1233            }
1234
1235            // Bool
1236            IntrinsicFunction::ByteToString => {
1237                if target_destination.is_register() {
1238                    self.builder.byte_to_string(
1239                        target_destination.register().unwrap(),
1240                        self_reg.unwrap(),
1241                        node,
1242                        "byte_to_string",
1243                    );
1244                } else {
1245                    let temp_reg = self.temp_registers.allocate(
1246                        VmType::new_contained_in_register(u32_type()),
1247                        "temporary for byte to string intrinsic",
1248                    );
1249
1250                    self.builder.byte_to_string(
1251                        &temp_reg.register,
1252                        self_reg.unwrap(),
1253                        node,
1254                        "byte_to_string",
1255                    );
1256
1257                    self.emit_store_scalar_to_memory_offset_instruction(
1258                        target_destination.grab_memory_location(),
1259                        &temp_reg.register,
1260                        node,
1261                        "store byte to string result to memory",
1262                    );
1263                }
1264            }
1265
1266            IntrinsicFunction::ByteToInt => {
1267                if target_destination.is_register() {
1268                    // It is safe to "upcast" to an i32 from a u8, so just copy the register
1269                    // TODO: Make something smarter so we don't have to copy
1270                    self.builder.add_mov_reg(
1271                        target_destination.register().unwrap(),
1272                        self_reg.unwrap(),
1273                        node,
1274                        "byte_to_int",
1275                    );
1276                } else {
1277                    self.emit_store_scalar_to_memory_offset_instruction(
1278                        target_destination.grab_memory_location(),
1279                        self_reg.unwrap(),
1280                        node,
1281                        "store byte to int result to memory",
1282                    );
1283                }
1284            }
1285
1286            IntrinsicFunction::ByteToFloat => {
1287                if target_destination.is_register() {
1288                    // Use existing int-to-float conversion since byte is just a small integer
1289                    self.builder.add_int_to_float(
1290                        target_destination.register().unwrap(),
1291                        self_reg.unwrap(),
1292                        node,
1293                        "byte_to_float",
1294                    );
1295                } else {
1296                    let temp_reg = self.temp_registers.allocate(
1297                        VmType::new_contained_in_register(float_type()),
1298                        "temporary for byte to float intrinsic",
1299                    );
1300
1301                    // Use existing int-to-float conversion since byte is just a small integer
1302                    self.builder.add_int_to_float(
1303                        &temp_reg.register,
1304                        self_reg.unwrap(),
1305                        node,
1306                        "byte_to_float",
1307                    );
1308
1309                    self.emit_store_scalar_to_memory_offset_instruction(
1310                        target_destination.grab_memory_location(),
1311                        &temp_reg.register,
1312                        node,
1313                        "store byte to float result to memory",
1314                    );
1315                }
1316            }
1317
1318            IntrinsicFunction::ByteToCodepoint => {
1319                if target_destination.is_register() {
1320                    // It is safe to "upcast" to a codepoint from a u8, so just copy the register
1321                    // TODO: Make something smarter so we don't have to copy
1322                    self.builder.add_mov_reg(
1323                        target_destination.register().unwrap(),
1324                        self_reg.unwrap(),
1325                        node,
1326                        "byte_to_codepoint",
1327                    );
1328                } else {
1329                    self.emit_store_scalar_to_memory_offset_instruction(
1330                        target_destination.grab_memory_location(),
1331                        self_reg.unwrap(),
1332                        node,
1333                        "store byte to codepoint result to memory",
1334                    );
1335                }
1336            }
1337
1338            // Bool
1339            IntrinsicFunction::BoolToString => {
1340                if target_destination.is_register() {
1341                    self.builder.bool_to_string(
1342                        target_destination.register().unwrap(),
1343                        self_reg.unwrap(),
1344                        node,
1345                        "bool_to_string",
1346                    );
1347                } else {
1348                    let temp_reg = self.temp_registers.allocate(
1349                        VmType::new_contained_in_register(u32_type()),
1350                        "temporary for bool to string intrinsic",
1351                    );
1352
1353                    self.builder.bool_to_string(
1354                        &temp_reg.register,
1355                        self_reg.unwrap(),
1356                        node,
1357                        "bool_to_string",
1358                    );
1359
1360                    self.emit_store_scalar_to_memory_offset_instruction(
1361                        target_destination.grab_memory_location(),
1362                        &temp_reg.register,
1363                        node,
1364                        "store bool to string result to memory",
1365                    );
1366                }
1367            }
1368
1369            IntrinsicFunction::StringToString => {
1370                if target_destination.is_register() {
1371                    self.builder.add_string_to_string(
1372                        target_destination.register().unwrap(),
1373                        self_reg.unwrap(),
1374                        node,
1375                        "string_to_string",
1376                    );
1377                } else {
1378                    let temp_reg = self.temp_registers.allocate(
1379                        VmType::new_contained_in_register(u32_type()),
1380                        "temporary for string to string intrinsic",
1381                    );
1382
1383                    self.builder.add_string_to_string(
1384                        &temp_reg.register,
1385                        self_reg.unwrap(),
1386                        node,
1387                        "string_to_string",
1388                    );
1389
1390                    self.emit_store_scalar_to_memory_offset_instruction(
1391                        target_destination.grab_memory_location(),
1392                        &temp_reg.register,
1393                        node,
1394                        "store string to string result to memory",
1395                    );
1396                }
1397            }
1398
1399            IntrinsicFunction::StringStartsWith => {
1400                let converted_to_expressions: Vec<_> = arguments
1401                    .iter()
1402                    .map(|arg| {
1403                        let ArgumentExpression::Expression(found_expression) = arg else {
1404                            panic!("must be expression");
1405                        };
1406                        found_expression.clone()
1407                    })
1408                    .collect();
1409                let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
1410                if target_destination.is_register() {
1411                    self.builder.add_string_starts_with(
1412                        target_destination.register().unwrap(),
1413                        self_reg.unwrap(),
1414                        &other_str,
1415                        node,
1416                        "string_starts_with",
1417                    );
1418                } else {
1419                    let temp_reg = self.temp_registers.allocate(
1420                        VmType::new_contained_in_register(u8_type()),
1421                        "temporary for string starts with intrinsic",
1422                    );
1423
1424                    self.builder.add_string_starts_with(
1425                        &temp_reg.register,
1426                        self_reg.unwrap(),
1427                        &other_str,
1428                        node,
1429                        "string_starts_with",
1430                    );
1431
1432                    self.emit_store_scalar_to_memory_offset_instruction(
1433                        target_destination.grab_memory_location(),
1434                        &temp_reg.register,
1435                        node,
1436                        "store string starts with result to memory",
1437                    );
1438                }
1439            }
1440
1441            IntrinsicFunction::StringToInt => {
1442                let pointer = self.emit_compute_effective_address_to_register(target_destination, node, "need pointer to tuple");
1443                self.builder.add_string_to_int(
1444                    &pointer,
1445                    self_reg.unwrap(),
1446                    node,
1447                    "string to int",
1448                );
1449            }
1450            IntrinsicFunction::StringToFloat => {
1451                let pointer = self.emit_compute_effective_address_to_register(target_destination, node, "need pointer to tuple");
1452                self.builder.add_string_to_float(
1453                    &pointer,
1454                    self_reg.unwrap(),
1455                    node,
1456                    "string to float",
1457                );
1458            }
1459
1460            // Common Collection
1461            IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
1462                let collection_pointer = PointerLocation {
1463                    ptr_reg: self_reg.unwrap().clone(),
1464                };
1465                if target_destination.is_register() {
1466                    self.emit_collection_is_empty(
1467                        target_destination.register().unwrap().clone(),
1468                        &collection_pointer,
1469                        node,
1470                        "vec empty",
1471                    );
1472                } else {
1473                    let temp_reg = self.temp_registers.allocate(
1474                        VmType::new_contained_in_register(u8_type()),
1475                        "temporary for collection is empty intrinsic",
1476                    );
1477
1478                    self.emit_collection_is_empty(
1479                        temp_reg.register.clone(),
1480                        &collection_pointer,
1481                        node,
1482                        "vec empty",
1483                    );
1484
1485                    self.emit_store_scalar_to_memory_offset_instruction(
1486                        target_destination.grab_memory_location(),
1487                        &temp_reg.register,
1488                        node,
1489                        "store collection is empty result to memory",
1490                    );
1491                }
1492            }
1493
1494            IntrinsicFunction::StringLen
1495            | IntrinsicFunction::MapLen
1496            | IntrinsicFunction::VecLen => {
1497                let collection_pointer = PointerLocation {
1498                    ptr_reg: self_reg.unwrap().clone(),
1499                };
1500                if target_destination.is_register() {
1501                    self.emit_collection_len(
1502                        target_destination.register().unwrap(),
1503                        &collection_pointer,
1504                        node,
1505                        "get the collection element_count",
1506                    );
1507                } else {
1508                    let temp_reg = self.temp_registers.allocate(
1509                        VmType::new_contained_in_register(u16_type()),
1510                        "temporary for collection len intrinsic",
1511                    );
1512
1513                    self.emit_collection_len(
1514                        &temp_reg.register,
1515                        &collection_pointer,
1516                        node,
1517                        "get the collection element_count",
1518                    );
1519
1520                    self.emit_store_scalar_to_memory_offset_instruction(
1521                        target_destination.grab_memory_location(),
1522                        &temp_reg.register,
1523                        node,
1524                        "store collection len result to memory",
1525                    );
1526                }
1527            }
1528            IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
1529                let collection_pointer = PointerLocation {
1530                    ptr_reg: self_reg.unwrap().clone(),
1531                };
1532                if target_destination.is_register() {
1533                    self.emit_collection_capacity(
1534                        target_destination.register().unwrap(),
1535                        &collection_pointer,
1536                        node,
1537                        "get the collection element_count",
1538                    );
1539                } else {
1540                    let temp_reg = self.temp_registers.allocate(
1541                        VmType::new_contained_in_register(u16_type()),
1542                        "temporary for collection capacity intrinsic",
1543                    );
1544
1545                    self.emit_collection_capacity(
1546                        &temp_reg.register,
1547                        &collection_pointer,
1548                        node,
1549                        "get the collection element_count",
1550                    );
1551
1552                    self.emit_store_scalar_to_memory_offset_instruction(
1553                        target_destination.grab_memory_location(),
1554                        &temp_reg.register,
1555                        node,
1556                        "store collection capacity result to memory",
1557                    );
1558                }
1559            }
1560
1561            IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
1562                // Map
1563                // Self is assumed to be a flattened pointer:
1564                let grid_self_ptr_reg = PointerLocation {
1565                    ptr_reg: self_reg.unwrap().clone(),
1566                };
1567                let converted_to_expressions: Vec<_> = arguments
1568                    .iter()
1569                    .map(|arg| {
1570                        let ArgumentExpression::Expression(found_expression) = arg else {
1571                            panic!("must be expression");
1572                        };
1573                        found_expression.clone()
1574                    })
1575                    .collect();
1576                self.emit_intrinsic_map(
1577                    target_destination,
1578                    intrinsic_fn,
1579                    &grid_self_ptr_reg,
1580                    &converted_to_expressions,
1581                    node,
1582                    comment,
1583                    ctx,
1584                );
1585            } // All intrinsic cases are now handled above
1586        }
1587    }
1588
1589    fn emit_intrinsic_map_remove(
1590        &mut self,
1591        map_header_reg: &PointerLocation,
1592        key_expression: &Expression,
1593        ctx: &Context,
1594    ) {
1595        let key_register =
1596            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);
1597
1598        self.builder
1599            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "");
1600    }
1601
1602    fn emit_collection_capacity(
1603        &mut self,
1604        output_reg: &TypedRegister,
1605        collection_addr: &PointerLocation,
1606        node: &Node,
1607        comment: &str,
1608    ) {
1609        self.builder.add_ld16_from_pointer_with_offset_u16(
1610            output_reg,
1611            &collection_addr.ptr_reg,
1612            COLLECTION_CAPACITY_OFFSET,
1613            node,
1614            comment,
1615        );
1616    }
1617
1618    fn emit_collection_len(
1619        &mut self,
1620        output_reg: &TypedRegister,
1621        collection_addr: &PointerLocation,
1622        node: &Node,
1623        comment: &str,
1624    ) {
1625        self.builder.add_ld16_from_pointer_with_offset_u16(
1626            output_reg,
1627            &collection_addr.ptr_reg,
1628            COLLECTION_ELEMENT_COUNT_OFFSET,
1629            node,
1630            &format!("{comment} - collection element_count"),
1631        );
1632    }
1633
1634    fn emit_collection_is_empty(
1635        &mut self,
1636        output_reg: TypedRegister,
1637        collection_addr: &PointerLocation,
1638        node: &Node,
1639        _comment: &str,
1640    ) {
1641        self.builder.add_ld16_from_pointer_with_offset_u16(
1642            &output_reg,
1643            &collection_addr.ptr_reg,
1644            COLLECTION_ELEMENT_COUNT_OFFSET,
1645            node,
1646            "get the map length for testing if it is empty",
1647        );
1648        self.builder.add_meqz(
1649            &output_reg,
1650            &output_reg,
1651            node,
1652            "convert the map length to inverted bool",
1653        );
1654    }
1655}