swamp_code_gen/
intr.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
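
//! Emission of VM instructions for intrinsic function calls: scalar float and
//! int operations, enum discriminant access, the built-in collection types
//! (Vec, Map, Sparse, Grid), and lambda-based transformers.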
use crate::code_bld::CodeBuilder;
use crate::ctx::Context;

use crate::transformer::{Collection, Transformer};
use source_map_node::Node;
use swamp_semantic::intr::IntrinsicFunction;
use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
use swamp_vm_types::types::{
    Destination, TypedRegister, VmType, float_type, int_type, pointer_type, u8_type, u16_type,
    u32_type,
};
use swamp_vm_types::{
    AggregateMemoryLocation, COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET,
    GRID_HEADER_HEIGHT_OFFSET, GRID_HEADER_WIDTH_OFFSET, MemoryLocation, MemoryOffset,
    PointerLocation,
};

impl CodeBuilder<'_> {
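    /// Emits a single intrinsic call where the first argument (if any) acts as
    /// `self`: it is materialized into a register up front and the remaining
    /// arguments are forwarded unchanged.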
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::single_match_else)]
    pub fn emit_single_intrinsic_call(
        &mut self,
        target_reg: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[ArgumentExpression],
        ctx: &Context,
    ) {
        // For primitive intrinsics, materialize the self argument to a register early
        let self_reg = if arguments.is_empty() {
            None
        } else {
            let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
                panic!("Expected expression for self argument");
            };
            Some(self.emit_scalar_rvalue(self_expr, ctx))
        };

        // Everything after the self argument is passed through unchanged.
        let rest_args = if arguments.len() > 1 {
            &arguments[1..]
        } else {
            &[]
        };
        self.emit_single_intrinsic_call_with_self(
            target_reg,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            rest_args,
            ctx,
            "single intrinsic call",
        );
    }

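    /// Emits map intrinsics (currently `MapHas` and `MapRemove`) against the map
    /// pointed to by `self_ptr_reg`.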
    pub fn emit_intrinsic_map(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::MapHas => {
                let key_argument = &arguments[0];
                // The key has to be materialized in temporary storage so the map can calculate its hash.
                let key_temp_storage_reg =
                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);

                self.builder.add_map_has(
                    output_destination.register().unwrap(),
                    self_ptr_reg,
                    &key_temp_storage_reg,
                    node,
                    "map_has",
                );
            }
            IntrinsicFunction::MapRemove => {
                let key_argument = &arguments[0];
                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
            }
            _ => todo!("unhandled map intrinsic {intrinsic_fn}"),
        }
    }

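    /// Emits sparse-collection intrinsics (`SparseAdd`, `SparseRemove`,
    /// `SparseIsAlive`) against the sparse collection at `self_ptr_reg`.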
    pub fn emit_intrinsic_sparse(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::SparseAdd => {
                let element_to_add_expression = &arguments[0];
                self.emit_sparse_add(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    element_to_add_expression,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::SparseRemove => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
            }

            IntrinsicFunction::SparseIsAlive => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_is_alive(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    sparse_id_int_expression,
                    node,
                    ctx,
                );
            }
            _ => todo!("unhandled sparse intrinsic {intrinsic_fn}"),
        }
    }
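
    /// Emits grid intrinsics (`GridSet`, `GridGet`, `GridWidth`, `GridHeight`)
    /// by computing element addresses or reading fields from the grid header.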
    pub fn emit_intrinsic_grid(
        &mut self,
        target_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::GridSet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];
                let value_expr = &arguments[2];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
                let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(element_gen_type.clone()),
                    "temporary scalar",
                );

                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_gen_type.total_size,
                    node,
                    comment,
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                // Initialize the allocated space first (like variable definition)
                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize grid set allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    value_expr,
                    "grid set",
                    ctx,
                );
            }
            IntrinsicFunction::GridGet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);

                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                // Allocate a temporary register to hold the address of the grid element
                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for grid element address",
                );

                // Get the address of the grid element using the opcode
                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_type.total_size,
                    node,
                    comment,
                );

                // Create a memory location from the element address with correct type information
                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                // Use emit_copy_value_from_memory_location to handle both register and memory destinations
                // This will properly handle aggregates (like optionals) vs scalars
                self.emit_copy_value_from_memory_location(
                    target_destination,
                    &element_memory_location,
                    node,
                    "copy grid element value to destination",
                );
            }

            IntrinsicFunction::GridWidth => {
                // Allocate a temporary register for the width value
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid width",
                );

                // Get the memory location of the width field in the grid header
                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let width_location =
                    self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());

                // Load the width value from the grid header into the temporary register
                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &width_location.location,
                    node,
                    comment,
                );

                // Create a source destination from the temporary register
                let value_source = Destination::Register(temp.register);

                // Use emit_copy_value_between_destinations to handle both register and memory destinations
                self.emit_copy_value_between_destinations(
                    target_destination,
                    &value_source,
                    node,
                    "store grid width to destination",
                );
            }
            IntrinsicFunction::GridHeight => {
                // Allocate a temporary register for the height value
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid height",
                );

                // Get the memory location of the height field in the grid header
                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let height_location =
                    self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());

                // Load the height value from the grid header into the temporary register
                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &height_location.location,
                    node,
                    comment,
                );

                // Create a source destination from the temporary register
                let value_source = Destination::Register(temp.register);

                // Use emit_copy_value_between_destinations to handle both register and memory destinations
                self.emit_copy_value_between_destinations(
                    target_destination,
                    &value_source,
                    node,
                    "store grid height to destination",
                );
            }
            _ => todo!("unhandled grid intrinsic {intrinsic_fn}"),
        }
    }

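    /// Emits vec intrinsics such as push, pop, slice, removal, clear, and
    /// subscript reads, copying element values between the vec storage and the
    /// requested destination.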
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_vec(
        &mut self,
        output_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        ctx: &Context,
    ) {
        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
        match intrinsic_fn {
            IntrinsicFunction::VecPush => {
                let element_expr = &arguments[0];

                let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "pointer to new element",
                );

                self.builder.add_vec_push_addr(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    node,
                    "set pointer to new element",
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                // Initialize the allocated space first (like variable definition)
                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize vec.push allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    element_expr,
                    "vec push",
                    ctx,
                );
            }

            IntrinsicFunction::VecExtend => {
                let element_expr = &arguments[0];
                let other_vec_reg = self.emit_scalar_rvalue(element_expr, ctx);

                self.builder.add_vec_extend(
                    &self_ptr_reg.ptr_reg,
                    &other_vec_reg,
                    node,
                    "extend vec",
                );
            }

            IntrinsicFunction::VecPop => {
                let element_type = self_basic_type.element().unwrap();
                let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
                    found_target_reg.clone()
                } else {
                    let temp = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type.clone()),
                        "temp for vec pop",
                    );
                    temp.register
                };
                self.builder.add_vec_pop(
                    &pop_target_reg,
                    &self_ptr_reg.ptr_reg, // mut self
                    element_type.total_size,
                    node,
                    "vec pop",
                );
                let source_memory_location = MemoryLocation {
                    base_ptr_reg: pop_target_reg,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "copy from vec pop",
                );
            }

            IntrinsicFunction::VecSlice => {
                let range_expr = &arguments[0];
                let range_region = self.emit_scalar_rvalue(range_expr, ctx);

                let output_pointer = self.emit_compute_effective_address_to_register(
                    output_destination,
                    node,
                    "get absolute pointer for vec slice destination",
                );
                let output_pointer_location = PointerLocation::new(output_pointer);

                self.builder.add_vec_copy_range(
                    &output_pointer_location,
                    self_ptr_reg,
                    &range_region,
                    node,
                    "vec slice",
                );
            }

            IntrinsicFunction::VecRemoveIndex => {
                let index_region_expr = &arguments[0];
                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    &index_region,
                    node,
                    "remove index",
                );
            }
            IntrinsicFunction::VecRemoveIndexGetValue => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_basic_type.element().unwrap();

                // Handle both register and memory destinations
                if let Some(target_reg) = output_destination.register() {
                    // Direct register destination
                    self.builder.add_vec_remove_index_get_value(
                        target_reg,
                        &self_ptr_reg.ptr_reg, // mut self
                        &key_region,
                        node,
                        "vec remove index get value to register",
                    );
                } else {
                    // Memory destination or other
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type),
                        "temp for vec remove index get value",
                    );

                    self.builder.add_vec_remove_index_get_value(
                        &temp_reg.register,
                        &self_ptr_reg.ptr_reg,
                        &key_region,
                        node,
                        "vec remove index get value to temp",
                    );

                    // Copy from temporary register to destination
                    let source = Destination::Register(temp_reg.register);
                    self.emit_copy_value_between_destinations(
                        output_destination,
                        &source,
                        node,
                        "copy vec element to destination",
                    );
                }
            }
            IntrinsicFunction::VecRemoveFirstIndexGetValue => {
                let zero_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "vec remove first. set index 0",
                );
                self.builder
                    .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
                let value_addr_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u32_type()),
                    "vec entry addr to copy from",
                );
                let element_type = self_basic_type.element().unwrap();
                self.builder.add_vec_subscript(
                    value_addr_reg.register(),
                    &self_ptr_reg.ptr_reg,
                    zero_reg.register(),
                    element_type.total_size,
                    node,
                    "lookup first entry in vec",
                );

                let source_memory_location = MemoryLocation {
                    base_ptr_reg: value_addr_reg.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "load the vec entry to target register",
                );

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg, // mut self
                    zero_reg.register(),
                    node,
                    "vec remove first index",
                );
            }
            IntrinsicFunction::VecClear => {
                let temp_element_count_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "vec_clear zero",
                );
                self.builder.add_mov_16_immediate_value(
                    temp_element_count_reg.register(),
                    0,
                    node,
                    "set to zero",
                );

                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );

                self.builder.add_st16_using_ptr_with_offset(
                    &self_memory_location
                        .offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type())
                        .location,
                    temp_element_count_reg.register(),
                    node,
                    "set element_count to zero",
                );
            }

            IntrinsicFunction::VecGet => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                // Similar approach as GridGet - get pointer to element and use copy helpers
                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for vec element address",
                );

                // Get the address of the vector element
                self.builder.add_vec_subscript(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    &key_region,
                    element_type.total_size,
                    node,
                    "get vec element address",
                );

                // Create a memory location for the element
                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                // Copy from memory location to destination (works for both register and memory)
                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &element_memory_location,
                    node,
                    "copy vec element to destination",
                );
            }
            _ => todo!("unhandled vec intrinsic {intrinsic_fn}"),
        }

        /*
                   IntrinsicFunction::VecSwap => {
               let index_a = self
                   .emit_for_access_or_location(&arguments[0], ctx)
                   .grab_rvalue()
                   .clone();
               let index_b = self
                   .emit_for_access_or_location(&arguments[1], ctx)
                   .grab_rvalue()
                   .clone();
               self.builder
                   .add_vec_swap(self_addr.unwrap(), &index_a, &index_b, node, "vec swap");
           }

           IntrinsicFunction::VecInsert => { // Low prio
           }
           IntrinsicFunction::VecFirst => { // Low prio
           }
           IntrinsicFunction::VecLast => { // Low prio
           }

        */
    }

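    /// Emits int intrinsics (abs, rnd, min/max, clamp, conversions) directly
    /// into `target_reg`; all arguments are expected to already be materialized
    /// in registers.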
    fn emit_intrinsic_call_int(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument = &arguments[0];

        // Intrinsics can operate on any register directly, no need for register protection
        match intrinsic_fn {
            IntrinsicFunction::IntAbs => {
                self.builder
                    .add_int_abs(target_reg, first_argument, node, "int abs");
            }

            IntrinsicFunction::IntRnd => {
                self.builder
                    .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
            }
            IntrinsicFunction::IntMax => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_max(target_reg, first_argument, int_register, node, "int max");
            }
            IntrinsicFunction::IntMin => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_min(target_reg, first_argument, int_register, node, "int min");
            }
            IntrinsicFunction::IntClamp => {
                let min_reg = &arguments[1];
                let max_reg = &arguments[2];
                self.builder.add_int_clamp(
                    target_reg,
                    first_argument,
                    min_reg,
                    max_reg,
                    node,
                    "int clamp",
                );
            }
            IntrinsicFunction::IntToFloat => {
                self.builder.add_int_to_float(
                    target_reg,
                    first_argument,
                    node,
                    &format!("int to float {}", first_argument.comment()),
                );
            }
            IntrinsicFunction::IntToString => {
                self.builder
                    .add_int_to_string(target_reg, first_argument, node, "int_to_string");
            }
            _ => {}
        }
        // No need to copy from a temporary register as we're using target_reg directly
    }

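    /// Emits float (fixed-point) intrinsics directly into `target_reg`; all
    /// arguments are expected to already be materialized in registers.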
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_fixed(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        // Intrinsics can operate directly on any register, no need for temporary registers
        let first_argument_reg = &arguments[0];
        match intrinsic_fn {
            IntrinsicFunction::FloatRound => {
                self.builder
                    .add_float_round(target_reg, first_argument_reg, node, "float round");
            }
            IntrinsicFunction::FloatFloor => {
                self.builder
                    .add_float_floor(target_reg, first_argument_reg, node, "float floor");
            }
            IntrinsicFunction::FloatSqrt => {
                self.builder
                    .add_float_sqrt(target_reg, first_argument_reg, node, "float sqrt");
            }
            IntrinsicFunction::FloatSign => {
                self.builder
                    .add_float_sign(target_reg, first_argument_reg, node, "float sign");
            }
            IntrinsicFunction::FloatAbs => {
                self.builder
                    .add_float_abs(target_reg, first_argument_reg, node, "float abs");
            }
            IntrinsicFunction::FloatRnd => {
                self.builder.add_float_prnd(
                    target_reg,
                    first_argument_reg,
                    node,
                    "float pseudo random",
                );
            }
            IntrinsicFunction::FloatCos => {
                self.builder
                    .add_float_cos(target_reg, first_argument_reg, node, "float cos");
            }
            IntrinsicFunction::FloatSin => {
                self.builder
                    .add_float_sin(target_reg, first_argument_reg, node, "float sin");
            }
            IntrinsicFunction::FloatAcos => {
                self.builder
                    .add_float_acos(target_reg, first_argument_reg, node, "float acos");
            }
            IntrinsicFunction::FloatAsin => {
                self.builder
                    .add_float_asin(target_reg, first_argument_reg, node, "float asin");
            }
            IntrinsicFunction::FloatAtan2 => {
                self.builder
                    .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
            }
            IntrinsicFunction::FloatMin => {
                let float_region = &arguments[1];
                self.builder.add_float_min(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float min",
                );
            }
            IntrinsicFunction::FloatMax => {
                let float_region = &arguments[1];
                self.builder.add_float_max(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float max",
                );
            }
            IntrinsicFunction::FloatClamp => {
                let float_region = &arguments[1];
                let float_b_region = &arguments[2];

                self.builder.add_float_clamp(
                    target_reg,
                    float_region,
                    first_argument_reg,
                    float_b_region,
                    node,
                    "float clamp",
                );
            }
            IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
                target_reg,
                first_argument_reg,
                node,
                "float_to_string",
            ),
            _ => panic!("not a float (fixed-point) intrinsic"),
        }
        // No need to copy from temp register to target as we're using target_reg directly
    }

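    /// Emits transformer intrinsics (for, while, filter, find, ...) by lowering
    /// them to a collection iteration that applies the provided lambda.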
    pub fn emit_intrinsic_transformer(
        &mut self,
        target_destination: &Destination,
        intrinsic_fn: &IntrinsicFunction,
        self_addr: &PointerLocation,
        lambda: (Vec<VariableRef>, &Expression),
        node: &Node,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::TransformerFold => { // Low prio
            }
            IntrinsicFunction::TransformerFilter => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Filter,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::For,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            IntrinsicFunction::TransformerWhile => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::While,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFind => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Find,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            _ => todo!("unhandled transformer intrinsic {intrinsic_fn}"),
        }
    }

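    /// Variant of [`Self::emit_single_intrinsic_call_with_self`] that first
    /// materializes `self` from a [`Destination`] instead of taking a register.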
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self_destination(
        &mut self,
        target_destination: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_destination: Option<&Destination>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        // Use the helper function to properly materialize the self argument
        let self_reg = if let Some(self_dest) = self_destination {
            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
        } else {
            None
        };

        // Delegate to the existing function
        self.emit_single_intrinsic_call_with_self(
            target_destination,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            arguments,
            ctx,
            comment,
        );
    }

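    /// Central dispatch for intrinsic calls: `self` (when present) is already in
    /// a register, the remaining arguments are materialized here as needed, and
    /// the result is written to `target_destination` (register or memory).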
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self(
        &mut self,
        target_destination: &Destination,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_reg: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        let maybe_target = target_destination.register();

        match intrinsic_fn {
            IntrinsicFunction::Float2Magnitude
            | IntrinsicFunction::FloatAbs
            | IntrinsicFunction::FloatRound
            | IntrinsicFunction::FloatFloor
            | IntrinsicFunction::FloatSqrt
            | IntrinsicFunction::FloatSign
            | IntrinsicFunction::FloatRnd
            | IntrinsicFunction::FloatCos
            | IntrinsicFunction::FloatSin
            | IntrinsicFunction::FloatAcos
            | IntrinsicFunction::FloatAsin
            | IntrinsicFunction::FloatAtan2
            | IntrinsicFunction::FloatMin
            | IntrinsicFunction::FloatMax
            | IntrinsicFunction::FloatClamp
            | IntrinsicFunction::FloatToString => {
                // Float
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Materialize self to ensure we have the actual scalar value
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the fixed point value into memory",
                    );
                }
            }

            IntrinsicFunction::IntToFloat => {
                // IntToFloat - special case because it returns a float, not an int
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for int to float intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Self is already materialized as a register
                let int_value_reg = self_reg.unwrap();

                // Now convert the materialized integer value to float
                self.builder.add_int_to_float(
                    &dest_reg,
                    int_value_reg,
                    node,
                    &format!("int to float {}", int_value_reg.comment()),
                );

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the float result from int to float conversion",
                    );
                }
            }

            IntrinsicFunction::IntAbs
            | IntrinsicFunction::IntRnd
            | IntrinsicFunction::IntMax
            | IntrinsicFunction::IntMin
            | IntrinsicFunction::IntClamp
            | IntrinsicFunction::IntToString => {
                // Int
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    let target_reg = target_destination.register().unwrap();
                    // Intrinsics can operate on any register directly, no special treatment needed
                    (None, target_reg.clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Materialize additional arguments (self is already materialized)
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    if target_destination.is_register() {
                        // Copy from temp to target register
                        self.builder.add_mov_reg(
                            target_destination.register().unwrap(),
                            &temp_reg,
                            node,
                            "copy intrinsic result from temp to target register",
                        );
                    } else {
                        // Store to memory location
                        self.emit_store_scalar_to_memory_offset_instruction(
                            target_destination.grab_memory_location(),
                            &temp_reg,
                            node,
                            "put the low level intrinsic fixed (int) back to memory",
                        );
                    }
                }
            }

            IntrinsicFunction::EnumFromDiscriminant => {
                let enum_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                assert_eq!(
                    arguments.len(),
                    1,
                    "EnumFromDiscriminant expects exactly one argument"
                );
                // Materialize additional arguments (self is already materialized)
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                // TODO: @important: Support different sizes of discriminants
                self.builder.add_st8_using_ptr_with_offset(
                    &MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        enum_pointer.ptr_reg,
                    ),
                    &converted_regs[1], // the discriminant to set
                    node,
                    "overwrite the discriminant in the enum pointer",
                );
            }

            IntrinsicFunction::EnumDiscriminant => {
                let enum_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let discriminant_temp_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "temp register for fetching discriminant",
                );

                // TODO: @important: Support different sizes of discriminants
                self.builder.add_ld8_from_pointer_with_offset(
                    discriminant_temp_reg.register(),
                    &enum_pointer.ptr_reg,
                    MemoryOffset(0), // Support proper tag offsets
                    node,
                    "get the discriminant from the enum pointer",
                );

                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        &discriminant_temp_reg.register,
                        node,
                        "copy discriminant in register to target register",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &discriminant_temp_reg.register,
                        node,
                        "store discriminant in register to target memory location",
                    );
                }
            }

            /*
            IntrinsicFunction::EnumFromDiscriminant => {

            }

             */
            IntrinsicFunction::VecPush
            | IntrinsicFunction::VecPop
            | IntrinsicFunction::VecExtend
            | IntrinsicFunction::VecRemoveIndex
            | IntrinsicFunction::VecRemoveIndexGetValue
            | IntrinsicFunction::VecRemoveFirstIndexGetValue
            | IntrinsicFunction::VecClear
            | IntrinsicFunction::VecSlice
            | IntrinsicFunction::VecSwap
            | IntrinsicFunction::VecInsert
            | IntrinsicFunction::VecFirst
            | IntrinsicFunction::VecGet
            | IntrinsicFunction::VecLast => {
                // Vec
                // Self is assumed to be a flattened pointer:
                let vec_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();

                self.emit_intrinsic_call_vec(
                    target_destination,
                    intrinsic_fn,
                    &vec_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::GridGet
            | IntrinsicFunction::GridSet
            | IntrinsicFunction::GridWidth
            | IntrinsicFunction::GridHeight => {
                // Grid
                // Self is assumed to be a flattened pointer:
                let grid_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_grid(
                    target_destination,
                    intrinsic_fn,
                    &grid_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::SparseIsAlive
            | IntrinsicFunction::SparseRemove
            | IntrinsicFunction::SparseAdd => {
                // Sparse
                // Self is assumed to be a flattened pointer:
                let sparse_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_sparse(
                    target_destination,
                    intrinsic_fn,
                    &sparse_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor
            | IntrinsicFunction::TransformerWhile
            | IntrinsicFunction::TransformerFindMap
            | IntrinsicFunction::TransformerAny
            | IntrinsicFunction::TransformerAll
            | IntrinsicFunction::TransformerMap
            | IntrinsicFunction::TransformerFilter
            | IntrinsicFunction::TransformerFilterMap
            | IntrinsicFunction::TransformerFind
            | IntrinsicFunction::TransformerFold => {
                // Self is assumed to be a flattened pointer:
                let collection_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let lambda_expression = &arguments[0];

                // Take out lambda and other lookups before generating the code
                let ArgumentExpression::Expression(expr) = lambda_expression else {
                    panic!("transformer lambda argument must be an expression");
                };

                let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
                    panic!("must have lambda for transformers");
                };

                self.emit_intrinsic_transformer(
                    target_destination,
                    intrinsic_fn,
                    &collection_self_ptr_reg,
                    (lambda_variables.clone(), lambda_expr),
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::RuntimePanic => {
                self.builder
                    .add_panic(self_reg.unwrap(), node, "intrinsic panic");
            }

            IntrinsicFunction::RuntimeHalt => {
                self.builder.add_halt(node, "intrinsic halt");
            }

            IntrinsicFunction::RuntimeStep => {
                self.builder.add_step(node, "intrinsic step");
            }

            IntrinsicFunction::RangeInit => {
                let start_reg = self_reg.unwrap();
                // let MutRefOrImmutableExpression::Expression(start_arg_expr) = start_arg else {
                //    panic!();
                //};
                // let start_reg = self.emit_scalar_rvalue(start_arg_expr, ctx);

                let end_arg = &arguments[0];
                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
                    panic!("range end must be an expression");
                };
                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);

                let is_inclusive = &arguments[1];
                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
                    panic!("range inclusivity flag must be an expression");
                };
                let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
                let absolute_range_pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "create range target pointer",
                );
                self.builder.add_range_init(
                    &absolute_range_pointer,
                    start_reg,
                    &end_reg,
                    &is_inclusive_reg,
                    node,
                    "create a range",
                );
            }

            // Codepoint
            IntrinsicFunction::CodepointToString => {
                if target_destination.is_register() {
                    self.builder.add_codepoint_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for codepoint to string intrinsic",
                    );

                    self.builder.add_codepoint_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "char_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store codepoint to string result to memory",
                    );
                }
            }

            IntrinsicFunction::CodepointToInt => {
                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "char_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store codepoint to int result to memory",
                    );
                }
            }

            // Byte
            IntrinsicFunction::ByteToString => {
                if target_destination.is_register() {
                    self.builder.byte_to_string(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary for byte to string intrinsic",
                    );

                    self.builder.byte_to_string(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_string",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to string result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToInt => {
                if target_destination.is_register() {
                    // It is safe to "upcast" to an i32 from a u8, so just copy the register
                    // TODO: Make something smarter so we don't have to copy
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_int",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        self_reg.unwrap(),
                        node,
                        "store byte to int result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToFloat => {
                if target_destination.is_register() {
                    // Use existing int-to-float conversion since byte is just a small integer
                    self.builder.add_int_to_float(
                        target_destination.register().unwrap(),
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary for byte to float intrinsic",
                    );

                    // Use existing int-to-float conversion since byte is just a small integer
                    self.builder.add_int_to_float(
                        &temp_reg.register,
                        self_reg.unwrap(),
                        node,
                        "byte_to_float",
                    );

                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg.register,
                        node,
                        "store byte to float result to memory",
                    );
                }
            }

            IntrinsicFunction::ByteToCodepoint => {
1423                if target_destination.is_register() {
1424                    // It is safe to "upcast" to a codepoint from a u8, so just copy the register
1425                    // TODO: Make something smarter so we don't have to copy
1426                    self.builder.add_mov_reg(
1427                        target_destination.register().unwrap(),
1428                        self_reg.unwrap(),
1429                        node,
1430                        "byte_to_codepoint",
1431                    );
1432                } else {
1433                    self.emit_store_scalar_to_memory_offset_instruction(
1434                        target_destination.grab_memory_location(),
1435                        self_reg.unwrap(),
1436                        node,
1437                        "store byte to codepoint result to memory",
1438                    );
1439                }
1440            }
1441
1442            // Bool
1443            IntrinsicFunction::BoolToString => {
1444                if target_destination.is_register() {
1445                    self.builder.bool_to_string(
1446                        target_destination.register().unwrap(),
1447                        self_reg.unwrap(),
1448                        node,
1449                        "bool_to_string",
1450                    );
1451                } else {
1452                    let temp_reg = self.temp_registers.allocate(
1453                        VmType::new_contained_in_register(u32_type()),
1454                        "temporary for bool to string intrinsic",
1455                    );
1456
1457                    self.builder.bool_to_string(
1458                        &temp_reg.register,
1459                        self_reg.unwrap(),
1460                        node,
1461                        "bool_to_string",
1462                    );
1463
1464                    self.emit_store_scalar_to_memory_offset_instruction(
1465                        target_destination.grab_memory_location(),
1466                        &temp_reg.register,
1467                        node,
1468                        "store bool to string result to memory",
1469                    );
1470                }
1471            }
1472
1473            IntrinsicFunction::StringToString => {
1474                if target_destination.is_register() {
1475                    self.builder.add_string_to_string(
1476                        target_destination.register().unwrap(),
1477                        self_reg.unwrap(),
1478                        node,
1479                        "string_to_string",
1480                    );
1481                } else {
1482                    let temp_reg = self.temp_registers.allocate(
1483                        VmType::new_contained_in_register(u32_type()),
1484                        "temporary for string to string intrinsic",
1485                    );
1486
1487                    self.builder.add_string_to_string(
1488                        &temp_reg.register,
1489                        self_reg.unwrap(),
1490                        node,
1491                        "string_to_string",
1492                    );
1493
1494                    self.emit_store_scalar_to_memory_offset_instruction(
1495                        target_destination.grab_memory_location(),
1496                        &temp_reg.register,
1497                        node,
1498                        "store string to string result to memory",
1499                    );
1500                }
1501            }
1502
1503            IntrinsicFunction::StringStartsWith => {
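                    // Unwrap the argument wrappers; both plain and materialized arguments
                    // carry the expression we need for the comparison string.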
1504                let converted_to_expressions: Vec<_> = arguments
1505                    .iter()
1506                    .map(|arg| {
1507                        let (ArgumentExpression::Expression(found_expression)
1508                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1509                        else {
1510                            panic!("must be expression");
1511                        };
1512                        found_expression.clone()
1513                    })
1514                    .collect();
1515                let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
1516                if target_destination.is_register() {
1517                    self.builder.add_string_starts_with(
1518                        target_destination.register().unwrap(),
1519                        self_reg.unwrap(),
1520                        &other_str,
1521                        node,
1522                        "string_starts_with",
1523                    );
1524                } else {
1525                    let temp_reg = self.temp_registers.allocate(
1526                        VmType::new_contained_in_register(u8_type()),
1527                        "temporary for string starts with intrinsic",
1528                    );
1529
1530                    self.builder.add_string_starts_with(
1531                        &temp_reg.register,
1532                        self_reg.unwrap(),
1533                        &other_str,
1534                        node,
1535                        "string_starts_with",
1536                    );
1537
1538                    self.emit_store_scalar_to_memory_offset_instruction(
1539                        target_destination.grab_memory_location(),
1540                        &temp_reg.register,
1541                        node,
1542                        "store string starts with result to memory",
1543                    );
1544                }
1545            }
1546
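                // String parsing produces more than a single scalar, so the result is written
                // through a pointer to the destination (see "need pointer to tuple" below).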
1547            IntrinsicFunction::StringToInt => {
1548                let pointer = self.emit_compute_effective_address_to_register(
1549                    target_destination,
1550                    node,
1551                    "need pointer to tuple",
1552                );
1553                self.builder
1554                    .add_string_to_int(&pointer, self_reg.unwrap(), node, "string to int");
1555            }
1556            IntrinsicFunction::StringToFloat => {
1557                let pointer = self.emit_compute_effective_address_to_register(
1558                    target_destination,
1559                    node,
1560                    "need pointer to tuple",
1561                );
1562                self.builder.add_string_to_float(
1563                    &pointer,
1564                    self_reg.unwrap(),
1565                    node,
1566                    "string to float",
1567                );
1568            }
1569
1570            // Common Collection
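                // These intrinsics only read the shared collection header fields
                // (element_count / capacity), so the same code serves every collection
                // type handled below.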
1571            IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
1572                let collection_pointer = PointerLocation {
1573                    ptr_reg: self_reg.unwrap().clone(),
1574                };
1575                if target_destination.is_register() {
1576                    self.emit_collection_is_empty(
1577                        target_destination.register().unwrap().clone(),
1578                        &collection_pointer,
1579                        node,
1580                        "collection is empty",
1581                    );
1582                } else {
1583                    let temp_reg = self.temp_registers.allocate(
1584                        VmType::new_contained_in_register(u8_type()),
1585                        "temporary for collection is empty intrinsic",
1586                    );
1587
1588                    self.emit_collection_is_empty(
1589                        temp_reg.register.clone(),
1590                        &collection_pointer,
1591                        node,
1592                        "collection is empty",
1593                    );
1594
1595                    self.emit_store_scalar_to_memory_offset_instruction(
1596                        target_destination.grab_memory_location(),
1597                        &temp_reg.register,
1598                        node,
1599                        "store collection is empty result to memory",
1600                    );
1601                }
1602            }
1603
1604            IntrinsicFunction::StringLen
1605            | IntrinsicFunction::MapLen
1606            | IntrinsicFunction::VecLen => {
1607                let collection_pointer = PointerLocation {
1608                    ptr_reg: self_reg.unwrap().clone(),
1609                };
1610                if target_destination.is_register() {
1611                    self.emit_collection_len(
1612                        target_destination.register().unwrap(),
1613                        &collection_pointer,
1614                        node,
1615                        "get the collection element_count",
1616                    );
1617                } else {
1618                    let temp_reg = self.temp_registers.allocate(
1619                        VmType::new_contained_in_register(u16_type()),
1620                        "temporary for collection len intrinsic",
1621                    );
1622
1623                    self.emit_collection_len(
1624                        &temp_reg.register,
1625                        &collection_pointer,
1626                        node,
1627                        "get the collection element_count",
1628                    );
1629
1630                    self.emit_store_scalar_to_memory_offset_instruction(
1631                        target_destination.grab_memory_location(),
1632                        &temp_reg.register,
1633                        node,
1634                        "store collection len result to memory",
1635                    );
1636                }
1637            }
1638            IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
1639                let collection_pointer = PointerLocation {
1640                    ptr_reg: self_reg.unwrap().clone(),
1641                };
1642                if target_destination.is_register() {
1643                    self.emit_collection_capacity(
1644                        target_destination.register().unwrap(),
1645                        &collection_pointer,
1646                        node,
1647                        "get the collection capacity",
1648                    );
1649                } else {
1650                    let temp_reg = self.temp_registers.allocate(
1651                        VmType::new_contained_in_register(u16_type()),
1652                        "temporary for collection capacity intrinsic",
1653                    );
1654
1655                    self.emit_collection_capacity(
1656                        &temp_reg.register,
1657                        &collection_pointer,
1658                        node,
1659                        "get the collection capacity",
1660                    );
1661
1662                    self.emit_store_scalar_to_memory_offset_instruction(
1663                        target_destination.grab_memory_location(),
1664                        &temp_reg.register,
1665                        node,
1666                        "store collection capacity result to memory",
1667                    );
1668                }
1669            }
1670
1671            IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
1672                // Map
1673                // Self is assumed to be a flattened pointer:
1674                let map_self_ptr_reg = PointerLocation {
1675                    ptr_reg: self_reg.unwrap().clone(),
1676                };
1677                let converted_to_expressions: Vec<_> = arguments
1678                    .iter()
1679                    .map(|arg| {
1680                        let (ArgumentExpression::Expression(found_expression)
1681                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1682                        else {
1683                            panic!("must be expression");
1684                        };
1685                        found_expression.clone()
1686                    })
1687                    .collect();
1688                self.emit_intrinsic_map(
1689                    target_destination,
1690                    intrinsic_fn,
1691                    &map_self_ptr_reg,
1692                    &converted_to_expressions,
1693                    node,
1694                    comment,
1695                    ctx,
1696                );
1697            } // All intrinsic cases are now handled above
1698        }
1699    }
1700
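        /// Emits a map removal: the key is materialized into addressable memory and handed
        /// to `add_map_remove` together with the map header pointer.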
1701    fn emit_intrinsic_map_remove(
1702        &mut self,
1703        map_header_reg: &PointerLocation,
1704        key_expression: &Expression,
1705        ctx: &Context,
1706    ) {
1707        let key_register =
1708            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);
1709
1710        self.builder
1711            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "map_remove");
1712    }
1713
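        /// Loads the 16-bit capacity field from the collection header
        /// (`COLLECTION_CAPACITY_OFFSET`) into `output_reg`.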
1714    fn emit_collection_capacity(
1715        &mut self,
1716        output_reg: &TypedRegister,
1717        collection_addr: &PointerLocation,
1718        node: &Node,
1719        comment: &str,
1720    ) {
1721        self.builder.add_ld16_from_pointer_with_offset_u16(
1722            output_reg,
1723            &collection_addr.ptr_reg,
1724            COLLECTION_CAPACITY_OFFSET,
1725            node,
1726            comment,
1727        );
1728    }
1729
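        /// Loads the 16-bit element_count field from the collection header
        /// (`COLLECTION_ELEMENT_COUNT_OFFSET`) into `output_reg`.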
1730    fn emit_collection_len(
1731        &mut self,
1732        output_reg: &TypedRegister,
1733        collection_addr: &PointerLocation,
1734        node: &Node,
1735        comment: &str,
1736    ) {
1737        self.builder.add_ld16_from_pointer_with_offset_u16(
1738            output_reg,
1739            &collection_addr.ptr_reg,
1740            COLLECTION_ELEMENT_COUNT_OFFSET,
1741            node,
1742            &format!("{comment} - collection element_count"),
1743        );
1744    }
1745
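        /// Loads the element_count from the collection header and converts it to an
        /// is-empty bool via `add_meqz`, i.e. roughly:
        ///
        /// ```text
        /// output = *(collection_ptr + COLLECTION_ELEMENT_COUNT_OFFSET)  // u16 load
        /// output = (output == 0)                                        // is-empty
        /// ```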
1746    fn emit_collection_is_empty(
1747        &mut self,
1748        output_reg: TypedRegister,
1749        collection_addr: &PointerLocation,
1750        node: &Node,
1751        _comment: &str,
1752    ) {
1753        self.builder.add_ld16_from_pointer_with_offset_u16(
1754            &output_reg,
1755            &collection_addr.ptr_reg,
1756            COLLECTION_ELEMENT_COUNT_OFFSET,
1757            node,
1758            "get the collection element_count to test if it is empty",
1759        );
1760        self.builder.add_meqz(
1761            &output_reg,
1762            &output_reg,
1763            node,
1764            "convert the collection element_count to an is-empty bool",
1765        );
1766    }
1767}